Diffstat (limited to '.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip')
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__init__.py  1
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__main__.py  19
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__init__.cpython-37.pyc  bin 0 -> 154 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__main__.cpython-37.pyc  bin 0 -> 406 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py  77
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pyc  bin 0 -> 1695 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pyc  bin 0 -> 7390 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pyc  bin 0 -> 7008 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pyc  bin 0 -> 10510 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pyc  bin 0 -> 25556 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pyc  bin 0 -> 12700 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pyc  bin 0 -> 37345 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc  bin 0 -> 10498 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pyc  bin 0 -> 2914 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pyc  bin 0 -> 8273 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pyc  bin 0 -> 3134 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pyc  bin 0 -> 27114 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py  218
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py  224
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py  4
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc  bin 0 -> 229 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc  bin 0 -> 5042 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc  bin 0 -> 8106 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc  bin 0 -> 19805 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc  bin 0 -> 2151 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pyc  bin 0 -> 8893 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc  bin 0 -> 358 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py  152
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py  346
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py  929
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py  98
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py  261
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py  8
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py  81
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc  bin 0 -> 2526 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pyc  bin 0 -> 1282 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pyc  bin 0 -> 3031 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc  bin 0 -> 7072 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pyc  bin 0 -> 3313 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pyc  bin 0 -> 4649 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc  bin 0 -> 2973 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pyc  bin 0 -> 2021 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pyc  bin 0 -> 1197 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pyc  bin 0 -> 12835 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pyc  bin 0 -> 8882 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pyc  bin 0 -> 4372 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pyc  bin 0 -> 5844 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc  bin 0 -> 2653 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc  bin 0 -> 4819 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py  41
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py  94
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py  258
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py  114
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py  168
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py  101
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py  57
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py  37
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py  580
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py  311
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py  139
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py  168
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py  78
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py  181
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py  417
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py  23
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc  bin 0 -> 814 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pyc  bin 0 -> 1593 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pyc  bin 0 -> 939 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pyc  bin 0 -> 3007 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc  bin 0 -> 999 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py  33
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py  15
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py  80
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py  17
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py  1163
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py  305
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py  1508
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py  457
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py  142
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py  2
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pyc  bin 0 -> 217 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pyc  bin 0 -> 1442 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pyc  bin 0 -> 2220 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pyc  bin 0 -> 1121 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pyc  bin 0 -> 6207 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pyc  bin 0 -> 3215 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc  bin 0 -> 1579 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pyc  bin 0 -> 3155 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py  36
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py  73
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py  31
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py  213
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py  113
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py  47
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py  106
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py  0
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc  bin 0 -> 153 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pyc  bin 0 -> 3649 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc  bin 0 -> 5643 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc  bin 0 -> 5671 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py  159
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py  253
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py  287
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py  387
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py  171
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py  78
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pyc  bin 0 -> 1664 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pyc  bin 0 -> 7860 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pyc  bin 0 -> 9310 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pyc  bin 0 -> 25400 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pyc  bin 0 -> 5625 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc  bin 0 -> 3176 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc  bin 0 -> 17239 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py  349
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py  399
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py  1035
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py  193
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py  96
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py  633
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py  0
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc  bin 0 -> 148 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc  bin 0 -> 7979 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pyc  bin 0 -> 6955 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc  bin 0 -> 2765 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc  bin 0 -> 1232 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc  bin 0 -> 617 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc  bin 0 -> 2212 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc  bin 0 -> 4088 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pyc  bin 0 -> 9080 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc  bin 0 -> 755 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pyc  bin 0 -> 29434 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pyc  bin 0 -> 1896 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc  bin 0 -> 4154 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc  bin 0 -> 2583 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc  bin 0 -> 1004 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc  bin 0 -> 4870 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pyc  bin 0 -> 1278 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pyc  bin 0 -> 11697 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc  bin 0 -> 863 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py  268
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py  293
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py  100
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py  39
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py  30
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py  120
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py  128
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py  394
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py  20
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py  1204
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py  40
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py  178
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py  94
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py  36
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py  155
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py  29
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py  424
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py  34
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py  12
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc  bin 0 -> 424 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc  bin 0 -> 3340 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pyc  bin 0 -> 8877 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc  bin 0 -> 3641 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc  bin 0 -> 8244 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc  bin 0 -> 16902 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py  101
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py  358
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py  103
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py  314
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py  600
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py  1125
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__init__.py  109
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/__init__.cpython-37.pyc  bin 0 -> 2815 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/appdirs.cpython-37.pyc  bin 0 -> 20565 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/distro.cpython-37.pyc  bin 0 -> 36278 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/ipaddress.cpython-37.pyc  bin 0 -> 66408 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-37.pyc  bin 0 -> 221766 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/retrying.cpython-37.pyc  bin 0 -> 8046 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/six.cpython-37.pyc  bin 0 -> 26370 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/appdirs.py  604
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py  11
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc  bin 0 -> 506 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc  bin 0 -> 1509 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc  bin 0 -> 2992 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc  bin 0 -> 1722 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc  bin 0 -> 713 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc  bin 0 -> 7592 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc  bin 0 -> 2110 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc  bin 0 -> 4630 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc  bin 0 -> 4194 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc  bin 0 -> 614 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py  57
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py  133
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py  39
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py  2
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc  bin 0 -> 250 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc  bin 0 -> 3184 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc  bin 0 -> 1506 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py  146
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py  33
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py  29
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py  367
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py  80
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py  135
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py  186
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py  29
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py  3
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py  2
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc  bin 0 -> 213 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc  bin 0 -> 216 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-37.pyc  bin 0 -> 425 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem  4618
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/core.py  15
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py  39
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc  bin 0 -> 800 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc  bin 0 -> 27135 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc  bin 0 -> 1076 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc  bin 0 -> 6262 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc  bin 0 -> 2183 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc  bin 0 -> 3393 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc  bin 0 -> 2840 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc  bin 0 -> 311 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc  bin 0 -> 1083 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc  bin 0 -> 2574 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc  bin 0 -> 2561 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc  bin 0 -> 7022 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc  bin 0 -> 2369 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc  bin 0 -> 12019 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc  bin 0 -> 1084 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc  bin 0 -> 27139 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc  bin 0 -> 1084 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc  bin 0 -> 19063 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc  bin 0 -> 1092 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc  bin 0 -> 2926 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc  bin 0 -> 22091 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc  bin 0 -> 37970 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc  bin 0 -> 23584 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc  bin 0 -> 29040 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc  bin 0 -> 23542 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc  bin 0 -> 22171 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc  bin 0 -> 23573 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc  bin 0 -> 22150 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc  bin 0 -> 22173 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc  bin 0 -> 2883 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc  bin 0 -> 2188 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc  bin 0 -> 1079 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc  bin 0 -> 15634 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc  bin 0 -> 2941 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc  bin 0 -> 1569 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc  bin 0 -> 2395 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc  bin 0 -> 5785 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc  bin 0 -> 1926 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-37.pyc  bin 0 -> 395 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py  386
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py  47
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py  233
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py  106
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py  145
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py  1
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc  bin 0 -> 152 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc  bin 0 -> 2641 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py  85
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py  88
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py  34
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py  49
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py  76
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py  101
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py  246
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py  92
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py  195
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py  47
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py  387
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py  46
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py  283
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py  46
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py  292
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py  325
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py  233
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py  228
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py  333
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py  225
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py  200
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py  225
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py  199
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py  193
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py  145
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py  91
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py  54
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py  572
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py  132
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py  73
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py  92
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py  286
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py  82
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/version.py  9
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py  6
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc  bin 0 -> 400 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc  bin 0 -> 3298 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc  bin 0 -> 7554 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc  bin 0 -> 1619 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc  bin 0 -> 3813 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc  bin 0 -> 4561 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py  102
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py  257
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py  80
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py  152
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py  169
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py  23
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc  bin 0 -> 1004 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc  bin 0 -> 32008 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-37.pyc  bin 0 -> 42529 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-37.pyc  bin 0 -> 17287 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc  bin 0 -> 38659 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc  bin 0 -> 10246 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc  bin 0 -> 4432 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc  bin 0 -> 27647 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc  bin 0 -> 10842 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc  bin 0 -> 10642 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-37.pyc  bin 0 -> 47955 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-37.pyc  bin 0 -> 20382 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc  bin 0 -> 25572 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py  6
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc  bin 0 -> 440 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc  bin 0 -> 1037 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc  bin 0 -> 21353 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc  bin 0 -> 15818 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc  bin 0 -> 62683 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py  41
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py  761
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg  84
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py  788
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py  2607
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py  1120
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/database.py  1339
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/index.py  516
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py  1295
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py  393
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py  131
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py  1096
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py  355
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py  403
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe  bin 0 -> 92672 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe  bin 0 -> 102912 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/util.py  1760
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/version.py  736
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe  bin 0 -> 89088 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe  bin 0 -> 99840 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py  1004
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distro.py  1216
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py  35
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc  bin 0 -> 1269 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc  bin 0 -> 13716 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc  bin 0 -> 22607 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc  bin 0 -> 41508 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc  bin 0 -> 3261 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc  bin 0 -> 66173 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc  bin 0 -> 97770 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc  bin 0 -> 10786 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py  288
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py  923
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py  1721
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py  14
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc  bin 0 -> 382 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc  bin 0 -> 1539 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc  bin 0 -> 1984 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc  bin 0 -> 2187 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py  40
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py  44
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py  67
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py  124
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py  2947
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__init__.py  0
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc  bin 0 -> 157 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc  bin 0 -> 1273 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc  bin 0 -> 807 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc  bin 0 -> 1827 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc  bin 0 -> 2591 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc  bin 0 -> 2718 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc  bin 0 -> 16393 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc  bin 0 -> 1311 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py  29
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py  12
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py  73
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py  93
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py  207
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py  896
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py  38
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py  2791
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/serializer.py  409
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py  30
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc  bin 0 -> 896 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc  bin 0 -> 1493 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc  bin 0 -> 1443 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py  54
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py  50
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py  88
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc  bin 0 -> 3277 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc  bin 0 -> 11200 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc  bin 0 -> 9304 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc  bin 0 -> 11809 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc  bin 0 -> 11749 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py  417
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py  239
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py  340
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py  366
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py  154
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc  bin 0 -> 3954 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc  bin 0 -> 6950 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc  bin 0 -> 1679 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc  bin 0 -> 3486 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc  bin 0 -> 6595 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc  bin 0 -> 1853 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py  252
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.py  43
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py  130
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py  213
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py  69
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__init__.py  2
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc  bin 0 -> 214 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-37.pyc  bin 0 -> 3021 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-37.pyc  bin 0 -> 574 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/core.cpython-37.pyc  bin 0 -> 9017 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc  bin 0 -> 21388 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc  bin 0 -> 1754 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc  bin 0 -> 168 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc  bin 0 -> 176048 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/codec.py  118
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/compat.py  12
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/core.py  396
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/idnadata.py  1979
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/intranges.py  53
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/package_data.py  2
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/uts46data.py  8205
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/ipaddress.py  2419
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__init__.py  347
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/__init__.cpython-37.pyc  bin 0 -> 9864 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/linklockfile.cpython-37.pyc  bin 0 -> 2245 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-37.pyc  bin 0 -> 2607 bytes
-rw-r--r--  .emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-37.pyc  bin 0 -> 4807 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-37.pycbin0 -> 3706 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-37.pycbin0 -> 2130 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/linklockfile.py73
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py84
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/pidlockfile.py190
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py156
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py70
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__init__.py65
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pycbin0 -> 1968 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-37.pycbin0 -> 175 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pycbin0 -> 1823 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pycbin0 -> 26266 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/_version.py1
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/exceptions.py48
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/fallback.py1027
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__about__.py27
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__init__.py26
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-37.pycbin0 -> 688 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-37.pycbin0 -> 526 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_compat.cpython-37.pycbin0 -> 966 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-37.pycbin0 -> 2818 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-37.pycbin0 -> 8821 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-37.pycbin0 -> 3939 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pycbin0 -> 19716 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-37.pycbin0 -> 1404 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-37.pycbin0 -> 11908 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_compat.py31
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_structures.py68
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/markers.py296
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/requirements.py138
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/specifiers.py749
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/utils.py57
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/version.py420
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__init__.py4
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-37.pycbin0 -> 239 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pycbin0 -> 5582 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-37.pycbin0 -> 2717 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-37.pycbin0 -> 4765 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pycbin0 -> 2871 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-37.pycbin0 -> 975 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pycbin0 -> 4159 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pycbin0 -> 5440 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/_in_process.py207
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/build.py108
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/check.py202
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/colorlog.py115
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/compat.py23
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/envbuild.py158
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/wrappers.py163
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py3286
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pycbin0 -> 99535 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pycbin0 -> 599 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/py31compat.py23
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__init__.py177
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-37.pycbin0 -> 5515 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-37.pycbin0 -> 2607 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-37.pycbin0 -> 1419 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-37.pycbin0 -> 1384 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/bar.py91
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/counter.py41
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/spinner.py43
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pyparsing.py6493
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__init__.py4
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pycbin0 -> 333 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/core.cpython-37.pycbin0 -> 896 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/parser.cpython-37.pycbin0 -> 10013 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/test.cpython-37.pycbin0 -> 1196 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/utils.cpython-37.pycbin0 -> 2095 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/writer.cpython-37.pycbin0 -> 3530 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/core.py13
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/parser.py341
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/test.py30
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/utils.py67
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/writer.py106
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__init__.py133
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-37.pycbin0 -> 3444 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-37.pycbin0 -> 507 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pycbin0 -> 1265 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-37.pycbin0 -> 16842 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/api.cpython-37.pycbin0 -> 6466 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-37.pycbin0 -> 8299 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-37.pycbin0 -> 590 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-37.pycbin0 -> 1569 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-37.pycbin0 -> 18744 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-37.pycbin0 -> 5462 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/help.cpython-37.pycbin0 -> 2643 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-37.pycbin0 -> 937 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/models.cpython-37.pycbin0 -> 24065 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-37.pycbin0 -> 467 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-37.pycbin0 -> 19385 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-37.pycbin0 -> 4123 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-37.pycbin0 -> 4336 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-37.pycbin0 -> 21996 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__version__.py14
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/_internal_utils.py42
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/adapters.py533
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/api.py158
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/auth.py305
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/certs.py18
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/compat.py74
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py549
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/exceptions.py126
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/help.py119
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/hooks.py34
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/models.py953
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/packages.py16
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/sessions.py770
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/status_codes.py120
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/structures.py103
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/utils.py977
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/retrying.py267
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/six.py952
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__init__.py91
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pycbin0 -> 2072 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pycbin0 -> 10636 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-37.pycbin0 -> 10409 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pycbin0 -> 23758 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pycbin0 -> 10357 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-37.pycbin0 -> 8051 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pycbin0 -> 2717 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pycbin0 -> 12813 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-37.pycbin0 -> 5539 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-37.pycbin0 -> 19914 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/_collections.py329
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connection.py417
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connectionpool.py897
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__init__.py0
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pycbin0 -> 156 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pycbin0 -> 1052 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pycbin0 -> 8271 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pycbin0 -> 3198 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pycbin0 -> 14765 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pycbin0 -> 19681 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pycbin0 -> 5472 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py30
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py0
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pycbin0 -> 173 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pycbin0 -> 10190 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pycbin0 -> 7449 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py593
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py346
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py289
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py111
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py485
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py853
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/socks.py205
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/exceptions.py246
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/fields.py272
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/filepost.py98
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__init__.py5
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pycbin0 -> 270 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pycbin0 -> 24358 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py0
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pycbin0 -> 167 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pycbin0 -> 1261 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py53
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.py56
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/__init__.cpython-37.pycbin0 -> 1004 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/_mixin.cpython-37.pycbin0 -> 10978 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/abnf_regexp.cpython-37.pycbin0 -> 4028 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/api.cpython-37.pycbin0 -> 3656 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/builder.cpython-37.pycbin0 -> 8283 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/compat.cpython-37.pycbin0 -> 1098 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/exceptions.cpython-37.pycbin0 -> 4894 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/iri.cpython-37.pycbin0 -> 4384 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/misc.cpython-37.pycbin0 -> 2201 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/normalizers.cpython-37.pycbin0 -> 3501 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/parseresult.cpython-37.pycbin0 -> 10039 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/uri.cpython-37.pycbin0 -> 4199 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/validators.cpython-37.pycbin0 -> 12924 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.py353
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py267
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.py106
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.py298
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.py54
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.py118
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.py147
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.py124
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.py167
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.py385
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.py153
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.py450
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/six.py868
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py19
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pycbin0 -> 511 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pycbin0 -> 3270 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py156
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/poolmanager.py455
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/request.py150
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/response.py760
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__init__.py56
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pycbin0 -> 976 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pycbin0 -> 3123 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pycbin0 -> 997 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pycbin0 -> 3279 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pycbin0 -> 1926 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pycbin0 -> 12776 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pycbin0 -> 9683 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pycbin0 -> 8727 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pycbin0 -> 7758 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pycbin0 -> 3089 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/connection.py134
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/queue.py21
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/request.py125
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/response.py87
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/retry.py412
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/ssl_.py392
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/timeout.py243
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/url.py289
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/wait.py150
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__init__.py342
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pycbin0 -> 9632 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-37.pycbin0 -> 4046 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pycbin0 -> 1868 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-37.pycbin0 -> 5009 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pycbin0 -> 2621 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/labels.py231
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/mklabels.py59
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/tests.py153
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/x_user_defined.py325
658 files changed, 111162 insertions, 0 deletions
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__init__.py
new file mode 100644
index 00000000..a0196ad5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__init__.py
@@ -0,0 +1 @@
+__version__ = "19.2.1"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__main__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__main__.py
new file mode 100644
index 00000000..0c223f8c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__main__.py
@@ -0,0 +1,19 @@
+from __future__ import absolute_import
+
+import os
+import sys
+
+# If we are running from a wheel, add the wheel to sys.path
+# This allows invocations like: python pip-*.whl/pip install pip-*.whl
+if __package__ == '':
+ # __file__ is pip-*.whl/pip/__main__.py
+ # first dirname call strips off '/__main__.py', second strips off '/pip'.
+ # The resulting path is the wheel itself.
+ # Add that to sys.path so we can import pip
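+ # For illustration (hypothetical invocation): when run as
+ # `python pip-19.2.1-py3-none-any.whl/pip install ...`, __file__ is
+ # 'pip-19.2.1-py3-none-any.whl/pip/__main__.py', so the two dirname
+ # calls leave path == 'pip-19.2.1-py3-none-any.whl'.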
+ path = os.path.dirname(os.path.dirname(__file__))
+ sys.path.insert(0, path)
+
+from pip._internal import main as _main # isort:skip # noqa
+
+if __name__ == '__main__':
+ sys.exit(_main())
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..76c2d114
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__main__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__main__.cpython-37.pyc
new file mode 100644
index 00000000..d580bfe4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/__pycache__/__main__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py
new file mode 100644
index 00000000..fbadc28a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+from __future__ import absolute_import
+
+import locale
+import logging
+import os
+import sys
+import warnings
+
+# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
+# but if invoked (i.e. imported), it will issue a warning to stderr if socks
+# isn't available. requests unconditionally imports urllib3's socks contrib
+# module, triggering this warning. The warning breaks DEP-8 tests (because of
+# the stderr output) and is just plain annoying in normal usage. I don't want
+# to add socks as yet another dependency for pip, nor do I want to allow-stderr
+# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
+# be done before the import of pip.vcs.
+from pip._vendor.urllib3.exceptions import DependencyWarning
+warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
+
+# We want to inject the use of SecureTransport as early as possible so that
+# any references or sessions or what have you are ensured to have it; however,
+# we only want to do this when we're running on macOS and the linked OpenSSL
+# is too old to handle TLSv1.2.
+try:
+ import ssl
+except ImportError:
+ pass
+else:
+ # Check whether the linked OpenSSL is older than 1.0.1 on macOS
+ if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
+ try:
+ from pip._vendor.urllib3.contrib import securetransport
+ except (ImportError, OSError):
+ pass
+ else:
+ securetransport.inject_into_urllib3()
+
+from pip._internal.cli.autocompletion import autocomplete
+from pip._internal.cli.main_parser import parse_command
+from pip._internal.commands import commands_dict
+from pip._internal.exceptions import PipError
+from pip._internal.utils import deprecation
+from pip._vendor.urllib3.exceptions import InsecureRequestWarning
+
+logger = logging.getLogger(__name__)
+
+# Hide the InsecureRequestWarning from urllib3
+warnings.filterwarnings("ignore", category=InsecureRequestWarning)
+
+
+def main(args=None):
+ if args is None:
+ args = sys.argv[1:]
+
+ # Configure our deprecation warnings to be sent through loggers
+ deprecation.install_warning_logger()
+
+ autocomplete()
+
+ try:
+ cmd_name, cmd_args = parse_command(args)
+ except PipError as exc:
+ sys.stderr.write("ERROR: %s" % exc)
+ sys.stderr.write(os.linesep)
+ sys.exit(1)
+
+ # Needed for locale.getpreferredencoding(False) to work
+ # in pip._internal.utils.encoding.auto_decode
+ try:
+ locale.setlocale(locale.LC_ALL, '')
+ except locale.Error as e:
+ # setlocale can apparently crash if locales are uninitialized
+ logger.debug("Ignoring error %s when setting locale", e)
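+
+ # commands_dict maps command names to Command subclasses; for example,
+ # 'install' resolves to InstallCommand, whose main() runs the command
+ # with the remaining arguments.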
+ command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
+ return command.main(cmd_args)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..fdb85bc3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pyc
new file mode 100644
index 00000000..7f3cdbb1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pyc
new file mode 100644
index 00000000..846c660d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pyc
new file mode 100644
index 00000000..20c8743e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pyc
new file mode 100644
index 00000000..22fbb600
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 00000000..6d7ddb78
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pyc
new file mode 100644
index 00000000..0ee5a4fc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc
new file mode 100644
index 00000000..a671abd4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pyc
new file mode 100644
index 00000000..1b6dcdbe
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pyc
new file mode 100644
index 00000000..307c519a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pyc
new file mode 100644
index 00000000..baf9a7bb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pyc
new file mode 100644
index 00000000..5cfb8ef6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py
new file mode 100644
index 00000000..a060ceea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py
@@ -0,0 +1,218 @@
+"""Build Environment used for isolation during sdist building
+"""
+
+import logging
+import os
+import sys
+import textwrap
+from collections import OrderedDict
+from distutils.sysconfig import get_python_lib
+from sysconfig import get_paths
+
+from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
+
+from pip import __file__ as pip_location
+from pip._internal.utils.misc import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import Tuple, Set, Iterable, Optional, List
+ from pip._internal.index import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+class _Prefix:
+
+ def __init__(self, path):
+ # type: (str) -> None
+ self.path = path
+ self.setup = False
+ self.bin_dir = get_paths(
+ 'nt' if os.name == 'nt' else 'posix_prefix',
+ vars={'base': path, 'platbase': path}
+ )['scripts']
+ # Note: prefer distutils' sysconfig to get the
+ # library paths so PyPy is correctly supported.
+ purelib = get_python_lib(plat_specific=False, prefix=path)
+ platlib = get_python_lib(plat_specific=True, prefix=path)
+ if purelib == platlib:
+ self.lib_dirs = [purelib]
+ else:
+ self.lib_dirs = [purelib, platlib]
+
+
+class BuildEnvironment(object):
+ """Creates and manages an isolated environment to install build deps
+ """
+
+ def __init__(self):
+ # type: () -> None
+ self._temp_dir = TempDirectory(kind="build-env")
+ self._temp_dir.create()
+
+ self._prefixes = OrderedDict((
+ (name, _Prefix(os.path.join(self._temp_dir.path, name)))
+ for name in ('normal', 'overlay')
+ ))
+
+ self._bin_dirs = [] # type: List[str]
+ self._lib_dirs = [] # type: List[str]
+ for prefix in reversed(list(self._prefixes.values())):
+ self._bin_dirs.append(prefix.bin_dir)
+ self._lib_dirs.extend(prefix.lib_dirs)
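+ # reversed() puts the 'overlay' prefix ahead of 'normal', so overlay
+ # installs take precedence on PATH and in the lib directories.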
+
+ # Customize site to:
+ # - ensure .pth files are honored
+ # - prevent access to system site packages
+ system_sites = {
+ os.path.normcase(site) for site in (
+ get_python_lib(plat_specific=False),
+ get_python_lib(plat_specific=True),
+ )
+ }
+ self._site_dir = os.path.join(self._temp_dir.path, 'site')
+ if not os.path.exists(self._site_dir):
+ os.mkdir(self._site_dir)
+ with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
+ fp.write(textwrap.dedent(
+ '''
+ import os, site, sys
+
+ # First, drop system-sites related paths.
+ original_sys_path = sys.path[:]
+ known_paths = set()
+ for path in {system_sites!r}:
+ site.addsitedir(path, known_paths=known_paths)
+ system_paths = set(
+ os.path.normcase(path)
+ for path in sys.path[len(original_sys_path):]
+ )
+ original_sys_path = [
+ path for path in original_sys_path
+ if os.path.normcase(path) not in system_paths
+ ]
+ sys.path = original_sys_path
+
+ # Second, add lib directories,
+ # ensuring .pth files are processed.
+ for path in {lib_dirs!r}:
+ assert path not in sys.path
+ site.addsitedir(path)
+ '''
+ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
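+
+ # For illustration (hypothetical temp dir): if _temp_dir.path is
+ # '/tmp/pip-build-env-abc', the prefixes live at .../normal and
+ # .../overlay, and the sitecustomize.py written above lands in
+ # '/tmp/pip-build-env-abc/site', which __enter__ below exposes via
+ # PYTHONPATH.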
+
+ def __enter__(self):
+ self._save_env = {
+ name: os.environ.get(name, None)
+ for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
+ }
+
+ path = self._bin_dirs[:]
+ old_path = self._save_env['PATH']
+ if old_path:
+ path.extend(old_path.split(os.pathsep))
+
+ pythonpath = [self._site_dir]
+
+ os.environ.update({
+ 'PATH': os.pathsep.join(path),
+ 'PYTHONNOUSERSITE': '1',
+ 'PYTHONPATH': os.pathsep.join(pythonpath),
+ })
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ for varname, old_value in self._save_env.items():
+ if old_value is None:
+ os.environ.pop(varname, None)
+ else:
+ os.environ[varname] = old_value
+
+ def cleanup(self):
+ # type: () -> None
+ self._temp_dir.cleanup()
+
+ def check_requirements(self, reqs):
+ # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
+ """Return 2 sets:
+ - conflicting requirements: set of (installed, wanted) reqs tuples
+ - missing requirements: set of reqs
+ """
+ missing = set()
+ conflicting = set()
+ if reqs:
+ ws = WorkingSet(self._lib_dirs)
+ for req in reqs:
+ try:
+ if ws.find(Requirement.parse(req)) is None:
+ missing.add(req)
+ except VersionConflict as e:
+ conflicting.add((str(e.args[0].as_requirement()),
+ str(e.args[1])))
+ return conflicting, missing
+
+ def install_requirements(
+ self,
+ finder, # type: PackageFinder
+ requirements, # type: Iterable[str]
+ prefix_as_string, # type: str
+ message # type: Optional[str]
+ ):
+ # type: (...) -> None
+ prefix = self._prefixes[prefix_as_string]
+ assert not prefix.setup
+ prefix.setup = True
+ if not requirements:
+ return
+ args = [
+ sys.executable, os.path.dirname(pip_location), 'install',
+ '--ignore-installed', '--no-user', '--prefix', prefix.path,
+ '--no-warn-script-location',
+ ] # type: List[str]
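+ # Running `python <pip package dir> install ...` executes pip via the
+ # package's __main__.py, i.e. it is effectively
+ # `python -m pip install --ignore-installed --no-user --prefix <tmp> ...`.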
+ if logger.getEffectiveLevel() <= logging.DEBUG:
+ args.append('-v')
+ for format_control in ('no_binary', 'only_binary'):
+ formats = getattr(finder.format_control, format_control)
+ args.extend(('--' + format_control.replace('_', '-'),
+ ','.join(sorted(formats or {':none:'}))))
+
+ index_urls = finder.index_urls
+ if index_urls:
+ args.extend(['-i', index_urls[0]])
+ for extra_index in index_urls[1:]:
+ args.extend(['--extra-index-url', extra_index])
+ else:
+ args.append('--no-index')
+ for link in finder.find_links:
+ args.extend(['--find-links', link])
+
+ for host in finder.trusted_hosts:
+ args.extend(['--trusted-host', host])
+ if finder.allow_all_prereleases:
+ args.append('--pre')
+ args.append('--')
+ args.extend(requirements)
+ with open_spinner(message) as spinner:
+ call_subprocess(args, spinner=spinner)
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+ """A no-op drop-in replacement for BuildEnvironment
+ """
+
+ def __init__(self):
+ pass
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+ def cleanup(self):
+ pass
+
+ def install_requirements(self, finder, requirements, prefix, message):
+ raise NotImplementedError()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py
new file mode 100644
index 00000000..894624c1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py
@@ -0,0 +1,224 @@
+"""Cache Management
+"""
+
+import errno
+import hashlib
+import logging
+import os
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.models.link import Link
+from pip._internal.utils.compat import expanduser
+from pip._internal.utils.misc import path_to_url
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import InvalidWheelFilename, Wheel
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Set, List, Any
+ from pip._internal.index import FormatControl
+
+logger = logging.getLogger(__name__)
+
+
+class Cache(object):
+ """An abstract class - provides cache directories for data from links
+
+ :param cache_dir: The root of the cache.
+ :param format_control: An object of FormatControl class to limit
+ binaries being read from the cache.
+ :param allowed_formats: which formats of files the cache should store.
+ ('binary' and 'source' are the only allowed values)
+ """
+
+ def __init__(self, cache_dir, format_control, allowed_formats):
+ # type: (str, FormatControl, Set[str]) -> None
+ super(Cache, self).__init__()
+ self.cache_dir = expanduser(cache_dir) if cache_dir else None
+ self.format_control = format_control
+ self.allowed_formats = allowed_formats
+
+ _valid_formats = {"source", "binary"}
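+ # The union equals _valid_formats exactly when allowed_formats is a
+ # subset of {"source", "binary"}.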
+ assert self.allowed_formats.union(_valid_formats) == _valid_formats
+
+ def _get_cache_path_parts(self, link):
+ # type: (Link) -> List[str]
+ """Get parts of part that must be os.path.joined with cache_dir
+ """
+
+ # We want to generate a url to use as our cache key; we don't want to
+ # just re-use the URL, because it might have other items in the
+ # fragment that we don't care about.
+ key_parts = [link.url_without_fragment]
+ if link.hash_name is not None and link.hash is not None:
+ key_parts.append("=".join([link.hash_name, link.hash]))
+ key_url = "#".join(key_parts)
+
+ # Encode our key url with sha224; we use it because it has similar
+ # security properties to sha256 but a shorter total output (and is
+ # thus less secure). However, the difference doesn't matter much for
+ # our use case here.
+ hashed = hashlib.sha224(key_url.encode()).hexdigest()
+
+ # We want to nest the directories to avoid having a ton of top-level
+ # directories, where we might run out of subdirectories on some
+ # filesystems.
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
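+ # e.g. (hypothetical digest) hashed == 'ab12cd34...' gives
+ # parts == ['ab', '12', 'cd', '34...'].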
+
+ return parts
+
+ def _get_candidates(self, link, package_name):
+ # type: (Link, Optional[str]) -> List[Any]
+ can_not_cache = (
+ not self.cache_dir or
+ not package_name or
+ not link
+ )
+ if can_not_cache:
+ return []
+
+ canonical_name = canonicalize_name(package_name)
+ formats = self.format_control.get_allowed_formats(
+ canonical_name
+ )
+ if not self.allowed_formats.intersection(formats):
+ return []
+
+ root = self.get_path_for_link(link)
+ try:
+ return os.listdir(root)
+ except OSError as err:
+ if err.errno in {errno.ENOENT, errno.ENOTDIR}:
+ return []
+ raise
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ """Return a directory to store cached items in for link.
+ """
+ raise NotImplementedError()
+
+ def get(self, link, package_name):
+ # type: (Link, Optional[str]) -> Link
+ """Returns a link to a cached item if it exists, otherwise returns the
+ passed link.
+ """
+ raise NotImplementedError()
+
+ def _link_for_candidate(self, link, candidate):
+ # type: (Link, str) -> Link
+ root = self.get_path_for_link(link)
+ path = os.path.join(root, candidate)
+
+ return Link(path_to_url(path))
+
+ def cleanup(self):
+ # type: () -> None
+ pass
+
+
+class SimpleWheelCache(Cache):
+ """A cache of wheels for future installs.
+ """
+
+ def __init__(self, cache_dir, format_control):
+ # type: (str, FormatControl) -> None
+ super(SimpleWheelCache, self).__init__(
+ cache_dir, format_control, {"binary"}
+ )
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ """Return a directory to store cached wheels for link
+
+ Because there may be many wheels for any one sdist, we provide a directory
+ to cache them in, and then consult that directory when looking up
+ cache hits.
+
+ We only insert things into the cache if they have plausible version
+ numbers, so that we don't contaminate the cache with things that were
+ not unique. E.g. ./package might have dozens of installs done for it
+ and build a version of 0.0... and if we built and cached a wheel, we'd
+ end up using the same wheel even if the source had been edited.
+
+ :param link: The link of the sdist for which this will cache wheels.
+ """
+ parts = self._get_cache_path_parts(link)
+
+ # Store wheels within the root cache_dir
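+ # (for illustration, with a hypothetical cache_dir of '~/.cache/pip'
+ # this yields '~/.cache/pip/wheels/ab/12/cd/34...')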
+ return os.path.join(self.cache_dir, "wheels", *parts)
+
+ def get(self, link, package_name):
+ # type: (Link, Optional[str]) -> Link
+ candidates = []
+
+ for wheel_name in self._get_candidates(link, package_name):
+ try:
+ wheel = Wheel(wheel_name)
+ except InvalidWheelFilename:
+ continue
+ if not wheel.supported():
+ # Built for a different python/arch/etc
+ continue
+ candidates.append((wheel.support_index_min(), wheel_name))
+
+ if not candidates:
+ return link
+
+ return self._link_for_candidate(link, min(candidates)[1])
+
+
+class EphemWheelCache(SimpleWheelCache):
+    """A SimpleWheelCache that creates its own temporary cache directory
+    """
+
+ def __init__(self, format_control):
+ # type: (FormatControl) -> None
+ self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
+ self._temp_dir.create()
+
+ super(EphemWheelCache, self).__init__(
+ self._temp_dir.path, format_control
+ )
+
+ def cleanup(self):
+ # type: () -> None
+ self._temp_dir.cleanup()
+
+
+class WheelCache(Cache):
+    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+    This Cache allows for graceful degradation, falling back to the ephem
+    wheel cache when a link is not found in the simple wheel cache first.
+    """
+
+ def __init__(self, cache_dir, format_control):
+ # type: (str, FormatControl) -> None
+ super(WheelCache, self).__init__(
+ cache_dir, format_control, {'binary'}
+ )
+ self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
+ self._ephem_cache = EphemWheelCache(format_control)
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ return self._wheel_cache.get_path_for_link(link)
+
+ def get_ephem_path_for_link(self, link):
+ # type: (Link) -> str
+ return self._ephem_cache.get_path_for_link(link)
+
+ def get(self, link, package_name):
+ # type: (Link, Optional[str]) -> Link
+ retval = self._wheel_cache.get(link, package_name)
+ if retval is link:
+ retval = self._ephem_cache.get(link, package_name)
+ return retval
+
+ def cleanup(self):
+ # type: () -> None
+ self._wheel_cache.cleanup()
+ self._ephem_cache.cleanup()
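+
+
+# Illustrative usage (editor's sketch, not part of pip; names below are
+# placeholders):
+#
+#     wheel_cache = WheelCache(cache_dir, format_control)
+#     cached_link = wheel_cache.get(link, "example-package")
+#     if cached_link is link:
+#         ...  # neither the persistent nor the ephemeral cache had a wheel
+#     wheel_cache.cleanup()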
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py
new file mode 100644
index 00000000..e589bb91
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py
@@ -0,0 +1,4 @@
+"""Subpackage containing all of pip's command line interface related code
+"""
+
+# This file intentionally does not import submodules
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..09f43687
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc
new file mode 100644
index 00000000..4c1743dd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc
new file mode 100644
index 00000000..a3a0d223
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc
new file mode 100644
index 00000000..0e3c51cb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc
new file mode 100644
index 00000000..0b719c29
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pyc
new file mode 100644
index 00000000..3fa51194
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc
new file mode 100644
index 00000000..8cede51c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py
new file mode 100644
index 00000000..0a04199e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py
@@ -0,0 +1,152 @@
+"""Logic that powers autocompletion installed by ``pip completion``.
+"""
+
+import optparse
+import os
+import sys
+
+from pip._internal.cli.main_parser import create_main_parser
+from pip._internal.commands import commands_dict, get_summaries
+from pip._internal.utils.misc import get_installed_distributions
+
+
+def autocomplete():
+ """Entry Point for completion of main and subcommand options.
+ """
+ # Don't complete if user hasn't sourced bash_completion file.
+ if 'PIP_AUTO_COMPLETE' not in os.environ:
+ return
+ cwords = os.environ['COMP_WORDS'].split()[1:]
+ cword = int(os.environ['COMP_CWORD'])
+ try:
+ current = cwords[cword - 1]
+ except IndexError:
+ current = ''
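+
+    # For example (editor's note): with COMP_WORDS="pip ins" and
+    # COMP_CWORD=1, cwords == ['ins'] and current == 'ins'.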
+
+ subcommands = [cmd for cmd, summary in get_summaries()]
+ options = []
+ # subcommand
+ try:
+ subcommand_name = [w for w in cwords if w in subcommands][0]
+ except IndexError:
+ subcommand_name = None
+
+ parser = create_main_parser()
+ # subcommand options
+ if subcommand_name:
+ # special case: 'help' subcommand has no options
+ if subcommand_name == 'help':
+ sys.exit(1)
+ # special case: list locally installed dists for show and uninstall
+ should_list_installed = (
+ subcommand_name in ['show', 'uninstall'] and
+ not current.startswith('-')
+ )
+ if should_list_installed:
+ installed = []
+ lc = current.lower()
+ for dist in get_installed_distributions(local_only=True):
+ if dist.key.startswith(lc) and dist.key not in cwords[1:]:
+ installed.append(dist.key)
+ # if there are no dists installed, fall back to option completion
+ if installed:
+ for dist in installed:
+ print(dist)
+ sys.exit(1)
+
+ subcommand = commands_dict[subcommand_name]()
+
+ for opt in subcommand.parser.option_list_all:
+ if opt.help != optparse.SUPPRESS_HELP:
+ for opt_str in opt._long_opts + opt._short_opts:
+ options.append((opt_str, opt.nargs))
+
+ # filter out previously specified options from available options
+ prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
+ # filter options by current input
+ options = [(k, v) for k, v in options if k.startswith(current)]
+ # get completion type given cwords and available subcommand options
+ completion_type = get_path_completion_type(
+ cwords, cword, subcommand.parser.option_list_all,
+ )
+ # get completion files and directories if ``completion_type`` is
+ # ``<file>``, ``<dir>`` or ``<path>``
+ if completion_type:
+ options = auto_complete_paths(current, completion_type)
+ options = ((opt, 0) for opt in options)
+ for option in options:
+ opt_label = option[0]
+ # append '=' to options which require args
+ if option[1] and option[0][:2] == "--":
+ opt_label += '='
+ print(opt_label)
+ else:
+ # show main parser options only when necessary
+
+ opts = [i.option_list for i in parser.option_groups]
+ opts.append(parser.option_list)
+ opts = (o for it in opts for o in it)
+ if current.startswith('-'):
+ for opt in opts:
+ if opt.help != optparse.SUPPRESS_HELP:
+ subcommands += opt._long_opts + opt._short_opts
+ else:
+ # get completion type given cwords and all available options
+ completion_type = get_path_completion_type(cwords, cword, opts)
+ if completion_type:
+ subcommands = auto_complete_paths(current, completion_type)
+
+ print(' '.join([x for x in subcommands if x.startswith(current)]))
+ sys.exit(1)
+
+
+def get_path_completion_type(cwords, cword, opts):
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+    :param cwords: same as the environment variable ``COMP_WORDS``
+    :param cword: same as the environment variable ``COMP_CWORD``
+ :param opts: The available options to check
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
+ """
+ if cword < 2 or not cwords[cword - 2].startswith('-'):
+ return
+ for opt in opts:
+ if opt.help == optparse.SUPPRESS_HELP:
+ continue
+ for o in str(opt).split('/'):
+ if cwords[cword - 2].split('=')[0] == o:
+ if not opt.metavar or any(
+ x in ('path', 'file', 'dir')
+ for x in opt.metavar.split('/')):
+ return opt.metavar
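+
+
+# For example (editor's note, not part of pip): completing
+# ``pip install -r <TAB>`` gives cwords == ['install', '-r'] and
+# cword == 3, so cwords[cword - 2] is '-r'; that option's metavar is
+# 'file', so 'file' path completion is used.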
+
+
+def auto_complete_paths(current, completion_type):
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
+ and directories starting with ``current``; otherwise only list directories
+ starting with ``current``.
+
+ :param current: The word to be completed
+    :param completion_type: path completion type (``file``, ``path`` or
+        ``dir``)
+ :return: A generator of regular files and/or directories
+ """
+ directory, filename = os.path.split(current)
+ current_path = os.path.abspath(directory)
+ # Don't complete paths if they can't be accessed
+ if not os.access(current_path, os.R_OK):
+ return
+ filename = os.path.normcase(filename)
+ # list all files that start with ``filename``
+ file_list = (x for x in os.listdir(current_path)
+ if os.path.normcase(x).startswith(filename))
+ for f in file_list:
+ opt = os.path.join(current_path, f)
+ comp_file = os.path.normcase(os.path.join(directory, f))
+        # complete regular files when there is not ``<dir>`` after the
+        # option; complete directories when there is ``<file>``, ``<path>``
+        # or ``<dir>`` after the option
+ if completion_type != 'dir' and os.path.isfile(opt):
+ yield comp_file
+ elif os.path.isdir(opt):
+ yield os.path.join(comp_file, '')
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py
new file mode 100644
index 00000000..90830be4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py
@@ -0,0 +1,346 @@
+"""Base Command class, and related routines"""
+from __future__ import absolute_import, print_function
+
+import logging
+import logging.config
+import optparse
+import os
+import platform
+import sys
+import traceback
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_search_scope
+from pip._internal.cli.parser import (
+ ConfigOptionParser, UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.cli.status_codes import (
+ ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
+ VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.download import PipSession
+from pip._internal.exceptions import (
+ BadCommand, CommandError, InstallationError, PreviousBuildDirError,
+ UninstallationError,
+)
+from pip._internal.index import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.req.constructors import (
+ install_req_from_editable, install_req_from_line,
+)
+from pip._internal.req.req_file import parse_requirements
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.outdated import pip_version_check
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, List, Tuple, Any
+ from optparse import Values
+ from pip._internal.cache import WheelCache
+ from pip._internal.req.req_set import RequirementSet
+
+__all__ = ['Command']
+
+logger = logging.getLogger(__name__)
+
+
+class Command(object):
+ name = None # type: Optional[str]
+ usage = None # type: Optional[str]
+ ignore_require_venv = False # type: bool
+
+ def __init__(self, isolated=False):
+ # type: (bool) -> None
+ parser_kw = {
+ 'usage': self.usage,
+ 'prog': '%s %s' % (get_prog(), self.name),
+ 'formatter': UpdatingDefaultsHelpFormatter(),
+ 'add_help_option': False,
+ 'name': self.name,
+ 'description': self.__doc__,
+ 'isolated': isolated,
+ }
+
+ self.parser = ConfigOptionParser(**parser_kw)
+
+ # Commands should add options to this option group
+ optgroup_name = '%s Options' % self.name.capitalize()
+ self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+ # Add the general options
+ gen_opts = cmdoptions.make_option_group(
+ cmdoptions.general_group,
+ self.parser,
+ )
+ self.parser.add_option_group(gen_opts)
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> Any
+ raise NotImplementedError
+
+ @classmethod
+ def _get_index_urls(cls, options):
+ """Return a list of index urls from user-provided options."""
+ index_urls = []
+ if not getattr(options, "no_index", False):
+ url = getattr(options, "index_url", None)
+ if url:
+ index_urls.append(url)
+ urls = getattr(options, "extra_index_urls", None)
+ if urls:
+ index_urls.extend(urls)
+ # Return None rather than an empty list
+ return index_urls or None
+
+ def _build_session(self, options, retries=None, timeout=None):
+ # type: (Values, Optional[int], Optional[int]) -> PipSession
+ session = PipSession(
+ cache=(
+ normalize_path(os.path.join(options.cache_dir, "http"))
+ if options.cache_dir else None
+ ),
+ retries=retries if retries is not None else options.retries,
+ insecure_hosts=options.trusted_hosts,
+ index_urls=self._get_index_urls(options),
+ )
+
+ # Handle custom ca-bundles from the user
+ if options.cert:
+ session.verify = options.cert
+
+ # Handle SSL client certificate
+ if options.client_cert:
+ session.cert = options.client_cert
+
+ # Handle timeouts
+ if options.timeout or timeout:
+ session.timeout = (
+ timeout if timeout is not None else options.timeout
+ )
+
+ # Handle configured proxies
+ if options.proxy:
+ session.proxies = {
+ "http": options.proxy,
+ "https": options.proxy,
+ }
+
+ # Determine if we can prompt the user for authentication or not
+ session.auth.prompting = not options.no_input
+
+ return session
+
+ def parse_args(self, args):
+ # type: (List[str]) -> Tuple
+ # factored out for testability
+ return self.parser.parse_args(args)
+
+ def main(self, args):
+ # type: (List[str]) -> int
+ options, args = self.parse_args(args)
+
+ # Set verbosity so that it can be used elsewhere.
+ self.verbosity = options.verbose - options.quiet
+
+ level_number = setup_logging(
+ verbosity=self.verbosity,
+ no_color=options.no_color,
+ user_log_file=options.log,
+ )
+
+ if sys.version_info[:2] == (2, 7):
+ message = (
+ "A future version of pip will drop support for Python 2.7. "
+ "More details about Python 2 support in pip, can be found at "
+ "https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa
+ )
+ if platform.python_implementation() == "CPython":
+ message = (
+ "Python 2.7 will reach the end of its life on January "
+ "1st, 2020. Please upgrade your Python as Python 2.7 "
+ "won't be maintained after that date. "
+ ) + message
+ deprecated(message, replacement=None, gone_in=None)
+
+ # TODO: Try to get these passing down from the command?
+ # without resorting to os.environ to hold these.
+ # This also affects isolated builds and it should.
+
+ if options.no_input:
+ os.environ['PIP_NO_INPUT'] = '1'
+
+ if options.exists_action:
+ os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
+
+ if options.require_venv and not self.ignore_require_venv:
+ # If a venv is required check if it can really be found
+ if not running_under_virtualenv():
+ logger.critical(
+ 'Could not find an activated virtualenv (required).'
+ )
+ sys.exit(VIRTUALENV_NOT_FOUND)
+
+ try:
+ status = self.run(options, args)
+ # FIXME: all commands should return an exit status
+ # and when it is done, isinstance is not needed anymore
+ if isinstance(status, int):
+ return status
+ except PreviousBuildDirError as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return PREVIOUS_BUILD_DIR_ERROR
+ except (InstallationError, UninstallationError, BadCommand) as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except CommandError as exc:
+ logger.critical('%s', exc)
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except BrokenStdoutLoggingError:
+ # Bypass our logger and write any remaining messages to stderr
+ # because stdout no longer works.
+ print('ERROR: Pipe to stdout was broken', file=sys.stderr)
+ if level_number <= logging.DEBUG:
+ traceback.print_exc(file=sys.stderr)
+
+ return ERROR
+ except KeyboardInterrupt:
+ logger.critical('Operation cancelled by user')
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except BaseException:
+ logger.critical('Exception:', exc_info=True)
+
+ return UNKNOWN_ERROR
+ finally:
+ allow_version_check = (
+ # Does this command have the index_group options?
+ hasattr(options, "no_index") and
+ # Is this command allowed to perform this check?
+ not (options.disable_pip_version_check or options.no_index)
+ )
+ # Check if we're using the latest version of pip available
+ if allow_version_check:
+ session = self._build_session(
+ options,
+ retries=0,
+ timeout=min(5, options.timeout)
+ )
+ with session:
+ pip_version_check(session, options)
+
+ # Shutdown the logging module
+ logging.shutdown()
+
+ return SUCCESS
+
+
+class RequirementCommand(Command):
+
+ @staticmethod
+ def populate_requirement_set(requirement_set, # type: RequirementSet
+ args, # type: List[str]
+ options, # type: Values
+ finder, # type: PackageFinder
+ session, # type: PipSession
+ name, # type: str
+ wheel_cache # type: Optional[WheelCache]
+ ):
+ # type: (...) -> None
+ """
+ Marshal cmd line args into a requirement set.
+ """
+ # NOTE: As a side-effect, options.require_hashes and
+ # requirement_set.require_hashes may be updated
+
+ for filename in options.constraints:
+ for req_to_add in parse_requirements(
+ filename,
+ constraint=True, finder=finder, options=options,
+ session=session, wheel_cache=wheel_cache):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in args:
+ req_to_add = install_req_from_line(
+ req, None, isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in options.editables:
+ req_to_add = install_req_from_editable(
+ req,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for filename in options.requirements:
+ for req_to_add in parse_requirements(
+ filename,
+ finder=finder, options=options, session=session,
+ wheel_cache=wheel_cache,
+ use_pep517=options.use_pep517):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+ # If --require-hashes was a line in a requirements file, tell
+ # RequirementSet about it:
+ requirement_set.require_hashes = options.require_hashes
+
+ if not (args or options.editables or options.requirements):
+ opts = {'name': name}
+ if options.find_links:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(maybe you meant "pip %(name)s %(links)s"?)' %
+ dict(opts, links=' '.join(options.find_links)))
+ else:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(see "pip help %(name)s")' % opts)
+
+ def _build_package_finder(
+ self,
+ options, # type: Values
+ session, # type: PipSession
+ target_python=None, # type: Optional[TargetPython]
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> PackageFinder
+ """
+ Create a package finder appropriate to this requirement command.
+
+ :param ignore_requires_python: Whether to ignore incompatible
+ "Requires-Python" values in links. Defaults to False.
+ """
+ search_scope = make_search_scope(options)
+ selection_prefs = SelectionPreferences(
+ allow_yanked=True,
+ format_control=options.format_control,
+ allow_all_prereleases=options.pre,
+ prefer_binary=options.prefer_binary,
+ ignore_requires_python=ignore_requires_python,
+ )
+
+ return PackageFinder.create(
+ search_scope=search_scope,
+ selection_prefs=selection_prefs,
+ trusted_hosts=options.trusted_hosts,
+ session=session,
+ target_python=target_python,
+ )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py
new file mode 100644
index 00000000..ecf4d20b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py
@@ -0,0 +1,929 @@
+"""
+shared options and groups
+
+The principle here is to define options once, but *not* instantiate them
+globally. One reason is that options with action='append' can carry state
+between parses: pip parses general options twice internally, and shouldn't
+pass state from one parse to the other. To be consistent, all options
+follow this design.
+
+"""
+from __future__ import absolute_import
+
+import logging
+import textwrap
+import warnings
+from distutils.util import strtobool
+from functools import partial
+from optparse import SUPPRESS_HELP, Option, OptionGroup
+from textwrap import dedent
+
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.index import PyPI
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.hashes import STRONG_HASHES
+from pip._internal.utils.misc import redact_password_from_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import BAR_TYPES
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Callable, Dict, Optional, Tuple
+ from optparse import OptionParser, Values
+ from pip._internal.cli.parser import ConfigOptionParser
+
+logger = logging.getLogger(__name__)
+
+
+def raise_option_error(parser, option, msg):
+ """
+ Raise an option parsing error using parser.error().
+
+ Args:
+ parser: an OptionParser instance.
+ option: an Option instance.
+ msg: the error text.
+ """
+ msg = '{} error: {}'.format(option, msg)
+ msg = textwrap.fill(' '.join(msg.split()))
+ parser.error(msg)
+
+
+def make_option_group(group, parser):
+ # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
+ """
+ Return an OptionGroup object
+ group -- assumed to be dict with 'name' and 'options' keys
+ parser -- an optparse Parser
+ """
+ option_group = OptionGroup(parser, group['name'])
+ for option in group['options']:
+ option_group.add_option(option())
+ return option_group
+
+
+def check_install_build_global(options, check_options=None):
+ # type: (Values, Optional[Values]) -> None
+ """Disable wheels if per-setup.py call options are set.
+
+ :param options: The OptionParser options to update.
+ :param check_options: The options to check, if not supplied defaults to
+ options.
+ """
+ if check_options is None:
+ check_options = options
+
+ def getname(n):
+ return getattr(check_options, n, None)
+ names = ["build_options", "global_options", "install_options"]
+ if any(map(getname, names)):
+ control = options.format_control
+ control.disallow_binaries()
+ warnings.warn(
+ 'Disabling all use of wheels due to the use of --build-options '
+ '/ --global-options / --install-options.', stacklevel=2,
+ )
+
+
+def check_dist_restriction(options, check_target=False):
+ # type: (Values, bool) -> None
+ """Function for determining if custom platform options are allowed.
+
+ :param options: The OptionParser options.
+ :param check_target: Whether or not to check if --target is being used.
+ """
+ dist_restriction_set = any([
+ options.python_version,
+ options.platform,
+ options.abi,
+ options.implementation,
+ ])
+
+ binary_only = FormatControl(set(), {':all:'})
+ sdist_dependencies_allowed = (
+ options.format_control != binary_only and
+ not options.ignore_dependencies
+ )
+
+ # Installations or downloads using dist restrictions must not combine
+ # source distributions and dist-specific wheels, as they are not
+ # guaranteed to be locally compatible.
+ if dist_restriction_set and sdist_dependencies_allowed:
+ raise CommandError(
+ "When restricting platform and interpreter constraints using "
+ "--python-version, --platform, --abi, or --implementation, "
+ "either --no-deps must be set, or --only-binary=:all: must be "
+ "set and --no-binary must not be set (or must be set to "
+ ":none:)."
+ )
+
+ if check_target:
+ if dist_restriction_set and not options.target_dir:
+ raise CommandError(
+ "Can not use any platform or abi specific options unless "
+ "installing via '--target'"
+ )
+
+
+###########
+# options #
+###########
+
+help_ = partial(
+ Option,
+ '-h', '--help',
+ dest='help',
+ action='help',
+ help='Show help.',
+) # type: Callable[..., Option]
+
+isolated_mode = partial(
+ Option,
+ "--isolated",
+ dest="isolated_mode",
+ action="store_true",
+ default=False,
+ help=(
+ "Run pip in an isolated mode, ignoring environment variables and user "
+ "configuration."
+ ),
+) # type: Callable[..., Option]
+
+require_virtualenv = partial(
+ Option,
+ # Run only if inside a virtualenv, bail if not.
+ '--require-virtualenv', '--require-venv',
+ dest='require_venv',
+ action='store_true',
+ default=False,
+ help=SUPPRESS_HELP
+) # type: Callable[..., Option]
+
+verbose = partial(
+ Option,
+ '-v', '--verbose',
+ dest='verbose',
+ action='count',
+ default=0,
+ help='Give more output. Option is additive, and can be used up to 3 times.'
+) # type: Callable[..., Option]
+
+no_color = partial(
+ Option,
+ '--no-color',
+ dest='no_color',
+ action='store_true',
+ default=False,
+ help="Suppress colored output",
+) # type: Callable[..., Option]
+
+version = partial(
+ Option,
+ '-V', '--version',
+ dest='version',
+ action='store_true',
+ help='Show version and exit.',
+) # type: Callable[..., Option]
+
+quiet = partial(
+ Option,
+ '-q', '--quiet',
+ dest='quiet',
+ action='count',
+ default=0,
+ help=(
+ 'Give less output. Option is additive, and can be used up to 3'
+ ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
+ ' levels).'
+ ),
+) # type: Callable[..., Option]
+
+progress_bar = partial(
+ Option,
+ '--progress-bar',
+ dest='progress_bar',
+ type='choice',
+ choices=list(BAR_TYPES.keys()),
+ default='on',
+ help=(
+ 'Specify type of progress to be displayed [' +
+ '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
+ ),
+) # type: Callable[..., Option]
+
+log = partial(
+ Option,
+ "--log", "--log-file", "--local-log",
+ dest="log",
+ metavar="path",
+ help="Path to a verbose appending log."
+) # type: Callable[..., Option]
+
+no_input = partial(
+ Option,
+ # Don't ask for input
+ '--no-input',
+ dest='no_input',
+ action='store_true',
+ default=False,
+ help=SUPPRESS_HELP
+) # type: Callable[..., Option]
+
+proxy = partial(
+ Option,
+ '--proxy',
+ dest='proxy',
+ type='str',
+ default='',
+ help="Specify a proxy in the form [user:passwd@]proxy.server:port."
+) # type: Callable[..., Option]
+
+retries = partial(
+ Option,
+ '--retries',
+ dest='retries',
+ type='int',
+ default=5,
+ help="Maximum number of retries each connection should attempt "
+ "(default %default times).",
+) # type: Callable[..., Option]
+
+timeout = partial(
+ Option,
+ '--timeout', '--default-timeout',
+ metavar='sec',
+ dest='timeout',
+ type='float',
+ default=15,
+ help='Set the socket timeout (default %default seconds).',
+) # type: Callable[..., Option]
+
+skip_requirements_regex = partial(
+ Option,
+ # A regex to be used to skip requirements
+ '--skip-requirements-regex',
+ dest='skip_requirements_regex',
+ type='str',
+ default='',
+ help=SUPPRESS_HELP,
+) # type: Callable[..., Option]
+
+
+def exists_action():
+ # type: () -> Option
+ return Option(
+ # Option when path already exist
+ '--exists-action',
+ dest='exists_action',
+ type='choice',
+ choices=['s', 'i', 'w', 'b', 'a'],
+ default=[],
+ action='append',
+ metavar='action',
+ help="Default action when a path already exists: "
+ "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
+ )
+
+
+cert = partial(
+ Option,
+ '--cert',
+ dest='cert',
+ type='str',
+ metavar='path',
+ help="Path to alternate CA bundle.",
+) # type: Callable[..., Option]
+
+client_cert = partial(
+ Option,
+ '--client-cert',
+ dest='client_cert',
+ type='str',
+ default=None,
+ metavar='path',
+ help="Path to SSL client certificate, a single file containing the "
+ "private key and the certificate in PEM format.",
+) # type: Callable[..., Option]
+
+index_url = partial(
+ Option,
+ '-i', '--index-url', '--pypi-url',
+ dest='index_url',
+ metavar='URL',
+ default=PyPI.simple_url,
+ help="Base URL of the Python Package Index (default %default). "
+ "This should point to a repository compliant with PEP 503 "
+ "(the simple repository API) or a local directory laid out "
+ "in the same format.",
+) # type: Callable[..., Option]
+
+
+def extra_index_url():
+ return Option(
+ '--extra-index-url',
+ dest='extra_index_urls',
+ metavar='URL',
+ action='append',
+ default=[],
+ help="Extra URLs of package indexes to use in addition to "
+ "--index-url. Should follow the same rules as "
+ "--index-url.",
+ )
+
+
+no_index = partial(
+ Option,
+ '--no-index',
+ dest='no_index',
+ action='store_true',
+ default=False,
+ help='Ignore package index (only looking at --find-links URLs instead).',
+) # type: Callable[..., Option]
+
+
+def find_links():
+ # type: () -> Option
+ return Option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='url',
+ help="If a url or path to an html file, then parse for links to "
+ "archives. If a local path or file:// url that's a directory, "
+ "then look for archives in the directory listing.",
+ )
+
+
+def make_search_scope(options, suppress_no_index=False):
+ # type: (Values, bool) -> SearchScope
+ """
+ :param suppress_no_index: Whether to ignore the --no-index option
+ when constructing the SearchScope object.
+ """
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index and not suppress_no_index:
+ logger.debug(
+ 'Ignoring indexes: %s',
+ ','.join(redact_password_from_url(url) for url in index_urls),
+ )
+ index_urls = []
+
+ search_scope = SearchScope(
+ find_links=options.find_links,
+ index_urls=index_urls,
+ )
+
+ return search_scope
+
+
+def trusted_host():
+ # type: () -> Option
+ return Option(
+ "--trusted-host",
+ dest="trusted_hosts",
+ action="append",
+ metavar="HOSTNAME",
+ default=[],
+        help="Mark this host as trusted, even though it does not have "
+             "valid HTTPS (or any HTTPS at all).",
+ )
+
+
+def constraints():
+ # type: () -> Option
+ return Option(
+ '-c', '--constraint',
+ dest='constraints',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Constrain versions using the given constraints file. '
+ 'This option can be used multiple times.'
+ )
+
+
+def requirements():
+ # type: () -> Option
+ return Option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Install from the given requirements file. '
+ 'This option can be used multiple times.'
+ )
+
+
+def editable():
+ # type: () -> Option
+ return Option(
+ '-e', '--editable',
+ dest='editables',
+ action='append',
+ default=[],
+ metavar='path/url',
+ help=('Install a project in editable mode (i.e. setuptools '
+ '"develop mode") from a local project path or a VCS url.'),
+ )
+
+
+src = partial(
+ Option,
+ '--src', '--source', '--source-dir', '--source-directory',
+ dest='src_dir',
+ metavar='dir',
+ default=get_src_prefix(),
+ help='Directory to check out editable projects into. '
+ 'The default in a virtualenv is "<venv path>/src". '
+ 'The default for global installs is "<current dir>/src".'
+) # type: Callable[..., Option]
+
+
+def _get_format_control(values, option):
+ # type: (Values, Option) -> Any
+ """Get a format_control object."""
+ return getattr(values, option.dest)
+
+
+def _handle_no_binary(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ existing = _get_format_control(parser.values, option)
+ FormatControl.handle_mutual_excludes(
+ value, existing.no_binary, existing.only_binary,
+ )
+
+
+def _handle_only_binary(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ existing = _get_format_control(parser.values, option)
+ FormatControl.handle_mutual_excludes(
+ value, existing.only_binary, existing.no_binary,
+ )
+
+
+def no_binary():
+ # type: () -> Option
+ format_control = FormatControl(set(), set())
+ return Option(
+ "--no-binary", dest="format_control", action="callback",
+ callback=_handle_no_binary, type="str",
+ default=format_control,
+ help="Do not use binary packages. Can be supplied multiple times, and "
+ "each time adds to the existing value. Accepts either :all: to "
+ "disable all binary packages, :none: to empty the set, or one or "
+ "more package names with commas between them. Note that some "
+ "packages are tricky to compile and may fail to install when "
+ "this option is used on them.",
+ )
+
+
+def only_binary():
+ # type: () -> Option
+ format_control = FormatControl(set(), set())
+ return Option(
+ "--only-binary", dest="format_control", action="callback",
+ callback=_handle_only_binary, type="str",
+ default=format_control,
+ help="Do not use source packages. Can be supplied multiple times, and "
+ "each time adds to the existing value. Accepts either :all: to "
+ "disable all source packages, :none: to empty the set, or one or "
+ "more package names with commas between them. Packages without "
+ "binary distributions will fail to install when this option is "
+ "used on them.",
+ )
+
+
+platform = partial(
+ Option,
+ '--platform',
+ dest='platform',
+ metavar='platform',
+ default=None,
+ help=("Only use wheels compatible with <platform>. "
+ "Defaults to the platform of the running system."),
+) # type: Callable[..., Option]
+
+
+# This was made a separate function for unit-testing purposes.
+def _convert_python_version(value):
+ # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
+ """
+ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
+
+ :return: A 2-tuple (version_info, error_msg), where `error_msg` is
+ non-None if and only if there was a parsing error.
+ """
+ if not value:
+ # The empty string is the same as not providing a value.
+ return (None, None)
+
+ parts = value.split('.')
+ if len(parts) > 3:
+ return ((), 'at most three version parts are allowed')
+
+ if len(parts) == 1:
+ # Then we are in the case of "3" or "37".
+ value = parts[0]
+ if len(value) > 1:
+ parts = [value[0], value[1:]]
+
+ try:
+ version_info = tuple(int(part) for part in parts)
+ except ValueError:
+ return ((), 'each version part must be an integer')
+
+ return (version_info, None)
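+
+# For example (editor's note): _convert_python_version("37") returns
+# ((3, 7), None), _convert_python_version("3.7.3") returns
+# ((3, 7, 3), None), and _convert_python_version("3.7.3.1") returns
+# ((), 'at most three version parts are allowed').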
+
+
+def _handle_python_version(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """
+ Handle a provided --python-version value.
+ """
+ version_info, error_msg = _convert_python_version(value)
+ if error_msg is not None:
+ msg = (
+ 'invalid --python-version value: {!r}: {}'.format(
+ value, error_msg,
+ )
+ )
+ raise_option_error(parser, option=option, msg=msg)
+
+ parser.values.python_version = version_info
+
+
+python_version = partial(
+ Option,
+ '--python-version',
+ dest='python_version',
+ metavar='python_version',
+ action='callback',
+ callback=_handle_python_version, type='str',
+ default=None,
+ help=dedent("""\
+ The Python interpreter version to use for wheel and "Requires-Python"
+ compatibility checks. Defaults to a version derived from the running
+ interpreter. The version can be specified using up to three dot-separated
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
+ """),
+) # type: Callable[..., Option]
+
+
+implementation = partial(
+ Option,
+ '--implementation',
+ dest='implementation',
+ metavar='implementation',
+ default=None,
+    help=("Only use wheels compatible with Python "
+          "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
+          "or 'ip'. If not specified, then the current "
+          "interpreter implementation is used. Use 'py' to force "
+          "implementation-agnostic wheels."),
+) # type: Callable[..., Option]
+
+
+abi = partial(
+ Option,
+ '--abi',
+ dest='abi',
+ metavar='abi',
+ default=None,
+ help=("Only use wheels compatible with Python "
+ "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
+ "current interpreter abi tag is used. Generally "
+ "you will need to specify --implementation, "
+ "--platform, and --python-version when using "
+ "this option."),
+) # type: Callable[..., Option]
+
+
+def add_target_python_options(cmd_opts):
+ # type: (OptionGroup) -> None
+ cmd_opts.add_option(platform())
+ cmd_opts.add_option(python_version())
+ cmd_opts.add_option(implementation())
+ cmd_opts.add_option(abi())
+
+
+def make_target_python(options):
+ # type: (Values) -> TargetPython
+ target_python = TargetPython(
+ platform=options.platform,
+ py_version_info=options.python_version,
+ abi=options.abi,
+ implementation=options.implementation,
+ )
+
+ return target_python
+
+
+def prefer_binary():
+ # type: () -> Option
+ return Option(
+ "--prefer-binary",
+ dest="prefer_binary",
+ action="store_true",
+ default=False,
+ help="Prefer older binary packages over newer source packages."
+ )
+
+
+cache_dir = partial(
+ Option,
+ "--cache-dir",
+ dest="cache_dir",
+ default=USER_CACHE_DIR,
+ metavar="dir",
+ help="Store the cache data in <dir>."
+) # type: Callable[..., Option]
+
+
+def _handle_no_cache_dir(option, opt, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """
+ Process a value provided for the --no-cache-dir option.
+
+ This is an optparse.Option callback for the --no-cache-dir option.
+ """
+ # The value argument will be None if --no-cache-dir is passed via the
+ # command-line, since the option doesn't accept arguments. However,
+ # the value can be non-None if the option is triggered e.g. by an
+ # environment variable, like PIP_NO_CACHE_DIR=true.
+ if value is not None:
+ # Then parse the string value to get argument error-checking.
+ try:
+ strtobool(value)
+ except ValueError as exc:
+ raise_option_error(parser, option=option, msg=str(exc))
+
+ # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
+ # converted to 0 (like "false" or "no") caused cache_dir to be disabled
+ # rather than enabled (logic would say the latter). Thus, we disable
+ # the cache directory not just on values that parse to True, but (for
+ # backwards compatibility reasons) also on values that parse to False.
+ # In other words, always set it to False if the option is provided in
+ # some (valid) form.
+ parser.values.cache_dir = False
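+
+    # For example (editor's note): PIP_NO_CACHE_DIR=true and
+    # PIP_NO_CACHE_DIR=false both end up disabling the cache here; only an
+    # unparseable value such as PIP_NO_CACHE_DIR=maybe is rejected with an
+    # error.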
+
+
+no_cache = partial(
+ Option,
+ "--no-cache-dir",
+ dest="cache_dir",
+ action="callback",
+ callback=_handle_no_cache_dir,
+ help="Disable the cache.",
+) # type: Callable[..., Option]
+
+no_deps = partial(
+ Option,
+ '--no-deps', '--no-dependencies',
+ dest='ignore_dependencies',
+ action='store_true',
+ default=False,
+ help="Don't install package dependencies.",
+) # type: Callable[..., Option]
+
+build_dir = partial(
+ Option,
+ '-b', '--build', '--build-dir', '--build-directory',
+ dest='build_dir',
+ metavar='dir',
+ help='Directory to unpack packages into and build in. Note that '
+ 'an initial build still takes place in a temporary directory. '
+ 'The location of temporary directories can be controlled by setting '
+ 'the TMPDIR environment variable (TEMP on Windows) appropriately. '
+ 'When passed, build directories are not cleaned in case of failures.'
+) # type: Callable[..., Option]
+
+ignore_requires_python = partial(
+ Option,
+ '--ignore-requires-python',
+ dest='ignore_requires_python',
+ action='store_true',
+ help='Ignore the Requires-Python information.'
+) # type: Callable[..., Option]
+
+no_build_isolation = partial(
+ Option,
+ '--no-build-isolation',
+ dest='build_isolation',
+ action='store_false',
+ default=True,
+ help='Disable isolation when building a modern source distribution. '
+ 'Build dependencies specified by PEP 518 must be already installed '
+ 'if this option is used.'
+) # type: Callable[..., Option]
+
+
+def _handle_no_use_pep517(option, opt, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """
+ Process a value provided for the --no-use-pep517 option.
+
+ This is an optparse.Option callback for the no_use_pep517 option.
+ """
+ # Since --no-use-pep517 doesn't accept arguments, the value argument
+ # will be None if --no-use-pep517 is passed via the command-line.
+ # However, the value can be non-None if the option is triggered e.g.
+ # by an environment variable, for example "PIP_NO_USE_PEP517=true".
+ if value is not None:
+ msg = """A value was passed for --no-use-pep517,
+ probably using either the PIP_NO_USE_PEP517 environment variable
+ or the "no-use-pep517" config file option. Use an appropriate value
+ of the PIP_USE_PEP517 environment variable or the "use-pep517"
+ config file option instead.
+ """
+ raise_option_error(parser, option=option, msg=msg)
+
+ # Otherwise, --no-use-pep517 was passed via the command-line.
+ parser.values.use_pep517 = False
+
+
+use_pep517 = partial(
+ Option,
+ '--use-pep517',
+ dest='use_pep517',
+ action='store_true',
+ default=None,
+ help='Use PEP 517 for building source distributions '
+ '(use --no-use-pep517 to force legacy behaviour).'
+) # type: Any
+
+no_use_pep517 = partial(
+ Option,
+ '--no-use-pep517',
+ dest='use_pep517',
+ action='callback',
+ callback=_handle_no_use_pep517,
+ default=None,
+ help=SUPPRESS_HELP
+) # type: Any
+
+install_options = partial(
+ Option,
+ '--install-option',
+ dest='install_options',
+ action='append',
+ metavar='options',
+ help="Extra arguments to be supplied to the setup.py install "
+ "command (use like --install-option=\"--install-scripts=/usr/local/"
+ "bin\"). Use multiple --install-option options to pass multiple "
+ "options to setup.py install. If you are using an option with a "
+ "directory path, be sure to use absolute path.",
+) # type: Callable[..., Option]
+
+global_options = partial(
+ Option,
+ '--global-option',
+ dest='global_options',
+ action='append',
+ metavar='options',
+ help="Extra global options to be supplied to the setup.py "
+ "call before the install command.",
+) # type: Callable[..., Option]
+
+no_clean = partial(
+ Option,
+ '--no-clean',
+ action='store_true',
+ default=False,
+ help="Don't clean up build directories."
+) # type: Callable[..., Option]
+
+pre = partial(
+ Option,
+ '--pre',
+ action='store_true',
+ default=False,
+ help="Include pre-release and development versions. By default, "
+ "pip only finds stable versions.",
+) # type: Callable[..., Option]
+
+disable_pip_version_check = partial(
+ Option,
+ "--disable-pip-version-check",
+ dest="disable_pip_version_check",
+ action="store_true",
+ default=False,
+ help="Don't periodically check PyPI to determine whether a new version "
+ "of pip is available for download. Implied with --no-index.",
+) # type: Callable[..., Option]
+
+
+# Deprecated, Remove later
+always_unzip = partial(
+ Option,
+ '-Z', '--always-unzip',
+ dest='always_unzip',
+ action='store_true',
+ help=SUPPRESS_HELP,
+) # type: Callable[..., Option]
+
+
+def _handle_merge_hash(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """Given a value spelled "algo:digest", append the digest to a list
+ pointed to in a dict by the algo name."""
+ if not parser.values.hashes:
+ parser.values.hashes = {}
+ try:
+ algo, digest = value.split(':', 1)
+ except ValueError:
+ parser.error('Arguments to %s must be a hash name '
+ 'followed by a value, like --hash=sha256:abcde...' %
+ opt_str)
+ if algo not in STRONG_HASHES:
+ parser.error('Allowed hash algorithms for %s are %s.' %
+ (opt_str, ', '.join(STRONG_HASHES)))
+ parser.values.hashes.setdefault(algo, []).append(digest)
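+
+# For example (editor's note): two --hash=sha256:<digest> options for one
+# requirement accumulate as parser.values.hashes ==
+# {'sha256': ['<digest1>', '<digest2>']}.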
+
+
+hash = partial(
+ Option,
+ '--hash',
+ # Hash values eventually end up in InstallRequirement.hashes due to
+ # __dict__ copying in process_line().
+ dest='hashes',
+ action='callback',
+ callback=_handle_merge_hash,
+ type='string',
+ help="Verify that the package's archive matches this "
+ 'hash before installing. Example: --hash=sha256:abcdef...',
+) # type: Callable[..., Option]
+
+
+require_hashes = partial(
+ Option,
+ '--require-hashes',
+ dest='require_hashes',
+ action='store_true',
+ default=False,
+ help='Require a hash to check each requirement against, for '
+ 'repeatable installs. This option is implied when any package in a '
+ 'requirements file has a --hash option.',
+) # type: Callable[..., Option]
+
+
+list_path = partial(
+ Option,
+ '--path',
+ dest='path',
+ action='append',
+ help='Restrict to the specified installation path for listing '
+ 'packages (can be used multiple times).'
+) # type: Callable[..., Option]
+
+
+def check_list_path_option(options):
+ # type: (Values) -> None
+ if options.path and (options.user or options.local):
+ raise CommandError(
+ "Cannot combine '--path' with '--user' or '--local'"
+ )
+
+
+##########
+# groups #
+##########
+
+general_group = {
+ 'name': 'General Options',
+ 'options': [
+ help_,
+ isolated_mode,
+ require_virtualenv,
+ verbose,
+ version,
+ quiet,
+ log,
+ no_input,
+ proxy,
+ retries,
+ timeout,
+ skip_requirements_regex,
+ exists_action,
+ trusted_host,
+ cert,
+ client_cert,
+ cache_dir,
+ no_cache,
+ disable_pip_version_check,
+ no_color,
+ ]
+} # type: Dict[str, Any]
+
+index_group = {
+ 'name': 'Package Index Options',
+ 'options': [
+ index_url,
+ extra_index_url,
+ no_index,
+ find_links,
+ ]
+} # type: Dict[str, Any]
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py
new file mode 100644
index 00000000..6d0b719a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py
@@ -0,0 +1,98 @@
+"""A single place for constructing and exposing the main parser
+"""
+
+import os
+import sys
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.parser import (
+ ConfigOptionParser, UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.commands import (
+ commands_dict, get_similar_commands, get_summaries,
+)
+from pip._internal.exceptions import CommandError
+from pip._internal.utils.misc import get_pip_version, get_prog
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Tuple, List
+
+
+__all__ = ["create_main_parser", "parse_command"]
+
+
+def create_main_parser():
+ # type: () -> ConfigOptionParser
+ """Creates and returns the main parser for pip's CLI
+ """
+
+ parser_kw = {
+ 'usage': '\n%prog <command> [options]',
+ 'add_help_option': False,
+ 'formatter': UpdatingDefaultsHelpFormatter(),
+ 'name': 'global',
+ 'prog': get_prog(),
+ }
+
+ parser = ConfigOptionParser(**parser_kw)
+ parser.disable_interspersed_args()
+
+ parser.version = get_pip_version()
+
+ # add the general options
+ gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
+ parser.add_option_group(gen_opts)
+
+ # so the help formatter knows
+ parser.main = True # type: ignore
+
+ # create command listing for description
+ command_summaries = get_summaries()
+ description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
+ parser.description = '\n'.join(description)
+
+ return parser
+
+
+def parse_command(args):
+ # type: (List[str]) -> Tuple[str, List[str]]
+ parser = create_main_parser()
+
+ # Note: parser calls disable_interspersed_args(), so the result of this
+ # call is to split the initial args into the general options before the
+ # subcommand and everything else.
+ # For example:
+ # args: ['--timeout=5', 'install', '--user', 'INITools']
+    # general_options: ['--timeout=5']
+ # args_else: ['install', '--user', 'INITools']
+ general_options, args_else = parser.parse_args(args)
+
+ # --version
+ if general_options.version:
+ sys.stdout.write(parser.version) # type: ignore
+ sys.stdout.write(os.linesep)
+ sys.exit()
+
+ # pip || pip help -> print_help()
+ if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
+ parser.print_help()
+ sys.exit()
+
+ # the subcommand name
+ cmd_name = args_else[0]
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = ['unknown command "%s"' % cmd_name]
+ if guess:
+ msg.append('maybe you meant "%s"' % guess)
+
+ raise CommandError(' - '.join(msg))
+
+ # all the args without the subcommand
+ cmd_args = args[:]
+ cmd_args.remove(cmd_name)
+
+ return cmd_name, cmd_args
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py
new file mode 100644
index 00000000..e1eaac42
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py
@@ -0,0 +1,261 @@
+"""Base option parser setup"""
+from __future__ import absolute_import
+
+import logging
+import optparse
+import sys
+import textwrap
+from distutils.util import strtobool
+
+from pip._vendor.six import string_types
+
+from pip._internal.cli.status_codes import UNKNOWN_ERROR
+from pip._internal.configuration import Configuration, ConfigurationError
+from pip._internal.utils.compat import get_terminal_size
+
+logger = logging.getLogger(__name__)
+
+
+class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
+ """A prettier/less verbose help formatter for optparse."""
+
+ def __init__(self, *args, **kwargs):
+ # help position must be aligned with __init__.parseopts.description
+ kwargs['max_help_position'] = 30
+ kwargs['indent_increment'] = 1
+ kwargs['width'] = get_terminal_size()[0] - 2
+ optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
+
+ def format_option_strings(self, option):
+ return self._format_option_strings(option, ' <%s>', ', ')
+
+ def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
+ """
+ Return a comma-separated list of option strings and metavars.
+
+ :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
+ :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
+ :param optsep: separator
+ """
+ opts = []
+
+ if option._short_opts:
+ opts.append(option._short_opts[0])
+ if option._long_opts:
+ opts.append(option._long_opts[0])
+ if len(opts) > 1:
+ opts.insert(1, optsep)
+
+ if option.takes_value():
+ metavar = option.metavar or option.dest.lower()
+ opts.append(mvarfmt % metavar.lower())
+
+ return ''.join(opts)
+
+ def format_heading(self, heading):
+ if heading == 'Options':
+ return ''
+ return heading + ':\n'
+
+ def format_usage(self, usage):
+ """
+ Ensure there is only one newline between usage and the first heading
+ if there is no description.
+ """
+ msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
+ return msg
+
+ def format_description(self, description):
+ # leave full control over description to us
+ if description:
+ if hasattr(self.parser, 'main'):
+ label = 'Commands'
+ else:
+ label = 'Description'
+ # some doc strings have initial newlines, some don't
+ description = description.lstrip('\n')
+ # some doc strings have final newlines and spaces, some don't
+ description = description.rstrip()
+ # dedent, then reindent
+ description = self.indent_lines(textwrap.dedent(description), " ")
+ description = '%s:\n%s\n' % (label, description)
+ return description
+ else:
+ return ''
+
+ def format_epilog(self, epilog):
+ # leave full control over epilog to us
+ if epilog:
+ return epilog
+ else:
+ return ''
+
+ def indent_lines(self, text, indent):
+ new_lines = [indent + line for line in text.split('\n')]
+ return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+ """Custom help formatter for use in ConfigOptionParser.
+
+    This updates the defaults before expanding them, allowing
+    them to show up correctly in the help listing.
+ """
+
+ def expand_default(self, option):
+ if self.parser is not None:
+ self.parser._update_defaults(self.parser.defaults)
+ return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class CustomOptionParser(optparse.OptionParser):
+
+ def insert_option_group(self, idx, *args, **kwargs):
+ """Insert an OptionGroup at a given position."""
+ group = self.add_option_group(*args, **kwargs)
+
+ self.option_groups.pop()
+ self.option_groups.insert(idx, group)
+
+ return group
+
+ @property
+ def option_list_all(self):
+ """Get a list of all options, including those in option groups."""
+ res = self.option_list[:]
+ for i in self.option_groups:
+ res.extend(i.option_list)
+
+ return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+    """Custom option parser which updates its defaults by checking the
+    configuration files and environment variables"""
+
+ def __init__(self, *args, **kwargs):
+ self.name = kwargs.pop('name')
+
+ isolated = kwargs.pop("isolated", False)
+ self.config = Configuration(isolated)
+
+ assert self.name
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def check_default(self, option, key, val):
+ try:
+ return option.check_value(key, val)
+ except optparse.OptionValueError as exc:
+ print("An error occurred during configuration: %s" % exc)
+ sys.exit(3)
+
+ def _get_ordered_configuration_items(self):
+ # Configuration gives keys in an unordered manner. Order them.
+ override_order = ["global", self.name, ":env:"]
+
+ # Pool the options into different groups
+ section_items = {name: [] for name in override_order}
+ for section_key, val in self.config.items():
+ # ignore empty values
+ if not val:
+ logger.debug(
+                    "Ignoring configuration key '%s' as its value is empty.",
+ section_key
+ )
+ continue
+
+ section, key = section_key.split(".", 1)
+ if section in override_order:
+ section_items[section].append((key, val))
+
+ # Yield each group in their override order
+ for section in override_order:
+ for key, val in section_items[section]:
+ yield key, val
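+
+        # For example (editor's note): with "global.timeout = 10" and
+        # "install.timeout = 60" configured, an ``install`` parser yields
+        # ('timeout', '10') before ('timeout', '60'); since
+        # _update_defaults() applies values in order, the command-specific
+        # value wins.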
+
+ def _update_defaults(self, defaults):
+ """Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists)."""
+
+ # Accumulate complex default state.
+ self.values = optparse.Values(self.defaults)
+ late_eval = set()
+ # Then set the options with those values
+ for key, val in self._get_ordered_configuration_items():
+ # '--' because configuration supports only long names
+ option = self.get_option('--' + key)
+
+ # Ignore options not present in this parser. E.g. non-globals put
+ # in [global] by users that want them to apply to all applicable
+ # commands.
+ if option is None:
+ continue
+
+ if option.action in ('store_true', 'store_false', 'count'):
+ try:
+ val = strtobool(val)
+ except ValueError:
+ error_msg = invalid_config_error_message(
+ option.action, key, val
+ )
+ self.error(error_msg)
+
+ elif option.action == 'append':
+ val = val.split()
+ val = [self.check_default(option, key, v) for v in val]
+ elif option.action == 'callback':
+ late_eval.add(option.dest)
+ opt_str = option.get_opt_string()
+ val = option.convert_value(opt_str, val)
+ # From take_action
+ args = option.callback_args or ()
+ kwargs = option.callback_kwargs or {}
+ option.callback(option, opt_str, val, self, *args, **kwargs)
+ else:
+ val = self.check_default(option, key, val)
+
+ defaults[option.dest] = val
+
+ for key in late_eval:
+ defaults[key] = getattr(self.values, key)
+ self.values = None
+ return defaults
+
+ def get_default_values(self):
+ """Overriding to make updating the defaults after instantiation of
+        the option parser possible; _update_defaults() does the dirty work."""
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+ # Load the configuration, or error out in case of an error
+ try:
+ self.config.load()
+ except ConfigurationError as err:
+ self.exit(UNKNOWN_ERROR, str(err))
+
+ defaults = self._update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isinstance(default, string_types):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+ def error(self, msg):
+ self.print_usage(sys.stderr)
+ self.exit(UNKNOWN_ERROR, "%s\n" % msg)
+
+
+def invalid_config_error_message(action, key, val):
+ """Returns a better error message when invalid configuration option
+ is provided."""
+ if action in ('store_true', 'store_false'):
+ return ("{0} is not a valid value for {1} option, "
+ "please specify a boolean value like yes/no, "
+ "true/false or 1/0 instead.").format(val, key)
+
+ return ("{0} is not a valid value for {1} option, "
+ "please specify a numerical value like 1/0 "
+ "instead.").format(val, key)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py
new file mode 100644
index 00000000..275360a3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py
new file mode 100644
index 00000000..9e0ab86b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py
@@ -0,0 +1,81 @@
+"""
+Package containing all pip commands
+"""
+from __future__ import absolute_import
+
+from pip._internal.commands.completion import CompletionCommand
+from pip._internal.commands.configuration import ConfigurationCommand
+from pip._internal.commands.debug import DebugCommand
+from pip._internal.commands.download import DownloadCommand
+from pip._internal.commands.freeze import FreezeCommand
+from pip._internal.commands.hash import HashCommand
+from pip._internal.commands.help import HelpCommand
+from pip._internal.commands.list import ListCommand
+from pip._internal.commands.check import CheckCommand
+from pip._internal.commands.search import SearchCommand
+from pip._internal.commands.show import ShowCommand
+from pip._internal.commands.install import InstallCommand
+from pip._internal.commands.uninstall import UninstallCommand
+from pip._internal.commands.wheel import WheelCommand
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Type
+ from pip._internal.cli.base_command import Command
+
+commands_order = [
+ InstallCommand,
+ DownloadCommand,
+ UninstallCommand,
+ FreezeCommand,
+ ListCommand,
+ ShowCommand,
+ CheckCommand,
+ ConfigurationCommand,
+ SearchCommand,
+ WheelCommand,
+ HashCommand,
+ CompletionCommand,
+ DebugCommand,
+ HelpCommand,
+] # type: List[Type[Command]]
+
+commands_dict = {c.name: c for c in commands_order}
+
+
+def get_summaries(ordered=True):
+ """Yields sorted (command name, command summary) tuples."""
+
+ if ordered:
+ cmditems = _sort_commands(commands_dict, commands_order)
+ else:
+ cmditems = commands_dict.items()
+
+ for name, command_class in cmditems:
+ yield (name, command_class.summary)
+
+
+def get_similar_commands(name):
+ """Command name auto-correct."""
+ from difflib import get_close_matches
+
+ name = name.lower()
+
+ close_commands = get_close_matches(name, commands_dict.keys())
+
+ if close_commands:
+ return close_commands[0]
+ else:
+ return False
+
+
+def _sort_commands(cmddict, order):
+ def keyfn(key):
+ try:
+ return order.index(key[1])
+ except ValueError:
+ # unordered items should come last
+ return 0xff
+
+ return sorted(cmddict.items(), key=keyfn)
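
As a quick illustration of the registry above (a sketch, assuming this vendored pip._internal package is importable):

    # Illustrative sketch: the same lookups that `pip help` relies on.
    from pip._internal.commands import (
        commands_dict, get_similar_commands, get_summaries,
    )

    print(sorted(commands_dict))           # registered command names
    print(get_similar_commands('instal'))  # difflib suggestion, e.g. 'install'
    for name, summary in get_summaries():
        print('%-12s %s' % (name, summary))
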
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..2cde7849
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pyc
new file mode 100644
index 00000000..144e4054
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pyc
new file mode 100644
index 00000000..7fdc69fe
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc
new file mode 100644
index 00000000..309428ab
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pyc
new file mode 100644
index 00000000..f7cac51b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pyc
new file mode 100644
index 00000000..2c9d0b1c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc
new file mode 100644
index 00000000..4bc32147
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pyc
new file mode 100644
index 00000000..353ce32a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pyc
new file mode 100644
index 00000000..a310cb1d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pyc
new file mode 100644
index 00000000..edf1d4e7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pyc
new file mode 100644
index 00000000..6ff853c7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pyc
new file mode 100644
index 00000000..dd9671f0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pyc
new file mode 100644
index 00000000..44c0e016
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc
new file mode 100644
index 00000000..6879aed7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc
new file mode 100644
index 00000000..05e74585
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py
new file mode 100644
index 00000000..801cecc0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py
@@ -0,0 +1,41 @@
+import logging
+
+from pip._internal.cli.base_command import Command
+from pip._internal.operations.check import (
+ check_package_set, create_package_set_from_installed,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+ """Verify installed packages have compatible dependencies."""
+ name = 'check'
+ usage = """
+ %prog [options]"""
+ summary = 'Verify installed packages have compatible dependencies.'
+
+ def run(self, options, args):
+ package_set, parsing_probs = create_package_set_from_installed()
+ missing, conflicting = check_package_set(package_set)
+
+ for project_name in missing:
+ version = package_set[project_name].version
+ for dependency in missing[project_name]:
+ logger.info(
+ "%s %s requires %s, which is not installed.",
+ project_name, version, dependency[0],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name].version
+ for dep_name, dep_version, req in conflicting[project_name]:
+ logger.info(
+ "%s %s has requirement %s, but you have %s %s.",
+ project_name, version, req, dep_name, dep_version,
+ )
+
+ if missing or conflicting or parsing_probs:
+ return 1
+ else:
+ logger.info("No broken requirements found.")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py
new file mode 100644
index 00000000..2fcdd393
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+
+import sys
+import textwrap
+
+from pip._internal.cli.base_command import Command
+from pip._internal.utils.misc import get_prog
+
+BASE_COMPLETION = """
+# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
+"""
+
+COMPLETION_SCRIPTS = {
+ 'bash': """
+ _pip_completion()
+ {
+ COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ PIP_AUTO_COMPLETE=1 $1 ) )
+ }
+ complete -o default -F _pip_completion %(prog)s
+ """,
+ 'zsh': """
+ function _pip_completion {
+ local words cword
+ read -Ac words
+ read -cn cword
+ reply=( $( COMP_WORDS="$words[*]" \\
+ COMP_CWORD=$(( cword-1 )) \\
+ PIP_AUTO_COMPLETE=1 $words[1] ) )
+ }
+ compctl -K _pip_completion %(prog)s
+ """,
+ 'fish': """
+ function __fish_complete_pip
+ set -lx COMP_WORDS (commandline -o) ""
+ set -lx COMP_CWORD ( \\
+ math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
+ )
+ set -lx PIP_AUTO_COMPLETE 1
+ string split \\ -- (eval $COMP_WORDS[1])
+ end
+ complete -fa "(__fish_complete_pip)" -c %(prog)s
+ """,
+}
+
+
+class CompletionCommand(Command):
+ """A helper command to be used for command completion."""
+ name = 'completion'
+ summary = 'A helper command used for command completion.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(CompletionCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '--bash', '-b',
+ action='store_const',
+ const='bash',
+ dest='shell',
+ help='Emit completion code for bash')
+ cmd_opts.add_option(
+ '--zsh', '-z',
+ action='store_const',
+ const='zsh',
+ dest='shell',
+ help='Emit completion code for zsh')
+ cmd_opts.add_option(
+ '--fish', '-f',
+ action='store_const',
+ const='fish',
+ dest='shell',
+ help='Emit completion code for fish')
+
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ """Prints the completion code of the given shell"""
+ shells = COMPLETION_SCRIPTS.keys()
+ shell_options = ['--' + shell for shell in sorted(shells)]
+ if options.shell in shells:
+ script = textwrap.dedent(
+ COMPLETION_SCRIPTS.get(options.shell, '') % {
+ 'prog': get_prog(),
+ }
+ )
+ print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
+ else:
+ sys.stderr.write(
+ 'ERROR: You must pass %s\n' % ' or '.join(shell_options)
+ )
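
The command is a thin renderer over the templates above; a sketch of the same expansion (illustrative only, hard-coding the program name instead of calling get_prog()):

    # Illustrative sketch: roughly what `pip completion --bash` prints.
    import textwrap

    from pip._internal.commands.completion import (
        BASE_COMPLETION, COMPLETION_SCRIPTS,
    )

    script = textwrap.dedent(COMPLETION_SCRIPTS['bash'] % {'prog': 'pip'})
    print(BASE_COMPLETION % {'script': script, 'shell': 'bash'})
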
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py
new file mode 100644
index 00000000..1ec77d2a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py
@@ -0,0 +1,258 @@
+import logging
+import os
+import subprocess
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+ Configuration, get_configuration_files, kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.misc import get_prog
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+ """Manage local and global configuration.
+
+ Subcommands:
+
+ list: List the active configuration (or from the file specified)
+ edit: Edit the configuration file in an editor
+ get: Get the value associated with name
+ set: Set the name=value
+ unset: Unset the value associated with name
+
+ If none of --user, --global and --site are passed, a virtual
+ environment configuration file is used if one is active and the file
+    exists. Otherwise, all modifications happen to the user file by
+ default.
+ """
+
+ name = 'config'
+ usage = """
+ %prog [<file-option>] list
+ %prog [<file-option>] [--editor <editor-path>] edit
+
+ %prog [<file-option>] get name
+ %prog [<file-option>] set name value
+ %prog [<file-option>] unset name
+ """
+
+ summary = "Manage local and global configuration."
+
+ def __init__(self, *args, **kwargs):
+ super(ConfigurationCommand, self).__init__(*args, **kwargs)
+
+ self.configuration = None
+
+ self.cmd_opts.add_option(
+ '--editor',
+ dest='editor',
+ action='store',
+ default=None,
+ help=(
+ 'Editor to use to edit the file. Uses VISUAL or EDITOR '
+ 'environment variables if not provided.'
+ )
+ )
+
+ self.cmd_opts.add_option(
+ '--global',
+ dest='global_file',
+ action='store_true',
+ default=False,
+ help='Use the system-wide configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user_file',
+ action='store_true',
+ default=False,
+ help='Use the user configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--site',
+ dest='site_file',
+ action='store_true',
+ default=False,
+ help='Use the current environment configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--venv',
+ dest='venv_file',
+ action='store_true',
+ default=False,
+ help=(
+ '[Deprecated] Use the current environment configuration '
+ 'file in a virtual environment only'
+ )
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ handlers = {
+ "list": self.list_values,
+ "edit": self.open_in_editor,
+ "get": self.get_name,
+ "set": self.set_name_value,
+ "unset": self.unset_name
+ }
+
+ # Determine action
+ if not args or args[0] not in handlers:
+ logger.error("Need an action ({}) to perform.".format(
+ ", ".join(sorted(handlers)))
+ )
+ return ERROR
+
+ action = args[0]
+
+ # Determine which configuration files are to be loaded
+ # Depends on whether the command is modifying.
+ try:
+ load_only = self._determine_file(
+ options, need_value=(action in ["get", "set", "unset", "edit"])
+ )
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ # Load a new configuration
+ self.configuration = Configuration(
+ isolated=options.isolated_mode, load_only=load_only
+ )
+ self.configuration.load()
+
+ # Error handling happens here, not in the action-handlers.
+ try:
+ handlers[action](options, args[1:])
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ return SUCCESS
+
+ def _determine_file(self, options, need_value):
+ # Convert legacy venv_file option to site_file or error
+ if options.venv_file and not options.site_file:
+ if running_under_virtualenv():
+ options.site_file = True
+ deprecated(
+ "The --venv option has been deprecated.",
+ replacement="--site",
+ gone_in="19.3",
+ )
+ else:
+ raise PipError(
+ "Legacy --venv option requires a virtual environment. "
+ "Use --site instead."
+ )
+
+ file_options = [key for key, value in (
+ (kinds.USER, options.user_file),
+ (kinds.GLOBAL, options.global_file),
+ (kinds.SITE, options.site_file),
+ ) if value]
+
+ if not file_options:
+ if not need_value:
+ return None
+ # Default to user, unless there's a site file.
+ elif any(
+ os.path.exists(site_config_file)
+ for site_config_file in get_configuration_files()[kinds.SITE]
+ ):
+ return kinds.SITE
+ else:
+ return kinds.USER
+ elif len(file_options) == 1:
+ return file_options[0]
+
+ raise PipError(
+ "Need exactly one file to operate upon "
+ "(--user, --site, --global) to perform."
+ )
+
+ def list_values(self, options, args):
+ self._get_n_args(args, "list", n=0)
+
+ for key, value in sorted(self.configuration.items()):
+ logger.info("%s=%r", key, value)
+
+ def get_name(self, options, args):
+ key = self._get_n_args(args, "get [name]", n=1)
+ value = self.configuration.get_value(key)
+
+ logger.info("%s", value)
+
+ def set_name_value(self, options, args):
+ key, value = self._get_n_args(args, "set [name] [value]", n=2)
+ self.configuration.set_value(key, value)
+
+ self._save_configuration()
+
+ def unset_name(self, options, args):
+ key = self._get_n_args(args, "unset [name]", n=1)
+ self.configuration.unset_value(key)
+
+ self._save_configuration()
+
+ def open_in_editor(self, options, args):
+ editor = self._determine_editor(options)
+
+ fname = self.configuration.get_file_to_edit()
+ if fname is None:
+ raise PipError("Could not determine appropriate file.")
+
+ try:
+ subprocess.check_call([editor, fname])
+ except subprocess.CalledProcessError as e:
+ raise PipError(
+ "Editor Subprocess exited with exit code {}"
+ .format(e.returncode)
+ )
+
+ def _get_n_args(self, args, example, n):
+ """Helper to make sure the command got the right number of arguments
+ """
+ if len(args) != n:
+ msg = (
+ 'Got unexpected number of arguments, expected {}. '
+ '(example: "{} config {}")'
+ ).format(n, get_prog(), example)
+ raise PipError(msg)
+
+ if n == 1:
+ return args[0]
+ else:
+ return args
+
+ def _save_configuration(self):
+ # We successfully ran a modifying command. Need to save the
+ # configuration.
+ try:
+ self.configuration.save()
+ except Exception:
+ logger.error(
+ "Unable to save configuration. Please report this as a bug.",
+ exc_info=1
+ )
+ raise PipError("Internal Error.")
+
+ def _determine_editor(self, options):
+ if options.editor is not None:
+ return options.editor
+ elif "VISUAL" in os.environ:
+ return os.environ["VISUAL"]
+ elif "EDITOR" in os.environ:
+ return os.environ["EDITOR"]
+ else:
+ raise PipError("Could not determine editor to use.")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py
new file mode 100644
index 00000000..eb4f8c4e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py
@@ -0,0 +1,114 @@
+from __future__ import absolute_import
+
+import locale
+import logging
+import sys
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_pip_version
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import format_tag
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, List
+ from optparse import Values
+
+logger = logging.getLogger(__name__)
+
+
+def show_value(name, value):
+ # type: (str, str) -> None
+ logger.info('{}: {}'.format(name, value))
+
+
+def show_sys_implementation():
+ # type: () -> None
+ logger.info('sys.implementation:')
+ if hasattr(sys, 'implementation'):
+ implementation = sys.implementation # type: ignore
+ implementation_name = implementation.name
+ else:
+ implementation_name = ''
+
+ with indent_log():
+ show_value('name', implementation_name)
+
+
+def show_tags(options):
+ # type: (Values) -> None
+ tag_limit = 10
+
+ target_python = make_target_python(options)
+ tags = target_python.get_tags()
+
+ # Display the target options that were explicitly provided.
+ formatted_target = target_python.format_given()
+ suffix = ''
+ if formatted_target:
+ suffix = ' (target: {})'.format(formatted_target)
+
+ msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
+ logger.info(msg)
+
+ if options.verbose < 1 and len(tags) > tag_limit:
+ tags_limited = True
+ tags = tags[:tag_limit]
+ else:
+ tags_limited = False
+
+ with indent_log():
+ for tag in tags:
+ logger.info(format_tag(tag))
+
+ if tags_limited:
+ msg = (
+ '...\n'
+ '[First {tag_limit} tags shown. Pass --verbose to show all.]'
+ ).format(tag_limit=tag_limit)
+ logger.info(msg)
+
+
+class DebugCommand(Command):
+ """
+ Display debug information.
+ """
+
+ name = 'debug'
+ usage = """
+ %prog <options>"""
+ summary = 'Show information useful for debugging.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(DebugCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+ cmdoptions.add_target_python_options(cmd_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> int
+ logger.warning(
+ "This command is only meant for debugging. "
+ "Do not use this with automation for parsing and getting these "
+ "details, since the output and options of this command may "
+ "change without notice."
+ )
+ show_value('pip version', get_pip_version())
+ show_value('sys.version', sys.version)
+ show_value('sys.executable', sys.executable)
+ show_value('sys.getdefaultencoding', sys.getdefaultencoding())
+ show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
+ show_value(
+ 'locale.getpreferredencoding', locale.getpreferredencoding(),
+ )
+ show_value('sys.platform', sys.platform)
+ show_sys_implementation()
+
+ show_tags(options)
+
+ return SUCCESS
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py
new file mode 100644
index 00000000..5642b561
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py
@@ -0,0 +1,168 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import RequirementCommand
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.legacy_resolve import Resolver
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+ """
+ Download packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports downloading from "requirements files", which provide
+ an easy way to specify a whole environment to be downloaded.
+ """
+ name = 'download'
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] <vcs project url> ...
+ %prog [options] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ summary = 'Download packages.'
+
+ def __init__(self, *args, **kw):
+ super(DownloadCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.build_dir())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.global_options())
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.prefer_binary())
+ cmd_opts.add_option(cmdoptions.src())
+ cmd_opts.add_option(cmdoptions.pre())
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+ cmd_opts.add_option(cmdoptions.use_pep517())
+ cmd_opts.add_option(cmdoptions.no_use_pep517())
+
+ cmd_opts.add_option(
+ '-d', '--dest', '--destination-dir', '--destination-directory',
+ dest='download_dir',
+ metavar='dir',
+ default=os.curdir,
+ help=("Download packages into <dir>."),
+ )
+
+ cmdoptions.add_target_python_options(cmd_opts)
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ options.ignore_installed = True
+ # editable doesn't really make sense for `pip download`, but the bowels
+ # of the RequirementSet code require that property.
+ options.editables = []
+
+ cmdoptions.check_dist_restriction(options)
+
+ options.src_dir = os.path.abspath(options.src_dir)
+ options.download_dir = normalize_path(options.download_dir)
+
+ ensure_dir(options.download_dir)
+
+ with self._build_session(options) as session:
+ target_python = make_target_python(options)
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ target_python=target_python,
+ )
+ build_delete = (not (options.no_clean or options.build_dir))
+ if options.cache_dir and not check_path_owner(options.cache_dir):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned "
+ "by the current user and caching wheels has been "
+ "disabled. check the permissions and owner of that "
+ "directory. If executing pip with sudo, you may want "
+ "sudo's -H flag.",
+ options.cache_dir,
+ )
+ options.cache_dir = None
+
+ with RequirementTracker() as req_tracker, TempDirectory(
+ options.build_dir, delete=build_delete, kind="download"
+ ) as directory:
+
+ requirement_set = RequirementSet(
+ require_hashes=options.require_hashes,
+ )
+ self.populate_requirement_set(
+ requirement_set,
+ args,
+ options,
+ finder,
+ session,
+ self.name,
+ None
+ )
+
+ preparer = RequirementPreparer(
+ build_dir=directory.path,
+ src_dir=options.src_dir,
+ download_dir=options.download_dir,
+ wheel_download_dir=None,
+ progress_bar=options.progress_bar,
+ build_isolation=options.build_isolation,
+ req_tracker=req_tracker,
+ )
+
+ resolver = Resolver(
+ preparer=preparer,
+ finder=finder,
+ session=session,
+ wheel_cache=None,
+ use_user_site=False,
+ upgrade_strategy="to-satisfy-only",
+ force_reinstall=False,
+ ignore_dependencies=options.ignore_dependencies,
+ py_version_info=options.python_version,
+ ignore_requires_python=False,
+ ignore_installed=True,
+ isolated=options.isolated_mode,
+ )
+ resolver.resolve(requirement_set)
+
+ downloaded = ' '.join([
+ req.name for req in requirement_set.successfully_downloaded
+ ])
+ if downloaded:
+ logger.info('Successfully downloaded %s', downloaded)
+
+ # Clean up
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+
+ return requirement_set
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py
new file mode 100644
index 00000000..9fc5b046
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+
+import sys
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.models.format_control import FormatControl
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
+
+
+class FreezeCommand(Command):
+ """
+ Output installed packages in requirements format.
+
+    Packages are listed in a case-insensitive sorted order.
+ """
+ name = 'freeze'
+ usage = """
+ %prog [options]"""
+ summary = 'Output installed packages in requirements format.'
+ log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+ def __init__(self, *args, **kw):
+ super(FreezeCommand, self).__init__(*args, **kw)
+
+ self.cmd_opts.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help="Use the order in the given requirements file and its "
+ "comments when generating output. This option can be "
+ "used multiple times.")
+ self.cmd_opts.add_option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='URL',
+ help='URL for finding packages, which will be added to the '
+ 'output.')
+ self.cmd_opts.add_option(
+ '-l', '--local',
+ dest='local',
+ action='store_true',
+ default=False,
+ help='If in a virtualenv that has global access, do not output '
+ 'globally-installed packages.')
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user',
+ action='store_true',
+ default=False,
+ help='Only output packages installed in user-site.')
+ self.cmd_opts.add_option(cmdoptions.list_path())
+ self.cmd_opts.add_option(
+ '--all',
+ dest='freeze_all',
+ action='store_true',
+ help='Do not skip these packages in the output:'
+ ' %s' % ', '.join(DEV_PKGS))
+ self.cmd_opts.add_option(
+ '--exclude-editable',
+ dest='exclude_editable',
+ action='store_true',
+            help='Exclude editable packages from output.')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ format_control = FormatControl(set(), set())
+ wheel_cache = WheelCache(options.cache_dir, format_control)
+ skip = set(stdlib_pkgs)
+ if not options.freeze_all:
+ skip.update(DEV_PKGS)
+
+ cmdoptions.check_list_path_option(options)
+
+ freeze_kwargs = dict(
+ requirement=options.requirements,
+ find_links=options.find_links,
+ local_only=options.local,
+ user_only=options.user,
+ paths=options.path,
+ skip_regex=options.skip_requirements_regex,
+ isolated=options.isolated_mode,
+ wheel_cache=wheel_cache,
+ skip=skip,
+ exclude_editable=options.exclude_editable,
+ )
+
+ try:
+ for line in freeze(**freeze_kwargs):
+ sys.stdout.write(line + '\n')
+ finally:
+ wheel_cache.cleanup()
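
The generator itself is importable; a sketch of driving it with this command's defaults (an illustration under the assumption that freeze()'s remaining parameters keep the defaults the command assembles above):

    # Illustrative sketch: `pip freeze` without the CLI plumbing.
    from pip._internal.cache import WheelCache
    from pip._internal.models.format_control import FormatControl
    from pip._internal.operations.freeze import freeze

    wheel_cache = WheelCache(None, FormatControl(set(), set()))
    try:
        for line in freeze(wheel_cache=wheel_cache,
                           skip={'pip', 'setuptools', 'distribute', 'wheel'}):
            print(line)
    finally:
        wheel_cache.cleanup()
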
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 00000000..423440e9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,57 @@
+from __future__ import absolute_import
+
+import hashlib
+import logging
+import sys
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+ """
+ Compute a hash of a local package archive.
+
+ These can be used with --hash in a requirements file to do repeatable
+ installs.
+
+ """
+ name = 'hash'
+ usage = '%prog [options] <file> ...'
+ summary = 'Compute hashes of package archives.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(HashCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-a', '--algorithm',
+ dest='algorithm',
+ choices=STRONG_HASHES,
+ action='store',
+ default=FAVORITE_HASH,
+ help='The hash algorithm to use: one of %s' %
+ ', '.join(STRONG_HASHES))
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ self.parser.print_usage(sys.stderr)
+ return ERROR
+
+ algorithm = options.algorithm
+ for path in args:
+ logger.info('%s:\n--hash=%s:%s',
+ path, algorithm, _hash_of_file(path, algorithm))
+
+
+def _hash_of_file(path, algorithm):
+ """Return the hash digest of a file."""
+ with open(path, 'rb') as archive:
+ hash = hashlib.new(algorithm)
+ for chunk in read_chunks(archive):
+ hash.update(chunk)
+ return hash.hexdigest()
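
A short usage sketch (the archive path is hypothetical; FAVORITE_HASH is pip's default strong algorithm, currently sha256):

    # Illustrative sketch: reproducing a `pip hash` line for one file.
    from pip._internal.commands.hash import _hash_of_file
    from pip._internal.utils.hashes import FAVORITE_HASH

    digest = _hash_of_file('example-1.0.tar.gz', FAVORITE_HASH)  # hypothetical path
    print('--hash=%s:%s' % (FAVORITE_HASH, digest))
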
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py
new file mode 100644
index 00000000..49a81cbb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py
@@ -0,0 +1,37 @@
+from __future__ import absolute_import
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+
+
+class HelpCommand(Command):
+ """Show help for commands"""
+ name = 'help'
+ usage = """
+ %prog <command>"""
+ summary = 'Show help for commands.'
+ ignore_require_venv = True
+
+ def run(self, options, args):
+ from pip._internal.commands import commands_dict, get_similar_commands
+
+ try:
+ # 'pip help' with no args is handled by pip.__init__.parseopt()
+ cmd_name = args[0] # the command we need help for
+ except IndexError:
+ return SUCCESS
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = ['unknown command "%s"' % cmd_name]
+ if guess:
+ msg.append('maybe you meant "%s"' % guess)
+
+ raise CommandError(' - '.join(msg))
+
+ command = commands_dict[cmd_name]()
+ command.parser.print_help()
+
+ return SUCCESS
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py
new file mode 100644
index 00000000..ebeceacf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py
@@ -0,0 +1,580 @@
+from __future__ import absolute_import
+
+import errno
+import logging
+import operator
+import os
+import shutil
+from optparse import SUPPRESS_HELP
+
+from pip._vendor import pkg_resources
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import RequirementCommand
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import ERROR
+from pip._internal.exceptions import (
+ CommandError, InstallationError, PreviousBuildDirError,
+)
+from pip._internal.legacy_resolve import Resolver
+from pip._internal.locations import distutils_scheme
+from pip._internal.operations.check import check_install_conflicts
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet, install_given_reqs
+from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import (
+ ensure_dir, get_installed_version,
+ protect_pip_from_modification_on_windows,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.virtualenv import virtualenv_no_global
+from pip._internal.wheel import WheelBuilder
+
+logger = logging.getLogger(__name__)
+
+
+def is_wheel_installed():
+ """
+ Return whether the wheel package is installed.
+ """
+ try:
+ import wheel # noqa: F401
+ except ImportError:
+ return False
+
+ return True
+
+
+def build_wheels(builder, pep517_requirements, legacy_requirements, session):
+ """
+ Build wheels for requirements, depending on whether wheel is installed.
+ """
+ # We don't build wheels for legacy requirements if wheel is not installed.
+ should_build_legacy = is_wheel_installed()
+
+ # Always build PEP 517 requirements
+ build_failures = builder.build(
+ pep517_requirements,
+ session=session, autobuilding=True
+ )
+
+ if should_build_legacy:
+ # We don't care about failures building legacy
+ # requirements, as we'll fall through to a direct
+ # install for those.
+ builder.build(
+ legacy_requirements,
+ session=session, autobuilding=True
+ )
+
+ return build_failures
+
+
+class InstallCommand(RequirementCommand):
+ """
+ Install packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports installing from "requirements files," which provide
+ an easy way to specify a whole environment to be installed.
+ """
+ name = 'install'
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] [-e] <vcs project url> ...
+ %prog [options] [-e] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ summary = 'Install packages.'
+
+ def __init__(self, *args, **kw):
+ super(InstallCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.pre())
+
+ cmd_opts.add_option(cmdoptions.editable())
+ cmd_opts.add_option(
+ '-t', '--target',
+ dest='target_dir',
+ metavar='dir',
+ default=None,
+ help='Install packages into <dir>. '
+ 'By default this will not replace existing files/folders in '
+ '<dir>. Use --upgrade to replace existing packages in <dir> '
+ 'with new versions.'
+ )
+ cmdoptions.add_target_python_options(cmd_opts)
+
+ cmd_opts.add_option(
+ '--user',
+ dest='use_user_site',
+ action='store_true',
+ help="Install to the Python user install directory for your "
+ "platform. Typically ~/.local/, or %APPDATA%\\Python on "
+ "Windows. (See the Python documentation for site.USER_BASE "
+ "for full details.)")
+ cmd_opts.add_option(
+ '--no-user',
+ dest='use_user_site',
+ action='store_false',
+ help=SUPPRESS_HELP)
+ cmd_opts.add_option(
+ '--root',
+ dest='root_path',
+ metavar='dir',
+ default=None,
+ help="Install everything relative to this alternate root "
+ "directory.")
+ cmd_opts.add_option(
+ '--prefix',
+ dest='prefix_path',
+ metavar='dir',
+ default=None,
+ help="Installation prefix where lib, bin and other top-level "
+ "folders are placed")
+
+ cmd_opts.add_option(cmdoptions.build_dir())
+
+ cmd_opts.add_option(cmdoptions.src())
+
+ cmd_opts.add_option(
+ '-U', '--upgrade',
+ dest='upgrade',
+ action='store_true',
+ help='Upgrade all specified packages to the newest available '
+ 'version. The handling of dependencies depends on the '
+ 'upgrade-strategy used.'
+ )
+
+ cmd_opts.add_option(
+ '--upgrade-strategy',
+ dest='upgrade_strategy',
+ default='only-if-needed',
+ choices=['only-if-needed', 'eager'],
+            help='Determines how dependency upgrading should be handled '
+                 '[default: %default]. '
+                 '"eager" - dependencies are upgraded regardless of '
+                 'whether the currently installed version satisfies the '
+                 'requirements of the upgraded package(s). '
+                 '"only-if-needed" - dependencies are upgraded only when '
+                 'they do not satisfy the requirements of the upgraded '
+                 'package(s).'
+ )
+
+ cmd_opts.add_option(
+ '--force-reinstall',
+ dest='force_reinstall',
+ action='store_true',
+ help='Reinstall all packages even if they are already '
+ 'up-to-date.')
+
+ cmd_opts.add_option(
+ '-I', '--ignore-installed',
+ dest='ignore_installed',
+ action='store_true',
+ help='Ignore the installed packages (reinstalling instead).')
+
+ cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+ cmd_opts.add_option(cmdoptions.use_pep517())
+ cmd_opts.add_option(cmdoptions.no_use_pep517())
+
+ cmd_opts.add_option(cmdoptions.install_options())
+ cmd_opts.add_option(cmdoptions.global_options())
+
+ cmd_opts.add_option(
+ "--compile",
+ action="store_true",
+ dest="compile",
+ default=True,
+ help="Compile Python source files to bytecode",
+ )
+
+ cmd_opts.add_option(
+ "--no-compile",
+ action="store_false",
+ dest="compile",
+ help="Do not compile Python source files to bytecode",
+ )
+
+ cmd_opts.add_option(
+ "--no-warn-script-location",
+ action="store_false",
+ dest="warn_script_location",
+ default=True,
+ help="Do not warn when installing scripts outside PATH",
+ )
+ cmd_opts.add_option(
+ "--no-warn-conflicts",
+ action="store_false",
+ dest="warn_about_conflicts",
+ default=True,
+ help="Do not warn about broken dependencies",
+ )
+
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.prefer_binary())
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ cmdoptions.check_install_build_global(options)
+ upgrade_strategy = "to-satisfy-only"
+ if options.upgrade:
+ upgrade_strategy = options.upgrade_strategy
+
+ if options.build_dir:
+ options.build_dir = os.path.abspath(options.build_dir)
+
+ cmdoptions.check_dist_restriction(options, check_target=True)
+
+ options.src_dir = os.path.abspath(options.src_dir)
+ install_options = options.install_options or []
+ if options.use_user_site:
+ if options.prefix_path:
+ raise CommandError(
+ "Can not combine '--user' and '--prefix' as they imply "
+ "different installation locations"
+ )
+ if virtualenv_no_global():
+ raise InstallationError(
+ "Can not perform a '--user' install. User site-packages "
+ "are not visible in this virtualenv."
+ )
+ install_options.append('--user')
+ install_options.append('--prefix=')
+
+ target_temp_dir = TempDirectory(kind="target")
+ if options.target_dir:
+ options.ignore_installed = True
+ options.target_dir = os.path.abspath(options.target_dir)
+ if (os.path.exists(options.target_dir) and not
+ os.path.isdir(options.target_dir)):
+ raise CommandError(
+ "Target path exists but is not a directory, will not "
+ "continue."
+ )
+
+ # Create a target directory for using with the target option
+ target_temp_dir.create()
+ install_options.append('--home=' + target_temp_dir.path)
+
+ global_options = options.global_options or []
+
+ with self._build_session(options) as session:
+ target_python = make_target_python(options)
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ target_python=target_python,
+ ignore_requires_python=options.ignore_requires_python,
+ )
+ build_delete = (not (options.no_clean or options.build_dir))
+ wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+ if options.cache_dir and not check_path_owner(options.cache_dir):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned "
+ "by the current user and caching wheels has been "
+ "disabled. check the permissions and owner of that "
+ "directory. If executing pip with sudo, you may want "
+ "sudo's -H flag.",
+ options.cache_dir,
+ )
+ options.cache_dir = None
+
+ with RequirementTracker() as req_tracker, TempDirectory(
+ options.build_dir, delete=build_delete, kind="install"
+ ) as directory:
+ requirement_set = RequirementSet(
+ require_hashes=options.require_hashes,
+ check_supported_wheels=not options.target_dir,
+ )
+
+ try:
+ self.populate_requirement_set(
+ requirement_set, args, options, finder, session,
+ self.name, wheel_cache
+ )
+ preparer = RequirementPreparer(
+ build_dir=directory.path,
+ src_dir=options.src_dir,
+ download_dir=None,
+ wheel_download_dir=None,
+ progress_bar=options.progress_bar,
+ build_isolation=options.build_isolation,
+ req_tracker=req_tracker,
+ )
+
+ resolver = Resolver(
+ preparer=preparer,
+ finder=finder,
+ session=session,
+ wheel_cache=wheel_cache,
+ use_user_site=options.use_user_site,
+ upgrade_strategy=upgrade_strategy,
+ force_reinstall=options.force_reinstall,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_requires_python=options.ignore_requires_python,
+ ignore_installed=options.ignore_installed,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517
+ )
+ resolver.resolve(requirement_set)
+
+ protect_pip_from_modification_on_windows(
+ modifying_pip=requirement_set.has_requirement("pip")
+ )
+
+ # Consider legacy and PEP517-using requirements separately
+ legacy_requirements = []
+ pep517_requirements = []
+ for req in requirement_set.requirements.values():
+ if req.use_pep517:
+ pep517_requirements.append(req)
+ else:
+ legacy_requirements.append(req)
+
+ wheel_builder = WheelBuilder(
+ finder, preparer, wheel_cache,
+ build_options=[], global_options=[],
+ )
+
+ build_failures = build_wheels(
+ builder=wheel_builder,
+ pep517_requirements=pep517_requirements,
+ legacy_requirements=legacy_requirements,
+ session=session,
+ )
+
+ # If we're using PEP 517, we cannot do a direct install
+ # so we fail here.
+ if build_failures:
+ raise InstallationError(
+ "Could not build wheels for {} which use"
+ " PEP 517 and cannot be installed directly".format(
+ ", ".join(r.name for r in build_failures)))
+
+ to_install = resolver.get_installation_order(
+ requirement_set
+ )
+
+ # Consistency Checking of the package set we're installing.
+ should_warn_about_conflicts = (
+ not options.ignore_dependencies and
+ options.warn_about_conflicts
+ )
+ if should_warn_about_conflicts:
+ self._warn_about_conflicts(to_install)
+
+ # Don't warn about script install locations if
+ # --target has been specified
+ warn_script_location = options.warn_script_location
+ if options.target_dir:
+ warn_script_location = False
+
+ installed = install_given_reqs(
+ to_install,
+ install_options,
+ global_options,
+ root=options.root_path,
+ home=target_temp_dir.path,
+ prefix=options.prefix_path,
+ pycompile=options.compile,
+ warn_script_location=warn_script_location,
+ use_user_site=options.use_user_site,
+ )
+
+ lib_locations = get_lib_location_guesses(
+ user=options.use_user_site,
+ home=target_temp_dir.path,
+ root=options.root_path,
+ prefix=options.prefix_path,
+ isolated=options.isolated_mode,
+ )
+ working_set = pkg_resources.WorkingSet(lib_locations)
+
+ reqs = sorted(installed, key=operator.attrgetter('name'))
+ items = []
+ for req in reqs:
+ item = req.name
+ try:
+ installed_version = get_installed_version(
+ req.name, working_set=working_set
+ )
+ if installed_version:
+ item += '-' + installed_version
+ except Exception:
+ pass
+ items.append(item)
+ installed = ' '.join(items)
+ if installed:
+ logger.info('Successfully installed %s', installed)
+ except EnvironmentError as error:
+ show_traceback = (self.verbosity >= 1)
+
+ message = create_env_error_message(
+ error, show_traceback, options.use_user_site,
+ )
+ logger.error(message, exc_info=show_traceback)
+
+ return ERROR
+ except PreviousBuildDirError:
+ options.no_clean = True
+ raise
+ finally:
+ # Clean up
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+ wheel_cache.cleanup()
+
+ if options.target_dir:
+ self._handle_target_dir(
+ options.target_dir, target_temp_dir, options.upgrade
+ )
+ return requirement_set
+
+ def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
+ ensure_dir(target_dir)
+
+ # Checking both purelib and platlib directories for installed
+ # packages to be moved to target directory
+ lib_dir_list = []
+
+ with target_temp_dir:
+ scheme = distutils_scheme('', home=target_temp_dir.path)
+ purelib_dir = scheme['purelib']
+ platlib_dir = scheme['platlib']
+ data_dir = scheme['data']
+
+ if os.path.exists(purelib_dir):
+ lib_dir_list.append(purelib_dir)
+ if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
+ lib_dir_list.append(platlib_dir)
+ if os.path.exists(data_dir):
+ lib_dir_list.append(data_dir)
+
+ for lib_dir in lib_dir_list:
+ for item in os.listdir(lib_dir):
+ if lib_dir == data_dir:
+ ddir = os.path.join(data_dir, item)
+ if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
+ continue
+ target_item_dir = os.path.join(target_dir, item)
+ if os.path.exists(target_item_dir):
+ if not upgrade:
+ logger.warning(
+ 'Target directory %s already exists. Specify '
+ '--upgrade to force replacement.',
+ target_item_dir
+ )
+ continue
+ if os.path.islink(target_item_dir):
+ logger.warning(
+ 'Target directory %s already exists and is '
+ 'a link. Pip will not automatically replace '
+ 'links, please remove if replacement is '
+ 'desired.',
+ target_item_dir
+ )
+ continue
+ if os.path.isdir(target_item_dir):
+ shutil.rmtree(target_item_dir)
+ else:
+ os.remove(target_item_dir)
+
+ shutil.move(
+ os.path.join(lib_dir, item),
+ target_item_dir
+ )
+
+ def _warn_about_conflicts(self, to_install):
+ try:
+ package_set, _dep_info = check_install_conflicts(to_install)
+ except Exception:
+ logger.error("Error checking for conflicts.", exc_info=True)
+ return
+ missing, conflicting = _dep_info
+
+ # NOTE: There is some duplication here from pip check
+ for project_name in missing:
+ version = package_set[project_name][0]
+ for dependency in missing[project_name]:
+ logger.critical(
+ "%s %s requires %s, which is not installed.",
+ project_name, version, dependency[1],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name][0]
+ for dep_name, dep_version, req in conflicting[project_name]:
+ logger.critical(
+ "%s %s has requirement %s, but you'll have %s %s which is "
+ "incompatible.",
+ project_name, version, req, dep_name, dep_version,
+ )
+
+
+def get_lib_location_guesses(*args, **kwargs):
+ scheme = distutils_scheme('', *args, **kwargs)
+ return [scheme['purelib'], scheme['platlib']]
+
+
+def create_env_error_message(error, show_traceback, using_user_site):
+ """Format an error message for an EnvironmentError
+
+ It may occur anytime during the execution of the install command.
+ """
+ parts = []
+
+ # Mention the error if we are not going to show a traceback
+ parts.append("Could not install packages due to an EnvironmentError")
+ if not show_traceback:
+ parts.append(": ")
+ parts.append(str(error))
+ else:
+ parts.append(".")
+
+    # Split the error indication from the helper message (if any)
+ parts[-1] += "\n"
+
+ # Suggest useful actions to the user:
+ # (1) using user site-packages or (2) verifying the permissions
+ if error.errno == errno.EACCES:
+ user_option_part = "Consider using the `--user` option"
+ permissions_part = "Check the permissions"
+
+ if not using_user_site:
+ parts.extend([
+ user_option_part, " or ",
+ permissions_part.lower(),
+ ])
+ else:
+ parts.append(permissions_part)
+ parts.append(".\n")
+
+ return "".join(parts).strip() + "\n"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py
new file mode 100644
index 00000000..cf71b13e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py
@@ -0,0 +1,311 @@
+from __future__ import absolute_import
+
+import json
+import logging
+
+from pip._vendor import six
+from pip._vendor.six.moves import zip_longest
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_search_scope
+from pip._internal.exceptions import CommandError
+from pip._internal.index import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.utils.misc import (
+ dist_is_editable, get_installed_distributions,
+)
+from pip._internal.utils.packaging import get_installer
+
+logger = logging.getLogger(__name__)
+
+
+class ListCommand(Command):
+ """
+ List installed packages, including editables.
+
+ Packages are listed in a case-insensitive sorted order.
+ """
+ name = 'list'
+ usage = """
+ %prog [options]"""
+ summary = 'List installed packages.'
+
+ def __init__(self, *args, **kw):
+ super(ListCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '-o', '--outdated',
+ action='store_true',
+ default=False,
+ help='List outdated packages')
+ cmd_opts.add_option(
+ '-u', '--uptodate',
+ action='store_true',
+ default=False,
+ help='List uptodate packages')
+ cmd_opts.add_option(
+ '-e', '--editable',
+ action='store_true',
+ default=False,
+ help='List editable projects.')
+ cmd_opts.add_option(
+ '-l', '--local',
+ action='store_true',
+ default=False,
+ help=('If in a virtualenv that has global access, do not list '
+ 'globally-installed packages.'),
+ )
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user',
+ action='store_true',
+ default=False,
+ help='Only output packages installed in user-site.')
+ cmd_opts.add_option(cmdoptions.list_path())
+ cmd_opts.add_option(
+ '--pre',
+ action='store_true',
+ default=False,
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
+
+ cmd_opts.add_option(
+ '--format',
+ action='store',
+ dest='list_format',
+ default="columns",
+ choices=('columns', 'freeze', 'json'),
+ help="Select the output format among: columns (default), freeze, "
+ "or json",
+ )
+
+ cmd_opts.add_option(
+ '--not-required',
+ action='store_true',
+ dest='not_required',
+ help="List packages that are not dependencies of "
+ "installed packages.",
+ )
+
+ cmd_opts.add_option(
+ '--exclude-editable',
+ action='store_false',
+ dest='include_editable',
+ help='Exclude editable packages from output.',
+ )
+ cmd_opts.add_option(
+ '--include-editable',
+ action='store_true',
+ dest='include_editable',
+ help='Include editable packages in output.',
+ default=True,
+ )
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group, self.parser
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def _build_package_finder(self, options, session):
+ """
+ Create a package finder appropriate to this list command.
+ """
+ search_scope = make_search_scope(options)
+
+ # Pass allow_yanked=False to ignore yanked versions.
+ selection_prefs = SelectionPreferences(
+ allow_yanked=False,
+ allow_all_prereleases=options.pre,
+ )
+
+ return PackageFinder.create(
+ search_scope=search_scope,
+ selection_prefs=selection_prefs,
+ trusted_hosts=options.trusted_hosts,
+ session=session,
+ )
+
+ def run(self, options, args):
+ if options.outdated and options.uptodate:
+ raise CommandError(
+ "Options --outdated and --uptodate cannot be combined.")
+
+ cmdoptions.check_list_path_option(options)
+
+ packages = get_installed_distributions(
+ local_only=options.local,
+ user_only=options.user,
+ editables_only=options.editable,
+ include_editables=options.include_editable,
+ paths=options.path,
+ )
+
+ # get_not_required must be called first in order to find and
+ # filter out all dependencies correctly. Otherwise a package
+ # can't be identified as a requirement because some parent
+ # packages could already have been filtered out.
+ if options.not_required:
+ packages = self.get_not_required(packages, options)
+
+ if options.outdated:
+ packages = self.get_outdated(packages, options)
+ elif options.uptodate:
+ packages = self.get_uptodate(packages, options)
+
+ self.output_package_listing(packages, options)
+
+ def get_outdated(self, packages, options):
+ return [
+ dist for dist in self.iter_packages_latest_infos(packages, options)
+ if dist.latest_version > dist.parsed_version
+ ]
+
+ def get_uptodate(self, packages, options):
+ return [
+ dist for dist in self.iter_packages_latest_infos(packages, options)
+ if dist.latest_version == dist.parsed_version
+ ]
+
+ def get_not_required(self, packages, options):
+ dep_keys = set()
+ for dist in packages:
+ dep_keys.update(requirement.key for requirement in dist.requires())
+ return {pkg for pkg in packages if pkg.key not in dep_keys}
+
+ def iter_packages_latest_infos(self, packages, options):
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(options, session)
+
+ for dist in packages:
+ typ = 'unknown'
+ all_candidates = finder.find_all_candidates(dist.key)
+ if not options.pre:
+ # Remove prereleases
+ all_candidates = [candidate for candidate in all_candidates
+ if not candidate.version.is_prerelease]
+
+ evaluator = finder.make_candidate_evaluator(
+ project_name=dist.project_name,
+ )
+ best_candidate = evaluator.get_best_candidate(all_candidates)
+ if best_candidate is None:
+ continue
+
+ remote_version = best_candidate.version
+ if best_candidate.link.is_wheel:
+ typ = 'wheel'
+ else:
+ typ = 'sdist'
+ # This is dirty but makes the rest of the code much cleaner
+ dist.latest_version = remote_version
+ dist.latest_filetype = typ
+ yield dist
+
+ def output_package_listing(self, packages, options):
+ packages = sorted(
+ packages,
+ key=lambda dist: dist.project_name.lower(),
+ )
+ if options.list_format == 'columns' and packages:
+ data, header = format_for_columns(packages, options)
+ self.output_package_listing_columns(data, header)
+ elif options.list_format == 'freeze':
+ for dist in packages:
+ if options.verbose >= 1:
+ logger.info("%s==%s (%s)", dist.project_name,
+ dist.version, dist.location)
+ else:
+ logger.info("%s==%s", dist.project_name, dist.version)
+ elif options.list_format == 'json':
+ logger.info(format_for_json(packages, options))
+
+ def output_package_listing_columns(self, data, header):
+ # insert the header first: we need to know the size of column names
+ if len(data) > 0:
+ data.insert(0, header)
+
+ pkg_strings, sizes = tabulate(data)
+
+ # Create and add a separator.
+ if len(data) > 0:
+ pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
+
+ for val in pkg_strings:
+ logger.info(val)
+
+
+def tabulate(vals):
+ # From pfmoore on GitHub:
+ # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
+ assert len(vals) > 0
+
+ sizes = [0] * max(len(x) for x in vals)
+ for row in vals:
+ sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
+
+ result = []
+ for row in vals:
+ display = " ".join([str(c).ljust(s) if c is not None else ''
+ for s, c in zip_longest(sizes, row)])
+ result.append(display)
+
+ return result, sizes
+
+
+def format_for_columns(pkgs, options):
+ """
+ Convert the package data into something usable
+ by output_package_listing_columns.
+ """
+ running_outdated = options.outdated
+ # Adjust the header for the `pip list --outdated` case.
+ if running_outdated:
+ header = ["Package", "Version", "Latest", "Type"]
+ else:
+ header = ["Package", "Version"]
+
+ data = []
+ if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
+ header.append("Location")
+ if options.verbose >= 1:
+ header.append("Installer")
+
+ for proj in pkgs:
+ # if we're working on the 'outdated' list, separate out the
+ # latest_version and type
+ row = [proj.project_name, proj.version]
+
+ if running_outdated:
+ row.append(proj.latest_version)
+ row.append(proj.latest_filetype)
+
+ if options.verbose >= 1 or dist_is_editable(proj):
+ row.append(proj.location)
+ if options.verbose >= 1:
+ row.append(get_installer(proj))
+
+ data.append(row)
+
+ return data, header
+
+
+def format_for_json(packages, options):
+ data = []
+ for dist in packages:
+ info = {
+ 'name': dist.project_name,
+ 'version': six.text_type(dist.version),
+ }
+ if options.verbose >= 1:
+ info['location'] = dist.location
+ info['installer'] = get_installer(dist)
+ if options.outdated:
+ info['latest_version'] = six.text_type(dist.latest_version)
+ info['latest_filetype'] = dist.latest_filetype
+ data.append(info)
+ return json.dumps(data)
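
The column layout in tabulate() above is two zip_longest passes: one to grow the per-column widths, one to left-justify each cell. The same algorithm rerun standalone with stdlib itertools (in place of the vendored six.moves import):

    from itertools import zip_longest

    rows = [["Package", "Version"], ["pip", "19.1.1"], ["setuptools", "41.0.1"]]
    sizes = [0] * max(len(r) for r in rows)
    for row in rows:  # pass 1: track the widest cell per column
        sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
    for row in rows:  # pass 2: pad each cell to its column width
        print(" ".join(str(c).ljust(s) if c is not None else ""
                       for s, c in zip_longest(sizes, row)))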
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 00000000..58027112
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,139 @@
+from __future__ import absolute_import
+
+import logging
+import sys
+import textwrap
+from collections import OrderedDict
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.version import parse as parse_version
+ # NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
+ # why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.download import PipXmlrpcTransport
+from pip._internal.exceptions import CommandError
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import get_terminal_size
+from pip._internal.utils.logging import indent_log
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command):
+ """Search for PyPI packages whose name or summary contains <query>."""
+ name = 'search'
+ usage = """
+ %prog [options] <query>"""
+ summary = 'Search PyPI for packages.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(SearchCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-i', '--index',
+ dest='index',
+ metavar='URL',
+ default=PyPI.pypi_url,
+ help='Base URL of Python Package Index (default %default)')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ raise CommandError('Missing required argument (search query).')
+ query = args
+ pypi_hits = self.search(query, options)
+ hits = transform_hits(pypi_hits)
+
+ terminal_width = None
+ if sys.stdout.isatty():
+ terminal_width = get_terminal_size()[0]
+
+ print_results(hits, terminal_width=terminal_width)
+ if pypi_hits:
+ return SUCCESS
+ return NO_MATCHES_FOUND
+
+ def search(self, query, options):
+ index_url = options.index
+ with self._build_session(options) as session:
+ transport = PipXmlrpcTransport(index_url, session)
+ pypi = xmlrpc_client.ServerProxy(index_url, transport)
+ hits = pypi.search({'name': query, 'summary': query}, 'or')
+ return hits
+
+
+def transform_hits(hits):
+ """
+ The list from pypi is really a list of versions. We want a list of
+ packages with the list of versions stored inline. This converts the
+ list from pypi into one we can use.
+ """
+ packages = OrderedDict()
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary']
+ version = hit['version']
+
+ if name not in packages:
+ packages[name] = {
+ 'name': name,
+ 'summary': summary,
+ 'versions': [version],
+ }
+ else:
+ packages[name]['versions'].append(version)
+
+ # if this is the highest version, replace summary and score
+ if version == highest_version(packages[name]['versions']):
+ packages[name]['summary'] = summary
+
+ return list(packages.values())
+
+
+def print_results(hits, name_column_width=None, terminal_width=None):
+ if not hits:
+ return
+ if name_column_width is None:
+ name_column_width = max([
+ len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
+ for hit in hits
+ ]) + 4
+
+ installed_packages = [p.project_name for p in pkg_resources.working_set]
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary'] or ''
+ latest = highest_version(hit.get('versions', ['-']))
+ if terminal_width is not None:
+ target_width = terminal_width - name_column_width - 5
+ if target_width > 10:
+ # wrap and indent summary to fit terminal
+ summary = textwrap.wrap(summary, target_width)
+ summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
+
+ line = '%-*s - %s' % (name_column_width,
+ '%s (%s)' % (name, latest), summary)
+ try:
+ logger.info(line)
+ if name in installed_packages:
+ dist = pkg_resources.get_distribution(name)
+ with indent_log():
+ if dist.version == latest:
+ logger.info('INSTALLED: %s (latest)', dist.version)
+ else:
+ logger.info('INSTALLED: %s', dist.version)
+ if parse_version(latest).pre:
+ logger.info('LATEST: %s (pre-release; install'
+ ' with "pip install --pre")', latest)
+ else:
+ logger.info('LATEST: %s', latest)
+ except UnicodeEncodeError:
+ pass
+
+
+def highest_version(versions):
+ return max(versions, key=parse_version)
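
highest_version() relies on PEP 440 ordering via parse_version, which differs from plain string comparison. A small check, assuming the standalone packaging distribution is installed in place of the vendored copy:

    from packaging.version import parse as parse_version

    versions = ["1.9", "1.10", "1.10rc1"]
    print(max(versions))                     # '1.9'  (lexicographic order)
    print(max(versions, key=parse_version))  # '1.10' (1.10 > 1.10rc1 > 1.9)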
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py
new file mode 100644
index 00000000..a18a9020
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,168 @@
+from __future__ import absolute_import
+
+import logging
+import os
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+ """
+ Show information about one or more installed packages.
+
+ The output is in RFC-compliant mail header format.
+ """
+ name = 'show'
+ usage = """
+ %prog [options] <package> ..."""
+ summary = 'Show information about installed packages.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(ShowCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-f', '--files',
+ dest='files',
+ action='store_true',
+ default=False,
+ help='Show the full list of installed files for each package.')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ logger.warning('ERROR: Please provide a package name or names.')
+ return ERROR
+ query = args
+
+ results = search_packages_info(query)
+ if not print_results(
+ results, list_files=options.files, verbose=options.verbose):
+ return ERROR
+ return SUCCESS
+
+
+def search_packages_info(query):
+ """
+ Gather details from installed distributions. Print distribution name,
+ version, location, and installed files. Listing installed files requires
+ a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
+ directory.
+ """
+ installed = {}
+ for p in pkg_resources.working_set:
+ installed[canonicalize_name(p.project_name)] = p
+
+ query_names = [canonicalize_name(name) for name in query]
+
+ for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
+ package = {
+ 'name': dist.project_name,
+ 'version': dist.version,
+ 'location': dist.location,
+ 'requires': [dep.project_name for dep in dist.requires()],
+ }
+ file_list = None
+ metadata = None
+ if isinstance(dist, pkg_resources.DistInfoDistribution):
+ # RECORD should be part of the .dist-info metadata
+ if dist.has_metadata('RECORD'):
+ lines = dist.get_metadata_lines('RECORD')
+ paths = [l.split(',')[0] for l in lines]
+ paths = [os.path.join(dist.location, p) for p in paths]
+ file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+ if dist.has_metadata('METADATA'):
+ metadata = dist.get_metadata('METADATA')
+ else:
+ # Otherwise use pip's log for .egg-info distributions
+ if dist.has_metadata('installed-files.txt'):
+ paths = dist.get_metadata_lines('installed-files.txt')
+ paths = [os.path.join(dist.egg_info, p) for p in paths]
+ file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+ if dist.has_metadata('PKG-INFO'):
+ metadata = dist.get_metadata('PKG-INFO')
+
+ if dist.has_metadata('entry_points.txt'):
+ entry_points = dist.get_metadata_lines('entry_points.txt')
+ package['entry_points'] = entry_points
+
+ if dist.has_metadata('INSTALLER'):
+ for line in dist.get_metadata_lines('INSTALLER'):
+ if line.strip():
+ package['installer'] = line.strip()
+ break
+
+ # @todo: Should pkg_resources.Distribution have a
+ # `get_pkg_info` method?
+ feed_parser = FeedParser()
+ feed_parser.feed(metadata)
+ pkg_info_dict = feed_parser.close()
+ for key in ('metadata-version', 'summary',
+ 'home-page', 'author', 'author-email', 'license'):
+ package[key] = pkg_info_dict.get(key)
+
+ # It looks like FeedParser cannot deal with repeated headers
+ classifiers = []
+ for line in metadata.splitlines():
+ if line.startswith('Classifier: '):
+ classifiers.append(line[len('Classifier: '):])
+ package['classifiers'] = classifiers
+
+ if file_list:
+ package['files'] = sorted(file_list)
+ yield package
+
+
+def print_results(distributions, list_files=False, verbose=False):
+ """
+ Print the information from the installed distributions found.
+ """
+ results_printed = False
+ for i, dist in enumerate(distributions):
+ results_printed = True
+ if i > 0:
+ logger.info("---")
+
+ name = dist.get('name', '')
+ required_by = [
+ pkg.project_name for pkg in pkg_resources.working_set
+ if name in [required.name for required in pkg.requires()]
+ ]
+
+ logger.info("Name: %s", name)
+ logger.info("Version: %s", dist.get('version', ''))
+ logger.info("Summary: %s", dist.get('summary', ''))
+ logger.info("Home-page: %s", dist.get('home-page', ''))
+ logger.info("Author: %s", dist.get('author', ''))
+ logger.info("Author-email: %s", dist.get('author-email', ''))
+ logger.info("License: %s", dist.get('license', ''))
+ logger.info("Location: %s", dist.get('location', ''))
+ logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
+ logger.info("Required-by: %s", ', '.join(required_by))
+
+ if verbose:
+ logger.info("Metadata-Version: %s",
+ dist.get('metadata-version', ''))
+ logger.info("Installer: %s", dist.get('installer', ''))
+ logger.info("Classifiers:")
+ for classifier in dist.get('classifiers', []):
+ logger.info(" %s", classifier)
+ logger.info("Entry-points:")
+ for entry in dist.get('entry_points', []):
+ logger.info(" %s", entry.strip())
+ if list_files:
+ logger.info("Files:")
+ for line in dist.get('files', []):
+ logger.info(" %s", line.strip())
+ if "files" not in dist:
+ logger.info("Cannot locate installed-files.txt")
+ return results_printed
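
The manual Classifier scan in search_packages_info() exists because dict-style access on the parsed message yields only the first occurrence of a repeated header. A standalone illustration:

    from email.parser import FeedParser

    metadata = ("Metadata-Version: 2.1\n"
                "Name: example\n"
                "Classifier: Topic :: Utilities\n"
                "Classifier: Intended Audience :: Developers\n")
    parser = FeedParser()
    parser.feed(metadata)
    msg = parser.close()
    print(msg.get("classifier"))      # 'Topic :: Utilities' only
    print(msg.get_all("classifier"))  # both values, via a different accessor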
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py
new file mode 100644
index 00000000..0cd6f54b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import parse_requirements
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import protect_pip_from_modification_on_windows
+
+
+class UninstallCommand(Command):
+ """
+ Uninstall packages.
+
+ pip is able to uninstall most installed packages. Known exceptions are:
+
+ - Pure distutils packages installed with ``python setup.py install``, which
+ leave behind no metadata to determine what files were installed.
+ - Script wrappers installed by ``python setup.py develop``.
+ """
+ name = 'uninstall'
+ usage = """
+ %prog [options] <package> ...
+ %prog [options] -r <requirements file> ..."""
+ summary = 'Uninstall packages.'
+
+ def __init__(self, *args, **kw):
+ super(UninstallCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Uninstall all the packages listed in the given requirements '
+ 'file. This option can be used multiple times.',
+ )
+ self.cmd_opts.add_option(
+ '-y', '--yes',
+ dest='yes',
+ action='store_true',
+ help="Don't ask for confirmation of uninstall deletions.")
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ with self._build_session(options) as session:
+ reqs_to_uninstall = {}
+ for name in args:
+ req = install_req_from_line(
+ name, isolated=options.isolated_mode,
+ )
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ for filename in options.requirements:
+ for req in parse_requirements(
+ filename,
+ options=options,
+ session=session):
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ if not reqs_to_uninstall:
+ raise InstallationError(
+ 'You must give at least one requirement to %(name)s (see '
+ '"pip help %(name)s")' % dict(name=self.name)
+ )
+
+ protect_pip_from_modification_on_windows(
+ modifying_pip="pip" in reqs_to_uninstall
+ )
+
+ for req in reqs_to_uninstall.values():
+ uninstall_pathset = req.uninstall(
+ auto_confirm=options.yes, verbose=self.verbosity > 0,
+ )
+ if uninstall_pathset:
+ uninstall_pathset.commit()
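
reqs_to_uninstall is keyed by canonicalize_name, so different spellings of one project collapse into a single uninstall entry. With the standalone packaging distribution (assumed installed), the normalization looks like:

    from packaging.utils import canonicalize_name

    for name in ("Django", "django", "Typing.Extensions", "typing_extensions"):
        print(name, "->", canonicalize_name(name))
    # Django            -> django             (case folds)
    # Typing.Extensions -> typing-extensions  (dots and underscores
    # typing_extensions -> typing-extensions   become hyphens)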
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 00000000..97f3b148
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,181 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import RequirementCommand
+from pip._internal.exceptions import CommandError, PreviousBuildDirError
+from pip._internal.legacy_resolve import Resolver
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel import WheelBuilder
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+ """
+ Build Wheel archives for your requirements and dependencies.
+
+ Wheel is a built-package format, and offers the advantage of not
+ recompiling your software during every install. For more details, see the
+ wheel docs: https://wheel.readthedocs.io/en/latest/
+
+ Requirements: setuptools>=0.8, and wheel.
+
+ 'pip wheel' uses the bdist_wheel setuptools extension from the wheel
+ package to build individual wheels.
+
+ """
+
+ name = 'wheel'
+ usage = """
+ %prog [options] <requirement specifier> ...
+ %prog [options] -r <requirements file> ...
+ %prog [options] [-e] <vcs project url> ...
+ %prog [options] [-e] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ summary = 'Build wheels from your requirements.'
+
+ def __init__(self, *args, **kw):
+ super(WheelCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '-w', '--wheel-dir',
+ dest='wheel_dir',
+ metavar='dir',
+ default=os.curdir,
+ help=("Build wheels into <dir>, where the default is the "
+ "current working directory."),
+ )
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.prefer_binary())
+ cmd_opts.add_option(
+ '--build-option',
+ dest='build_options',
+ metavar='options',
+ action='append',
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
+ )
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+ cmd_opts.add_option(cmdoptions.use_pep517())
+ cmd_opts.add_option(cmdoptions.no_use_pep517())
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.editable())
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.src())
+ cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.build_dir())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+
+ cmd_opts.add_option(
+ '--global-option',
+ dest='global_options',
+ action='append',
+ metavar='options',
+ help="Extra global options to be supplied to the setup.py "
+ "call before the 'bdist_wheel' command.")
+
+ cmd_opts.add_option(
+ '--pre',
+ action='store_true',
+ default=False,
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
+
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ cmdoptions.check_install_build_global(options)
+
+ if options.build_dir:
+ options.build_dir = os.path.abspath(options.build_dir)
+
+ options.src_dir = os.path.abspath(options.src_dir)
+
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(options, session)
+ build_delete = (not (options.no_clean or options.build_dir))
+ wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+ with RequirementTracker() as req_tracker, TempDirectory(
+ options.build_dir, delete=build_delete, kind="wheel"
+ ) as directory:
+
+ requirement_set = RequirementSet(
+ require_hashes=options.require_hashes,
+ )
+
+ try:
+ self.populate_requirement_set(
+ requirement_set, args, options, finder, session,
+ self.name, wheel_cache
+ )
+
+ preparer = RequirementPreparer(
+ build_dir=directory.path,
+ src_dir=options.src_dir,
+ download_dir=None,
+ wheel_download_dir=options.wheel_dir,
+ progress_bar=options.progress_bar,
+ build_isolation=options.build_isolation,
+ req_tracker=req_tracker,
+ )
+
+ resolver = Resolver(
+ preparer=preparer,
+ finder=finder,
+ session=session,
+ wheel_cache=wheel_cache,
+ use_user_site=False,
+ upgrade_strategy="to-satisfy-only",
+ force_reinstall=False,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_requires_python=options.ignore_requires_python,
+ ignore_installed=True,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517
+ )
+ resolver.resolve(requirement_set)
+
+ # build wheels
+ wb = WheelBuilder(
+ finder, preparer, wheel_cache,
+ build_options=options.build_options or [],
+ global_options=options.global_options or [],
+ no_clean=options.no_clean,
+ )
+ build_failures = wb.build(
+ requirement_set.requirements.values(), session=session,
+ )
+ if len(build_failures) != 0:
+ raise CommandError(
+ "Failed to build one or more wheels"
+ )
+ except PreviousBuildDirError:
+ options.no_clean = True
+ raise
+ finally:
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+ wheel_cache.cleanup()
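
One subtlety in run() above: the temporary build tree is deleted only when the user asked for neither --no-clean nor an explicit --build-dir. The expression's truth table, spelled out:

    for no_clean in (False, True):
        for build_dir in (None, "/tmp/custom-build"):  # illustrative path
            build_delete = not (no_clean or build_dir)
            print(no_clean, build_dir, "->", build_delete)
    # Only no_clean=False with build_dir=None yields build_delete=True.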
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py
new file mode 100644
index 00000000..437e92ee
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,417 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+ As written in config files.
+- value
+ Value associated with a name
+- key
+ Name combined with its section (section.name)
+- variant
+ A single word describing where the configuration key-value pair came from
+"""
+
+import locale
+import logging
+import os
+import sys
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.exceptions import (
+ ConfigurationError, ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS, expanduser
+from pip._internal.utils.misc import ensure_dir, enum
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, NewType, Optional, Tuple
+ )
+
+ RawConfigParser = configparser.RawConfigParser # Shorthand
+ Kind = NewType("Kind", str)
+
+logger = logging.getLogger(__name__)
+
+
+# NOTE: Maybe use the optionx attribute to normalize keynames.
+def _normalize_name(name):
+ # type: (str) -> str
+ """Make a name consistent regardless of source (environment or file)
+ """
+ name = name.lower().replace('_', '-')
+ if name.startswith('--'):
+ name = name[2:] # only prefer long opts
+ return name
+
+
+def _disassemble_key(name):
+ # type: (str) -> List[str]
+ if "." not in name:
+ error_message = (
+ "Key does not contain dot separated section and key. "
+ "Perhaps you wanted to use 'global.{}' instead?"
+ ).format(name)
+ raise ConfigurationError(error_message)
+ return name.split(".", 1)
+
+
+# The kinds of configurations there are.
+kinds = enum(
+ USER="user", # User Specific
+ GLOBAL="global", # System Wide
+ SITE="site", # [Virtual] Environment Specific
+ ENV="env", # from PIP_CONFIG_FILE
+ ENV_VAR="env-var", # from Environment Variables
+)
+
+
+CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
+
+
+def get_configuration_files():
+ global_config_files = [
+ os.path.join(path, CONFIG_BASENAME)
+ for path in appdirs.site_config_dirs('pip')
+ ]
+
+ site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+ legacy_config_file = os.path.join(
+ expanduser('~'),
+ 'pip' if WINDOWS else '.pip',
+ CONFIG_BASENAME,
+ )
+ new_config_file = os.path.join(
+ appdirs.user_config_dir("pip"), CONFIG_BASENAME
+ )
+ return {
+ kinds.GLOBAL: global_config_files,
+ kinds.SITE: [site_config_file],
+ kinds.USER: [legacy_config_file, new_config_file],
+ }
+
+
+class Configuration(object):
+ """Handles management of configuration.
+
+ Provides an interface to accessing and managing configuration files.
+
+ This class provides an API that takes "section.key-name" style keys and
+ stores the value associated with it as "key-name" under the section
+ "section".
+
+ This allows for a clean interface wherein both the section and the
+ key-name are preserved in an easy-to-manage form in the configuration
+ files, and the stored data remains easy to work with.
+ """
+
+ def __init__(self, isolated, load_only=None):
+ # type: (bool, Kind) -> None
+ super(Configuration, self).__init__()
+
+ _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None]
+ if load_only not in _valid_load_only:
+ raise ConfigurationError(
+ "Got invalid value for load_only - should be one of {}".format(
+ ", ".join(map(repr, _valid_load_only[:-1]))
+ )
+ )
+ self.isolated = isolated # type: bool
+ self.load_only = load_only # type: Optional[Kind]
+
+ # The order here determines the override order.
+ self._override_order = [
+ kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+ ]
+
+ self._ignore_env_names = ["version", "help"]
+
+ # Because we keep track of where we got the data from
+ self._parsers = {
+ variant: [] for variant in self._override_order
+ } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
+ self._config = {
+ variant: {} for variant in self._override_order
+ } # type: Dict[Kind, Dict[str, Any]]
+ self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
+
+ def load(self):
+ # type: () -> None
+ """Loads configuration from configuration files and environment
+ """
+ self._load_config_files()
+ if not self.isolated:
+ self._load_environment_vars()
+
+ def get_file_to_edit(self):
+ # type: () -> Optional[str]
+ """Returns the file with highest priority in configuration
+ """
+ assert self.load_only is not None, \
+ "Need to be specified a file to be editing"
+
+ try:
+ return self._get_parser_to_modify()[0]
+ except IndexError:
+ return None
+
+ def items(self):
+ # type: () -> Iterable[Tuple[str, Any]]
+ """Returns key-value pairs like dict.items() representing the loaded
+ configuration
+ """
+ return self._dictionary.items()
+
+ def get_value(self, key):
+ # type: (str) -> Any
+ """Get a value from the configuration.
+ """
+ try:
+ return self._dictionary[key]
+ except KeyError:
+ raise ConfigurationError("No such key - {}".format(key))
+
+ def set_value(self, key, value):
+ # type: (str, Any) -> None
+ """Modify a value in the configuration.
+ """
+ self._ensure_have_load_only()
+
+ fname, parser = self._get_parser_to_modify()
+
+ if parser is not None:
+ section, name = _disassemble_key(key)
+
+ # Modify the parser and the configuration
+ if not parser.has_section(section):
+ parser.add_section(section)
+ parser.set(section, name, value)
+
+ self._config[self.load_only][key] = value
+ self._mark_as_modified(fname, parser)
+
+ def unset_value(self, key):
+ # type: (str) -> None
+ """Unset a value in the configuration.
+ """
+ self._ensure_have_load_only()
+
+ if key not in self._config[self.load_only]:
+ raise ConfigurationError("No such key - {}".format(key))
+
+ fname, parser = self._get_parser_to_modify()
+
+ if parser is not None:
+ section, name = _disassemble_key(key)
+
+ # Remove the key in the parser
+ modified_something = False
+ if parser.has_section(section):
+ # Returns whether the option was removed or not
+ modified_something = parser.remove_option(section, name)
+
+ if modified_something:
+ # name removed from parser, section may now be empty
+ section_iter = iter(parser.items(section))
+ try:
+ val = next(section_iter)
+ except StopIteration:
+ val = None
+
+ if val is None:
+ parser.remove_section(section)
+
+ self._mark_as_modified(fname, parser)
+ else:
+ raise ConfigurationError(
+ "Fatal Internal error [id=1]. Please report as a bug."
+ )
+
+ del self._config[self.load_only][key]
+
+ def save(self):
+ # type: () -> None
+ """Save the current in-memory state.
+ """
+ self._ensure_have_load_only()
+
+ for fname, parser in self._modified_parsers:
+ logger.info("Writing to %s", fname)
+
+ # Ensure directory exists.
+ ensure_dir(os.path.dirname(fname))
+
+ with open(fname, "w") as f:
+ parser.write(f)
+
+ #
+ # Private routines
+ #
+
+ def _ensure_have_load_only(self):
+ # type: () -> None
+ if self.load_only is None:
+ raise ConfigurationError("A specific file to modify is needed.")
+ logger.debug("Will be working with %s variant only", self.load_only)
+
+ @property
+ def _dictionary(self):
+ # type: () -> Dict[str, Any]
+ """A dictionary representing the loaded configuration.
+ """
+ # NOTE: Dictionaries are not populated if not loaded. So, conditionals
+ # are not needed here.
+ retval = {}
+
+ for variant in self._override_order:
+ retval.update(self._config[variant])
+
+ return retval
+
+ def _load_config_files(self):
+ # type: () -> None
+ """Loads configuration from configuration files
+ """
+ config_files = dict(self._iter_config_files())
+ if config_files[kinds.ENV][0:1] == [os.devnull]:
+ logger.debug(
+ "Skipping loading configuration files due to "
+ "environment's PIP_CONFIG_FILE being os.devnull"
+ )
+ return
+
+ for variant, files in config_files.items():
+ for fname in files:
+ # If there's a specific variant set in `load_only`, load only
+ # that variant, not the others.
+ if self.load_only is not None and variant != self.load_only:
+ logger.debug(
+ "Skipping file '%s' (variant: %s)", fname, variant
+ )
+ continue
+
+ parser = self._load_file(variant, fname)
+
+ # Keeping track of the parsers used
+ self._parsers[variant].append((fname, parser))
+
+ def _load_file(self, variant, fname):
+ # type: (Kind, str) -> RawConfigParser
+ logger.debug("For variant '%s', will try loading '%s'", variant, fname)
+ parser = self._construct_parser(fname)
+
+ for section in parser.sections():
+ items = parser.items(section)
+ self._config[variant].update(self._normalized_keys(section, items))
+
+ return parser
+
+ def _construct_parser(self, fname):
+ # type: (str) -> RawConfigParser
+ parser = configparser.RawConfigParser()
+ # If there is no such file, don't bother reading it but create the
+ # parser anyway, to hold the data.
+ # Doing this is useful when modifying and saving files, where we don't
+ # need to construct a parser.
+ if os.path.exists(fname):
+ try:
+ parser.read(fname)
+ except UnicodeDecodeError:
+ # See https://github.com/pypa/pip/issues/4963
+ raise ConfigurationFileCouldNotBeLoaded(
+ reason="contains invalid {} characters".format(
+ locale.getpreferredencoding(False)
+ ),
+ fname=fname,
+ )
+ except configparser.Error as error:
+ # See https://github.com/pypa/pip/issues/4893
+ raise ConfigurationFileCouldNotBeLoaded(error=error)
+ return parser
+
+ def _load_environment_vars(self):
+ # type: () -> None
+ """Loads configuration from environment variables
+ """
+ self._config[kinds.ENV_VAR].update(
+ self._normalized_keys(":env:", self._get_environ_vars())
+ )
+
+ def _normalized_keys(self, section, items):
+ # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
+ """Normalizes items to construct a dictionary with normalized keys.
+
+ This routine is where the names become keys and are made the same
+ regardless of source - configuration files or environment.
+ """
+ normalized = {}
+ for name, val in items:
+ key = section + "." + _normalize_name(name)
+ normalized[key] = val
+ return normalized
+
+ def _get_environ_vars(self):
+ # type: () -> Iterable[Tuple[str, str]]
+ """Returns a generator with all environmental vars with prefix PIP_"""
+ for key, val in os.environ.items():
+ should_be_yielded = (
+ key.startswith("PIP_") and
+ key[4:].lower() not in self._ignore_env_names
+ )
+ if should_be_yielded:
+ yield key[4:].lower(), val
+
+ # XXX: This is patched in the tests.
+ def _iter_config_files(self):
+ # type: () -> Iterable[Tuple[Kind, List[str]]]
+ """Yields variant and configuration files associated with it.
+
+ This should be treated like items of a dictionary.
+ """
+ # SMELL: Move the conditions out of this function
+
+ # environment variables have the lowest priority
+ config_file = os.environ.get('PIP_CONFIG_FILE', None)
+ if config_file is not None:
+ yield kinds.ENV, [config_file]
+ else:
+ yield kinds.ENV, []
+
+ config_files = get_configuration_files()
+
+ # at the base we have any global configuration
+ yield kinds.GLOBAL, config_files[kinds.GLOBAL]
+
+ # per-user configuration next
+ should_load_user_config = not self.isolated and not (
+ config_file and os.path.exists(config_file)
+ )
+ if should_load_user_config:
+ # The legacy config file is overridden by the new config file
+ yield kinds.USER, config_files[kinds.USER]
+
+ # finally the site (virtualenv) configuration, which trumps the others
+ yield kinds.SITE, config_files[kinds.SITE]
+
+ def _get_parser_to_modify(self):
+ # type: () -> Tuple[str, RawConfigParser]
+ # Determine which parser to modify
+ parsers = self._parsers[self.load_only]
+ if not parsers:
+ # This should not happen if everything works correctly.
+ raise ConfigurationError(
+ "Fatal Internal error [id=2]. Please report as a bug."
+ )
+
+ # Use the highest priority parser.
+ return parsers[-1]
+
+ # XXX: This is patched in the tests.
+ def _mark_as_modified(self, fname, parser):
+ # type: (str, RawConfigParser) -> None
+ file_parser_tuple = (fname, parser)
+ if file_parser_tuple not in self._modified_parsers:
+ self._modified_parsers.append(file_parser_tuple)
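
Tracing a key through the machinery above: an index-url line under [global] in pip.conf and a PIP_INDEX_URL environment variable both land in _config as normalized dotted keys. A standalone sketch of that normalization:

    def normalize_name(name):
        # Same rules as _normalize_name above.
        name = name.lower().replace("_", "-")
        if name.startswith("--"):
            name = name[2:]  # only prefer long opts
        return name

    # From a configuration file section:
    print("global." + normalize_name("index_url"))         # global.index-url
    # From the environment (PIP_ prefix stripped, ':env:' section):
    print(":env:." + normalize_name("PIP_INDEX_URL"[4:]))  # :env:.index-url
    # And split back apart for editing, as _disassemble_key does:
    print("global.index-url".split(".", 1))                # ['global', 'index-url']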
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 00000000..fdf332a8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,23 @@
+from pip._internal.distributions.source import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.distributions.base import AbstractDistribution
+ from pip._internal.req.req_install import InstallRequirement
+
+
+def make_distribution_for_install_requirement(install_req):
+ # type: (InstallRequirement) -> AbstractDistribution
+ """Returns a Distribution for the given InstallRequirement
+ """
+ # Editable requirements are always source distributions; a
+ # non-editable requirement that points at a wheel is a wheel.
+ if install_req.editable:
+ return SourceDistribution(install_req)
+
+ if install_req.link and install_req.is_wheel:
+ return WheelDistribution(install_req)
+
+ # Otherwise, a SourceDistribution
+ return SourceDistribution(install_req)
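
The dispatch above is a three-way decision over two attributes of the requirement. A tiny stand-in object (hypothetical, only for tracing the branches):

    class FakeReq(object):
        # Just the attributes the dispatcher inspects.
        def __init__(self, editable, wheel_link):
            self.editable = editable
            self.link = "example-1.0-py3-none-any.whl" if wheel_link else None
            self.is_wheel = wheel_link

    def kind(req):
        if req.editable:
            return "SourceDistribution"  # editables always build from source
        if req.link and req.is_wheel:
            return "WheelDistribution"
        return "SourceDistribution"

    print(kind(FakeReq(editable=True, wheel_link=True)))    # SourceDistribution
    print(kind(FakeReq(editable=False, wheel_link=True)))   # WheelDistribution
    print(kind(FakeReq(editable=False, wheel_link=False)))  # SourceDistribution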
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..122d53d7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pyc
new file mode 100644
index 00000000..a120de57
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pyc
new file mode 100644
index 00000000..b8607195
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pyc
new file mode 100644
index 00000000..fb91159a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc
new file mode 100644
index 00000000..594a21d4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py
new file mode 100644
index 00000000..b9af3f02
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py
@@ -0,0 +1,33 @@
+import abc
+
+from pip._vendor.six import add_metaclass
+
+
+@add_metaclass(abc.ABCMeta)
+class AbstractDistribution(object):
+ """A base class for handling installable artifacts.
+
+ The requirements for anything installable are as follows:
+
+ - we must be able to determine the requirement name
+ (or we can't correctly handle the non-upgrade case).
+
+ - for packages with setup requirements, we must also be able
+ to determine their requirements without installing additional
+ packages (for the same reason as run-time dependencies)
+
+ - we must be able to create a Distribution object exposing the
+ above metadata.
+ """
+
+ def __init__(self, req):
+ super(AbstractDistribution, self).__init__()
+ self.req = req
+
+ @abc.abstractmethod
+ def get_pkg_resources_distribution(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def prepare_distribution_metadata(self, finder, build_isolation):
+ raise NotImplementedError()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py
new file mode 100644
index 00000000..c4a64e7c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py
@@ -0,0 +1,15 @@
+from pip._internal.distributions.base import AbstractDistribution
+
+
+class InstalledDistribution(AbstractDistribution):
+ """Represents an installed package.
+
+ This does not need any preparation as the required information has already
+ been computed.
+ """
+
+ def get_pkg_resources_distribution(self):
+ return self.req.satisfied_by
+
+ def prepare_distribution_metadata(self, finder, build_isolation):
+ pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py
new file mode 100644
index 00000000..e5d9fd4b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py
@@ -0,0 +1,80 @@
+import logging
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+ """Represents a source distribution.
+
+ The preparation step for these needs metadata for the packages to be
+ generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+
+ NOTE from @pradyunsg (14 June 2019)
+ I expect SourceDistribution class will need to be split into
+ `legacy_source` (setup.py based) and `source` (PEP 517 based) when we start
+ bringing logic for preparation out of InstallRequirement into this class.
+ """
+
+ def get_pkg_resources_distribution(self):
+ return self.req.get_dist()
+
+ def prepare_distribution_metadata(self, finder, build_isolation):
+ # Prepare for building. We need to:
+ # 1. Load pyproject.toml (if it exists)
+ # 2. Set up the build environment
+
+ self.req.load_pyproject_toml()
+ should_isolate = self.req.use_pep517 and build_isolation
+
+ def _raise_conflicts(conflicting_with, conflicting_reqs):
+ raise InstallationError(
+ "Some build dependencies for %s conflict with %s: %s." % (
+ self.req, conflicting_with, ', '.join(
+ '%s is incompatible with %s' % (installed, wanted)
+ for installed, wanted in sorted(conflicting_reqs))))
+
+ if should_isolate:
+ # Isolate in a BuildEnvironment and install the build-time
+ # requirements.
+ self.req.build_env = BuildEnvironment()
+ self.req.build_env.install_requirements(
+ finder, self.req.pyproject_requires, 'overlay',
+ "Installing build dependencies"
+ )
+ conflicting, missing = self.req.build_env.check_requirements(
+ self.req.requirements_to_check
+ )
+ if conflicting:
+ _raise_conflicts("PEP 517/518 supported requirements",
+ conflicting)
+ if missing:
+ logger.warning(
+ "Missing build requirements in pyproject.toml for %s.",
+ self.req,
+ )
+ logger.warning(
+ "The project does not specify a build backend, and "
+ "pip cannot fall back to setuptools without %s.",
+ " and ".join(map(repr, sorted(missing)))
+ )
+ # Install any extra build dependencies that the backend requests.
+ # This must be done in a second pass, as the pyproject.toml
+ # dependencies must be installed before we can call the backend.
+ with self.req.build_env:
+ # We need to have the env active when calling the hook.
+ self.req.spin_message = "Getting requirements to build wheel"
+ reqs = self.req.pep517_backend.get_requires_for_build_wheel()
+ conflicting, missing = self.req.build_env.check_requirements(reqs)
+ if conflicting:
+ _raise_conflicts("the backend dependencies", conflicting)
+ self.req.build_env.install_requirements(
+ finder, missing, 'normal',
+ "Installing backend dependencies"
+ )
+
+ self.req.prepare_metadata()
+ self.req.assert_source_matches_version()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py
new file mode 100644
index 00000000..de7be38e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py
@@ -0,0 +1,17 @@
+from pip._vendor import pkg_resources
+
+from pip._internal.distributions.base import AbstractDistribution
+
+
+class WheelDistribution(AbstractDistribution):
+ """Represents a wheel distribution.
+
+ This does not need any preparation as wheels can be directly unpacked.
+ """
+
+ def get_pkg_resources_distribution(self):
+ return list(pkg_resources.find_distributions(
+ self.req.source_dir))[0]
+
+ def prepare_distribution_metadata(self, finder, build_isolation):
+ pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py
new file mode 100644
index 00000000..8715eb5b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py
@@ -0,0 +1,1163 @@
+from __future__ import absolute_import
+
+import cgi
+import email.utils
+import json
+import logging
+import mimetypes
+import os
+import platform
+import re
+import shutil
+import sys
+
+from pip._vendor import requests, urllib3
+from pip._vendor.cachecontrol import CacheControlAdapter
+from pip._vendor.cachecontrol.caches import FileCache
+from pip._vendor.lockfile import LockError
+from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
+from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.requests.utils import get_netrc_auth
+ # NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
+ # why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+import pip
+from pip._internal.exceptions import HashMismatch, InstallationError
+from pip._internal.models.index import PyPI
+# Import ssl from compat so the initial import occurs in only one place.
+from pip._internal.utils.compat import HAS_TLS, ssl
+from pip._internal.utils.encoding import auto_decode
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.glibc import libc_ver
+from pip._internal.utils.marker_files import write_delete_marker_file
+from pip._internal.utils.misc import (
+ ARCHIVE_EXTENSIONS, ask, ask_input, ask_password, ask_path_exists,
+ backup_dir, consume, display_path, format_size, get_installed_version,
+ path_to_url, remove_auth_from_url, rmtree, split_auth_netloc_from_url,
+ splitext, unpack_file,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import DownloadProgressProvider
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Optional, Tuple, Dict, IO, Text, Union
+ )
+ from optparse import Values
+ from pip._internal.models.link import Link
+ from pip._internal.utils.hashes import Hashes
+ from pip._internal.vcs.versioncontrol import AuthInfo, VersionControl
+
+ Credentials = Tuple[str, str, str]
+
+
+__all__ = ['get_file_content',
+ 'is_url', 'url_to_path', 'path_to_url',
+ 'is_archive_file', 'unpack_vcs_link',
+ 'unpack_file_url', 'is_vcs_url', 'is_file_url',
+ 'unpack_http_url', 'unpack_url',
+ 'parse_content_disposition', 'sanitize_content_filename']
+
+
+logger = logging.getLogger(__name__)
+
+
+try:
+ import keyring # noqa
+except ImportError:
+ keyring = None
+except Exception as exc:
+ logger.warning("Keyring is skipped due to an exception: %s",
+ str(exc))
+ keyring = None
+
+# These are environment variables present when running under various
+# CI systems. For each variable, some CI systems that use the variable
+# are indicated. The collection was chosen so that for each of a number
+# of popular systems, at least one of the environment variables is used.
+# This list is used to provide some indication of and lower bound for
+# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
+# For more background, see: https://github.com/pypa/pip/issues/5499
+CI_ENVIRONMENT_VARIABLES = (
+ # Azure Pipelines
+ 'BUILD_BUILDID',
+ # Jenkins
+ 'BUILD_ID',
+ # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
+ 'CI',
+ # Explicit environment variable.
+ 'PIP_IS_CI',
+)
+
+
+def looks_like_ci():
+ # type: () -> bool
+ """
+ Return whether it looks like pip is running under CI.
+ """
+ # We don't use the method of checking for a tty (e.g. using isatty())
+ # because some CI systems mimic a tty (e.g. Travis CI). Thus that
+ # method doesn't provide definitive information in either direction.
+ return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
+
+
+def user_agent():
+ """
+ Return a string representing the user agent.
+ """
+ data = {
+ "installer": {"name": "pip", "version": pip.__version__},
+ "python": platform.python_version(),
+ "implementation": {
+ "name": platform.python_implementation(),
+ },
+ }
+
+ if data["implementation"]["name"] == 'CPython':
+ data["implementation"]["version"] = platform.python_version()
+ elif data["implementation"]["name"] == 'PyPy':
+ if sys.pypy_version_info.releaselevel == 'final':
+ pypy_version_info = sys.pypy_version_info[:3]
+ else:
+ pypy_version_info = sys.pypy_version_info
+ data["implementation"]["version"] = ".".join(
+ [str(x) for x in pypy_version_info]
+ )
+ elif data["implementation"]["name"] == 'Jython':
+ # Complete Guess
+ data["implementation"]["version"] = platform.python_version()
+ elif data["implementation"]["name"] == 'IronPython':
+ # Complete Guess
+ data["implementation"]["version"] = platform.python_version()
+
+ if sys.platform.startswith("linux"):
+ from pip._vendor import distro
+ distro_infos = dict(filter(
+ lambda x: x[1],
+ zip(["name", "version", "id"], distro.linux_distribution()),
+ ))
+ libc = dict(filter(
+ lambda x: x[1],
+ zip(["lib", "version"], libc_ver()),
+ ))
+ if libc:
+ distro_infos["libc"] = libc
+ if distro_infos:
+ data["distro"] = distro_infos
+
+ if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
+ data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
+
+ if platform.system():
+ data.setdefault("system", {})["name"] = platform.system()
+
+ if platform.release():
+ data.setdefault("system", {})["release"] = platform.release()
+
+ if platform.machine():
+ data["cpu"] = platform.machine()
+
+ if HAS_TLS:
+ data["openssl_version"] = ssl.OPENSSL_VERSION
+
+ setuptools_version = get_installed_version("setuptools")
+ if setuptools_version is not None:
+ data["setuptools_version"] = setuptools_version
+
+ # Use None rather than False so as not to give the impression that
+ # pip knows it is not being run under CI. Rather, it is a null or
+ # inconclusive result. Also, we include some value rather than no
+ # value to make it easier to know that the check has been run.
+ data["ci"] = True if looks_like_ci() else None
+
+ user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
+ if user_data is not None:
+ data["user_data"] = user_data
+
+ return "{data[installer][name]}/{data[installer][version]} {json}".format(
+ data=data,
+ json=json.dumps(data, separators=(",", ":"), sort_keys=True),
+ )
+
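+# The string returned above looks roughly like this (illustrative values):
+#
+#     pip/19.1.1 {"ci":null,"cpu":"x86_64","implementation":{...},...}
+#
+# i.e. "<name>/<version>" followed by the compact, key-sorted JSON payload.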
+
+def _get_keyring_auth(url, username):
+ """Return the tuple auth for a given url from keyring."""
+ if not url or not keyring:
+ return None
+
+ try:
+ try:
+ get_credential = keyring.get_credential
+ except AttributeError:
+ pass
+ else:
+ logger.debug("Getting credentials from keyring for %s", url)
+ cred = get_credential(url, username)
+ if cred is not None:
+ return cred.username, cred.password
+ return None
+
+ if username:
+ logger.debug("Getting password from keyring for %s", url)
+ password = keyring.get_password(url, username)
+ if password:
+ return username, password
+
+ except Exception as exc:
+ logger.warning("Keyring is skipped due to an exception: %s",
+ str(exc))
+
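+# Usage sketch (hostname is a placeholder): newer keyring releases expose
+# get_credential(), which can discover a username as well as a password,
+# so
+#
+#     _get_keyring_auth("https://pypi.example.com/simple", None)
+#
+# may return ("user", "secret") from the system keyring. Older releases
+# fall back to keyring.get_password(url, username), which requires the
+# username to be known in advance.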
+
+class MultiDomainBasicAuth(AuthBase):
+
+ def __init__(self, prompting=True, index_urls=None):
+ # type: (bool, Optional[Values]) -> None
+ self.prompting = prompting
+ self.index_urls = index_urls
+ self.passwords = {} # type: Dict[str, AuthInfo]
+ # When the user is prompted to enter credentials and keyring is
+ # available, we will offer to save them. If the user accepts,
+ # this value is set to the credentials they entered. After the
+ # request authenticates, the caller should call
+ # ``save_credentials`` to save these.
+ self._credentials_to_save = None # type: Optional[Credentials]
+
+ def _get_index_url(self, url):
+ """Return the original index URL matching the requested URL.
+
+ Cached or dynamically generated credentials may work against
+ the original index URL rather than just the netloc.
+
+ The provided url should have had its username and password
+ removed already. If the original index url had credentials then
+ they will be included in the return value.
+
+ Returns None if no matching index was found, or if --no-index
+ was specified by the user.
+ """
+ if not url or not self.index_urls:
+ return None
+
+ for u in self.index_urls:
+ prefix = remove_auth_from_url(u).rstrip("/") + "/"
+ if url.startswith(prefix):
+ return u
+
+ def _get_new_credentials(self, original_url, allow_netrc=True,
+ allow_keyring=True):
+ """Find and return credentials for the specified URL."""
+ # Split the credentials and netloc from the url.
+ url, netloc, url_user_password = split_auth_netloc_from_url(
+ original_url)
+
+ # Start with the credentials embedded in the url
+ username, password = url_user_password
+ if username is not None and password is not None:
+ logger.debug("Found credentials in url for %s", netloc)
+ return url_user_password
+
+ # Find a matching index url for this request
+ index_url = self._get_index_url(url)
+ if index_url:
+ # Split the credentials from the url.
+ index_info = split_auth_netloc_from_url(index_url)
+ if index_info:
+ index_url, _, index_url_user_password = index_info
+ logger.debug("Found index url %s", index_url)
+
+ # If an index URL was found, try its embedded credentials
+ if index_url and index_url_user_password[0] is not None:
+ username, password = index_url_user_password
+ if username is not None and password is not None:
+ logger.debug("Found credentials in index url for %s", netloc)
+ return index_url_user_password
+
+ # Get creds from netrc if we still don't have them
+ if allow_netrc:
+ netrc_auth = get_netrc_auth(original_url)
+ if netrc_auth:
+ logger.debug("Found credentials in netrc for %s", netloc)
+ return netrc_auth
+
+ # If we don't have a password and keyring is available, use it.
+ if allow_keyring:
+ # The index url is more specific than the netloc, so try it first
+ kr_auth = (_get_keyring_auth(index_url, username) or
+ _get_keyring_auth(netloc, username))
+ if kr_auth:
+ logger.debug("Found credentials in keyring for %s", netloc)
+ return kr_auth
+
+ return None, None
+
+ def _get_url_and_credentials(self, original_url):
+ """Return the credentials to use for the provided URL.
+
+ If allowed, netrc and keyring may be used to obtain the
+ correct credentials.
+
+ Returns (url_without_credentials, username, password). Note
+ that even if the original URL contains credentials, this
+ function may return a different username and password.
+ """
+ url, netloc, _ = split_auth_netloc_from_url(original_url)
+
+ # Use any stored credentials that we have for this netloc
+ username, password = self.passwords.get(netloc, (None, None))
+
+ # If nothing cached, acquire new credentials without prompting
+ # the user (e.g. from netrc, keyring, or similar).
+ if username is None or password is None:
+ username, password = self._get_new_credentials(original_url)
+
+ if username is not None and password is not None:
+ # Store the username and password
+ self.passwords[netloc] = (username, password)
+
+ return url, username, password
+
+ def __call__(self, req):
+ # Get credentials for this request
+ url, username, password = self._get_url_and_credentials(req.url)
+
+ # Set the url of the request to the url without any credentials
+ req.url = url
+
+ if username is not None and password is not None:
+ # Send the basic auth with this request
+ req = HTTPBasicAuth(username, password)(req)
+
+ # Attach a hook to handle 401 responses
+ req.register_hook("response", self.handle_401)
+
+ return req
+
+ # Factored out to allow for easy patching in tests
+ def _prompt_for_password(self, netloc):
+ username = ask_input("User for %s: " % netloc)
+ if not username:
+            # Return a three-tuple to match the other return paths;
+            # handle_401() unpacks (username, password, save).
+            return None, None, False
+ auth = _get_keyring_auth(netloc, username)
+ if auth:
+ return auth[0], auth[1], False
+ password = ask_password("Password: ")
+ return username, password, True
+
+ # Factored out to allow for easy patching in tests
+ def _should_save_password_to_keyring(self):
+ if not keyring:
+ return False
+ return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
+
+ def handle_401(self, resp, **kwargs):
+        # We only care about 401 responses; anything else we just pass
+        # through as the actual response.
+ if resp.status_code != 401:
+ return resp
+
+ # We are not able to prompt the user so simply return the response
+ if not self.prompting:
+ return resp
+
+ parsed = urllib_parse.urlparse(resp.url)
+
+ # Prompt the user for a new username and password
+ username, password, save = self._prompt_for_password(parsed.netloc)
+
+ # Store the new username and password to use for future requests
+ self._credentials_to_save = None
+ if username is not None and password is not None:
+ self.passwords[parsed.netloc] = (username, password)
+
+ # Prompt to save the password to keyring
+ if save and self._should_save_password_to_keyring():
+ self._credentials_to_save = (parsed.netloc, username, password)
+
+ # Consume content and release the original connection to allow our new
+ # request to reuse the same one.
+ resp.content
+ resp.raw.release_conn()
+
+ # Add our new username and password to the request
+ req = HTTPBasicAuth(username or "", password or "")(resp.request)
+ req.register_hook("response", self.warn_on_401)
+
+ # On successful request, save the credentials that were used to
+ # keyring. (Note that if the user responded "no" above, this member
+ # is not set and nothing will be saved.)
+ if self._credentials_to_save:
+ req.register_hook("response", self.save_credentials)
+
+ # Send our new request
+ new_resp = resp.connection.send(req, **kwargs)
+ new_resp.history.append(resp)
+
+ return new_resp
+
+ def warn_on_401(self, resp, **kwargs):
+ """Response callback to warn about incorrect credentials."""
+ if resp.status_code == 401:
+ logger.warning('401 Error, Credentials not correct for %s',
+ resp.request.url)
+
+ def save_credentials(self, resp, **kwargs):
+ """Response callback to save credentials on success."""
+ assert keyring is not None, "should never reach here without keyring"
+ if not keyring:
+ return
+
+ creds = self._credentials_to_save
+ self._credentials_to_save = None
+ if creds and resp.status_code < 400:
+ try:
+ logger.info('Saving credentials to keyring')
+ keyring.set_password(*creds)
+ except Exception:
+ logger.exception('Failed to save credentials')
+
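+# Summary of the credential lookup order implemented above (descriptive
+# comment only): 1) credentials already cached for the netloc, 2)
+# credentials embedded in the URL, 3) credentials embedded in a matching
+# index URL, 4) netrc, 5) keyring, and finally 6) an interactive prompt
+# on a 401 response (when prompting is enabled).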
+
+class LocalFSAdapter(BaseAdapter):
+
+ def send(self, request, stream=None, timeout=None, verify=None, cert=None,
+ proxies=None):
+ pathname = url_to_path(request.url)
+
+ resp = Response()
+ resp.status_code = 200
+ resp.url = request.url
+
+ try:
+ stats = os.stat(pathname)
+ except OSError as exc:
+ resp.status_code = 404
+ resp.raw = exc
+ else:
+ modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
+ content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
+ resp.headers = CaseInsensitiveDict({
+ "Content-Type": content_type,
+ "Content-Length": stats.st_size,
+ "Last-Modified": modified,
+ })
+
+ resp.raw = open(pathname, "rb")
+ resp.close = resp.raw.close
+
+ return resp
+
+ def close(self):
+ pass
+
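+# Illustrative use: PipSession below mounts this adapter on "file://",
+# so session.get('file:///path/to/index.html') yields a Response whose
+# .raw is the opened file (and a 404 status if the path does not exist).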
+
+class SafeFileCache(FileCache):
+ """
+ A file based cache which is safe to use even when the target directory may
+ not be accessible or writable.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(SafeFileCache, self).__init__(*args, **kwargs)
+
+        # Check to ensure that the directory containing our cache directory
+        # is owned by the user currently executing pip. If it does not exist,
+        # check each parent directory until we find one that does exist. If
+        # it is not owned by the user executing pip, disable the cache and
+        # log a warning.
+ if not check_path_owner(self.directory):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned by "
+ "the current user and the cache has been disabled. Please "
+ "check the permissions and owner of that directory. If "
+ "executing pip with sudo, you may want sudo's -H flag.",
+ self.directory,
+ )
+
+ # Set our directory to None to disable the Cache
+ self.directory = None
+
+ def get(self, *args, **kwargs):
+ # If we don't have a directory, then the cache should be a no-op.
+ if self.directory is None:
+ return
+
+ try:
+ return super(SafeFileCache, self).get(*args, **kwargs)
+ except (LockError, OSError, IOError):
+            # We intentionally silence this error: if we can't access the
+            # cache, we can simply skip caching and process the request as
+            # if caching weren't enabled.
+ pass
+
+ def set(self, *args, **kwargs):
+ # If we don't have a directory, then the cache should be a no-op.
+ if self.directory is None:
+ return
+
+ try:
+ return super(SafeFileCache, self).set(*args, **kwargs)
+ except (LockError, OSError, IOError):
+            # We intentionally silence this error: if we can't access the
+            # cache, we can simply skip caching and process the request as
+            # if caching weren't enabled.
+ pass
+
+ def delete(self, *args, **kwargs):
+ # If we don't have a directory, then the cache should be a no-op.
+ if self.directory is None:
+ return
+
+ try:
+ return super(SafeFileCache, self).delete(*args, **kwargs)
+ except (LockError, OSError, IOError):
+            # We intentionally silence this error: if we can't access the
+            # cache, we can simply skip caching and process the request as
+            # if caching weren't enabled.
+ pass
+
+
+class InsecureHTTPAdapter(HTTPAdapter):
+
+ def cert_verify(self, conn, url, verify, cert):
+ conn.cert_reqs = 'CERT_NONE'
+ conn.ca_certs = None
+
+
+class PipSession(requests.Session):
+
+ timeout = None # type: Optional[int]
+
+ def __init__(self, *args, **kwargs):
+ retries = kwargs.pop("retries", 0)
+ cache = kwargs.pop("cache", None)
+ insecure_hosts = kwargs.pop("insecure_hosts", [])
+ index_urls = kwargs.pop("index_urls", None)
+
+ super(PipSession, self).__init__(*args, **kwargs)
+
+ # Attach our User Agent to the request
+ self.headers["User-Agent"] = user_agent()
+
+ # Attach our Authentication handler to the session
+ self.auth = MultiDomainBasicAuth(index_urls=index_urls)
+
+ # Create our urllib3.Retry instance which will allow us to customize
+ # how we handle retries.
+ retries = urllib3.Retry(
+ # Set the total number of retries that a particular request can
+ # have.
+ total=retries,
+
+ # A 503 error from PyPI typically means that the Fastly -> Origin
+ # connection got interrupted in some way. A 503 error in general
+ # is typically considered a transient error so we'll go ahead and
+ # retry it.
+ # A 500 may indicate transient error in Amazon S3
+ # A 520 or 527 - may indicate transient error in CloudFlare
+ status_forcelist=[500, 503, 520, 527],
+
+ # Add a small amount of back off between failed requests in
+ # order to prevent hammering the service.
+ backoff_factor=0.25,
+ )
+
+ # We want to _only_ cache responses on securely fetched origins. We do
+ # this because we can't validate the response of an insecurely fetched
+ # origin, and we don't want someone to be able to poison the cache and
+ # require manual eviction from the cache to fix it.
+ if cache:
+ secure_adapter = CacheControlAdapter(
+ cache=SafeFileCache(cache, use_dir_lock=True),
+ max_retries=retries,
+ )
+ else:
+ secure_adapter = HTTPAdapter(max_retries=retries)
+
+ # Our Insecure HTTPAdapter disables HTTPS validation. It does not
+ # support caching (see above) so we'll use it for all http:// URLs as
+ # well as any https:// host that we've marked as ignoring TLS errors
+ # for.
+ insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
+ # Save this for later use in add_insecure_host().
+ self._insecure_adapter = insecure_adapter
+
+ self.mount("https://", secure_adapter)
+ self.mount("http://", insecure_adapter)
+
+ # Enable file:// urls
+ self.mount("file://", LocalFSAdapter())
+
+ # We want to use a non-validating adapter for any requests which are
+ # deemed insecure.
+ for host in insecure_hosts:
+ self.add_insecure_host(host)
+
+ def add_insecure_host(self, host):
+ # type: (str) -> None
+ self.mount('https://{}/'.format(host), self._insecure_adapter)
+
+ def request(self, method, url, *args, **kwargs):
+ # Allow setting a default timeout on a session
+ kwargs.setdefault("timeout", self.timeout)
+
+ # Dispatch the actual request
+ return super(PipSession, self).request(method, url, *args, **kwargs)
+
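+# Construction sketch (illustrative values only), mirroring the keyword
+# arguments consumed in PipSession.__init__ above:
+#
+#     session = PipSession(
+#         retries=3,
+#         cache="/tmp/pip-http-cache",
+#         insecure_hosts=["internal.example.com"],
+#         index_urls=["https://pypi.org/simple/"],
+#     )
+#     session.timeout = 15
+#     resp = session.get("https://pypi.org/simple/pip/")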
+
+def get_file_content(url, comes_from=None, session=None):
+ # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
+ """Gets the content of a file; it may be a filename, file: URL, or
+ http: URL. Returns (location, content). Content is unicode.
+
+ :param url: File path or url.
+ :param comes_from: Origin description of requirements.
+    :param session: Instance of pip._internal.download.PipSession.
+ """
+ if session is None:
+ raise TypeError(
+ "get_file_content() missing 1 required keyword argument: 'session'"
+ )
+
+ match = _scheme_re.search(url)
+ if match:
+ scheme = match.group(1).lower()
+ if (scheme == 'file' and comes_from and
+ comes_from.startswith('http')):
+ raise InstallationError(
+ 'Requirements file %s references URL %s, which is local'
+ % (comes_from, url))
+ if scheme == 'file':
+ path = url.split(':', 1)[1]
+ path = path.replace('\\', '/')
+ match = _url_slash_drive_re.match(path)
+ if match:
+ path = match.group(1) + ':' + path.split('|', 1)[1]
+ path = urllib_parse.unquote(path)
+ if path.startswith('/'):
+ path = '/' + path.lstrip('/')
+ url = path
+ else:
+ # FIXME: catch some errors
+ resp = session.get(url)
+ resp.raise_for_status()
+ return resp.url, resp.text
+ try:
+ with open(url, 'rb') as f:
+ content = auto_decode(f.read())
+ except IOError as exc:
+ raise InstallationError(
+ 'Could not open requirements file: %s' % str(exc)
+ )
+ return url, content
+
+
+_scheme_re = re.compile(r'^(http|https|file):', re.I)
+_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
+
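+# _url_slash_drive_re matches old-style Windows drive specs in file: URLs,
+# e.g. the 'c|' in 'file:///c|/some/dir', which get_file_content() above
+# rewrites to 'c:' before unquoting (explanatory note).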
+
+def is_url(name):
+ # type: (Union[str, Text]) -> bool
+ """Returns true if the name looks like a URL"""
+ if ':' not in name:
+ return False
+ scheme = name.split(':', 1)[0].lower()
+ return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
+
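+# Illustrative results: is_url('https://pypi.org/simple/') and
+# is_url('git+https://github.com/pypa/pip.git') are True (vcs.all_schemes
+# includes 'git+https'), while is_url('./downloads/pip.tar.gz') is False
+# because it contains no scheme separator.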
+
+def url_to_path(url):
+ # type: (str) -> str
+ """
+ Convert a file: URL to a path.
+ """
+ assert url.startswith('file:'), (
+ "You can only turn file: urls into filenames (not %r)" % url)
+
+ _, netloc, path, _, _ = urllib_parse.urlsplit(url)
+
+ if not netloc or netloc == 'localhost':
+ # According to RFC 8089, same as empty authority.
+ netloc = ''
+ elif sys.platform == 'win32':
+ # If we have a UNC path, prepend UNC share notation.
+ netloc = '\\\\' + netloc
+ else:
+ raise ValueError(
+ 'non-local file URIs are not supported on this platform: %r'
+ % url
+ )
+
+ path = urllib_request.url2pathname(netloc + path)
+ return path
+
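+# Illustrative examples: on POSIX, url_to_path('file:///tmp/foo%20bar')
+# returns '/tmp/foo bar' and url_to_path('file://localhost/tmp/x')
+# returns '/tmp/x'; on win32, 'file://server/share/x' becomes the UNC
+# path \\server\share\x.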
+
+def is_archive_file(name):
+ # type: (str) -> bool
+    """Return True if `name` is considered an archive file."""
+ ext = splitext(name)[1].lower()
+ if ext in ARCHIVE_EXTENSIONS:
+ return True
+ return False
+
+
+def unpack_vcs_link(link, location):
+ vcs_backend = _get_used_vcs_backend(link)
+ vcs_backend.unpack(location, url=link.url)
+
+
+def _get_used_vcs_backend(link):
+ # type: (Link) -> Optional[VersionControl]
+ """
+ Return a VersionControl object or None.
+ """
+ for vcs_backend in vcs.backends:
+ if link.scheme in vcs_backend.schemes:
+ return vcs_backend
+ return None
+
+
+def is_vcs_url(link):
+ # type: (Link) -> bool
+ return bool(_get_used_vcs_backend(link))
+
+
+def is_file_url(link):
+ # type: (Link) -> bool
+ return link.url.lower().startswith('file:')
+
+
+def is_dir_url(link):
+ # type: (Link) -> bool
+ """Return whether a file:// Link points to a directory.
+
+ ``link`` must not have any other scheme but file://. Call is_file_url()
+ first.
+
+ """
+ link_path = url_to_path(link.url_without_fragment)
+ return os.path.isdir(link_path)
+
+
+def _progress_indicator(iterable, *args, **kwargs):
+ return iterable
+
+
+def _download_url(
+ resp, # type: Response
+ link, # type: Link
+ content_file, # type: IO
+ hashes, # type: Optional[Hashes]
+ progress_bar # type: str
+):
+ # type: (...) -> None
+ try:
+ total_length = int(resp.headers['content-length'])
+ except (ValueError, KeyError, TypeError):
+ total_length = 0
+
+ cached_resp = getattr(resp, "from_cache", False)
+ if logger.getEffectiveLevel() > logging.INFO:
+ show_progress = False
+ elif cached_resp:
+ show_progress = False
+ elif total_length > (40 * 1000):
+ show_progress = True
+ elif not total_length:
+ show_progress = True
+ else:
+ show_progress = False
+
+ show_url = link.show_url
+
+ def resp_read(chunk_size):
+ try:
+ # Special case for urllib3.
+ for chunk in resp.raw.stream(
+ chunk_size,
+ # We use decode_content=False here because we don't
+ # want urllib3 to mess with the raw bytes we get
+ # from the server. If we decompress inside of
+ # urllib3 then we cannot verify the checksum
+ # because the checksum will be of the compressed
+ # file. This breakage will only occur if the
+ # server adds a Content-Encoding header, which
+ # depends on how the server was configured:
+ # - Some servers will notice that the file isn't a
+ # compressible file and will leave the file alone
+ # and with an empty Content-Encoding
+ # - Some servers will notice that the file is
+ # already compressed and will leave the file
+ # alone and will add a Content-Encoding: gzip
+ # header
+ # - Some servers won't notice anything at all and
+ # will take a file that's already been compressed
+ # and compress it again and set the
+ # Content-Encoding: gzip header
+ #
+ # By setting this not to decode automatically we
+ # hope to eliminate problems with the second case.
+ decode_content=False):
+ yield chunk
+ except AttributeError:
+ # Standard file-like object.
+ while True:
+ chunk = resp.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ def written_chunks(chunks):
+ for chunk in chunks:
+ content_file.write(chunk)
+ yield chunk
+
+ progress_indicator = _progress_indicator
+
+ if link.netloc == PyPI.netloc:
+ url = show_url
+ else:
+ url = link.url_without_fragment
+
+ if show_progress: # We don't show progress on cached responses
+ progress_indicator = DownloadProgressProvider(progress_bar,
+ max=total_length)
+ if total_length:
+ logger.info("Downloading %s (%s)", url, format_size(total_length))
+ else:
+ logger.info("Downloading %s", url)
+ elif cached_resp:
+ logger.info("Using cached %s", url)
+ else:
+ logger.info("Downloading %s", url)
+
+ logger.debug('Downloading from URL %s', link)
+
+ downloaded_chunks = written_chunks(
+ progress_indicator(
+ resp_read(CONTENT_CHUNK_SIZE),
+ CONTENT_CHUNK_SIZE
+ )
+ )
+ if hashes:
+ hashes.check_against_chunks(downloaded_chunks)
+ else:
+ consume(downloaded_chunks)
+
+
+def _copy_file(filename, location, link):
+ copy = True
+ download_location = os.path.join(location, link.filename)
+ if os.path.exists(download_location):
+ response = ask_path_exists(
+            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
+ display_path(download_location), ('i', 'w', 'b', 'a'))
+ if response == 'i':
+ copy = False
+ elif response == 'w':
+ logger.warning('Deleting %s', display_path(download_location))
+ os.remove(download_location)
+ elif response == 'b':
+ dest_file = backup_dir(download_location)
+ logger.warning(
+ 'Backing up %s to %s',
+ display_path(download_location),
+ display_path(dest_file),
+ )
+ shutil.move(download_location, dest_file)
+ elif response == 'a':
+ sys.exit(-1)
+ if copy:
+ shutil.copy(filename, download_location)
+ logger.info('Saved %s', display_path(download_location))
+
+
+def unpack_http_url(
+ link, # type: Link
+ location, # type: str
+ download_dir=None, # type: Optional[str]
+ session=None, # type: Optional[PipSession]
+ hashes=None, # type: Optional[Hashes]
+ progress_bar="on" # type: str
+):
+ # type: (...) -> None
+ if session is None:
+ raise TypeError(
+ "unpack_http_url() missing 1 required keyword argument: 'session'"
+ )
+
+ with TempDirectory(kind="unpack") as temp_dir:
+ # If a download dir is specified, is the file already downloaded there?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(link,
+ download_dir,
+ hashes)
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ content_type = mimetypes.guess_type(from_path)[0]
+ else:
+ # let's download to a tmp dir
+ from_path, content_type = _download_http_url(link,
+ session,
+ temp_dir.path,
+ hashes,
+ progress_bar)
+
+ # unpack the archive to the build dir location. even when only
+ # downloading archives, they have to be unpacked to parse dependencies
+ unpack_file(from_path, location, content_type, link)
+
+ # a download dir is specified; let's copy the archive there
+ if download_dir and not already_downloaded_path:
+ _copy_file(from_path, download_dir, link)
+
+ if not already_downloaded_path:
+ os.unlink(from_path)
+
+
+def unpack_file_url(
+ link, # type: Link
+ location, # type: str
+ download_dir=None, # type: Optional[str]
+ hashes=None # type: Optional[Hashes]
+):
+ # type: (...) -> None
+ """Unpack link into location.
+
+ If download_dir is provided and link points to a file, make a copy
+ of the link file inside download_dir.
+ """
+ link_path = url_to_path(link.url_without_fragment)
+
+ # If it's a url to a local directory
+ if is_dir_url(link):
+ if os.path.isdir(location):
+ rmtree(location)
+ shutil.copytree(link_path, location, symlinks=True)
+ if download_dir:
+ logger.info('Link is a directory, ignoring download_dir')
+ return
+
+ # If --require-hashes is off, `hashes` is either empty, the
+ # link's embedded hash, or MissingHashes; it is required to
+ # match. If --require-hashes is on, we are satisfied by any
+ # hash in `hashes` matching: a URL-based or an option-based
+ # one; no internet-sourced hash will be in `hashes`.
+ if hashes:
+ hashes.check_against_path(link_path)
+
+ # If a download dir is specified, is the file already there and valid?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(link,
+ download_dir,
+ hashes)
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ else:
+ from_path = link_path
+
+ content_type = mimetypes.guess_type(from_path)[0]
+
+ # unpack the archive to the build dir location. even when only downloading
+ # archives, they have to be unpacked to parse dependencies
+ unpack_file(from_path, location, content_type, link)
+
+ # a download dir is specified and not already downloaded
+ if download_dir and not already_downloaded_path:
+ _copy_file(from_path, download_dir, link)
+
+
+class PipXmlrpcTransport(xmlrpc_client.Transport):
+    """Provide an `xmlrpclib.Transport` implementation via a `PipSession`
+ object.
+ """
+
+ def __init__(self, index_url, session, use_datetime=False):
+ xmlrpc_client.Transport.__init__(self, use_datetime)
+ index_parts = urllib_parse.urlparse(index_url)
+ self._scheme = index_parts.scheme
+ self._session = session
+
+ def request(self, host, handler, request_body, verbose=False):
+ parts = (self._scheme, host, handler, None, None, None)
+ url = urllib_parse.urlunparse(parts)
+ try:
+ headers = {'Content-Type': 'text/xml'}
+ response = self._session.post(url, data=request_body,
+ headers=headers, stream=True)
+ response.raise_for_status()
+ self.verbose = verbose
+ return self.parse_response(response.raw)
+ except requests.HTTPError as exc:
+ logger.critical(
+ "HTTP error %s while getting %s",
+ exc.response.status_code, url,
+ )
+ raise
+
+
+def unpack_url(
+ link, # type: Link
+ location, # type: str
+ download_dir=None, # type: Optional[str]
+ only_download=False, # type: bool
+ session=None, # type: Optional[PipSession]
+ hashes=None, # type: Optional[Hashes]
+ progress_bar="on" # type: str
+):
+ # type: (...) -> None
+ """Unpack link.
+ If link is a VCS link:
+ if only_download, export into download_dir and ignore location
+ else unpack into location
+ for other types of link:
+ - unpack into location
+ - if download_dir, copy the file into download_dir
+ - if only_download, mark location for deletion
+
+ :param hashes: A Hashes object, one of whose embedded hashes must match,
+ or HashMismatch will be raised. If the Hashes is empty, no matches are
+ required, and unhashable types of requirements (like VCS ones, which
+ would ordinarily raise HashUnsupported) are allowed.
+ """
+ # non-editable vcs urls
+ if is_vcs_url(link):
+ unpack_vcs_link(link, location)
+
+ # file urls
+ elif is_file_url(link):
+ unpack_file_url(link, location, download_dir, hashes=hashes)
+
+ # http urls
+ else:
+ if session is None:
+ session = PipSession()
+
+ unpack_http_url(
+ link,
+ location,
+ download_dir,
+ session,
+ hashes=hashes,
+ progress_bar=progress_bar
+ )
+ if only_download:
+ write_delete_marker_file(location)
+
+
+def sanitize_content_filename(filename):
+ # type: (str) -> str
+ """
+ Sanitize the "filename" value from a Content-Disposition header.
+ """
+ return os.path.basename(filename)
+
+
+def parse_content_disposition(content_disposition, default_filename):
+ # type: (str, str) -> str
+ """
+ Parse the "filename" value from a Content-Disposition header, and
+ return the default filename if the result is empty.
+ """
+ _type, params = cgi.parse_header(content_disposition)
+ filename = params.get('filename')
+ if filename:
+ # We need to sanitize the filename to prevent directory traversal
+ # in case the filename contains ".." path parts.
+ filename = sanitize_content_filename(filename)
+ return filename or default_filename
+
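+# Illustrative example: given the hostile header
+#
+#     Content-Disposition: attachment; filename="../../evil.whl"
+#
+# parse_content_disposition() returns 'evil.whl', because
+# sanitize_content_filename() reduces the value to its basename and so
+# strips the directory-traversal components.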
+
+def _download_http_url(
+ link, # type: Link
+ session, # type: PipSession
+ temp_dir, # type: str
+ hashes, # type: Optional[Hashes]
+ progress_bar # type: str
+):
+ # type: (...) -> Tuple[str, str]
+ """Download link url into temp_dir using provided session"""
+ target_url = link.url.split('#', 1)[0]
+ try:
+ resp = session.get(
+ target_url,
+ # We use Accept-Encoding: identity here because requests
+ # defaults to accepting compressed responses. This breaks in
+ # a variety of ways depending on how the server is configured.
+ # - Some servers will notice that the file isn't a compressible
+ # file and will leave the file alone and with an empty
+ # Content-Encoding
+ # - Some servers will notice that the file is already
+ # compressed and will leave the file alone and will add a
+ # Content-Encoding: gzip header
+ # - Some servers won't notice anything at all and will take
+ # a file that's already been compressed and compress it again
+ # and set the Content-Encoding: gzip header
+            # By setting this to request only the identity encoding we're
+            # hoping to eliminate the third case. Hopefully there does not
+            # exist a server which, when given a file, will notice it is
+            # already compressed and that you're not asking for a
+            # compressed file, and will then decompress it before sending;
+            # if such a server exists, it will never be possible to make
+            # this work.
+ headers={"Accept-Encoding": "identity"},
+ stream=True,
+ )
+ resp.raise_for_status()
+ except requests.HTTPError as exc:
+ logger.critical(
+ "HTTP error %s while getting %s", exc.response.status_code, link,
+ )
+ raise
+
+ content_type = resp.headers.get('content-type', '')
+ filename = link.filename # fallback
+ # Have a look at the Content-Disposition header for a better guess
+ content_disposition = resp.headers.get('content-disposition')
+ if content_disposition:
+ filename = parse_content_disposition(content_disposition, filename)
+ ext = splitext(filename)[1] # type: Optional[str]
+ if not ext:
+ ext = mimetypes.guess_extension(content_type)
+ if ext:
+ filename += ext
+ if not ext and link.url != resp.url:
+ ext = os.path.splitext(resp.url)[1]
+ if ext:
+ filename += ext
+ file_path = os.path.join(temp_dir, filename)
+ with open(file_path, 'wb') as content_file:
+ _download_url(resp, link, content_file, hashes, progress_bar)
+ return file_path, content_type
+
+
+def _check_download_dir(link, download_dir, hashes):
+ # type: (Link, str, Optional[Hashes]) -> Optional[str]
+    """Check download_dir for a previously downloaded file with the correct
+    hash. If a correct file is found, return its path; otherwise return None.
+    """
+ download_path = os.path.join(download_dir, link.filename)
+ if os.path.exists(download_path):
+ # If already downloaded, does its hash match?
+ logger.info('File was already downloaded %s', download_path)
+ if hashes:
+ try:
+ hashes.check_against_path(download_path)
+ except HashMismatch:
+ logger.warning(
+ 'Previously-downloaded file %s has bad hash. '
+ 'Re-downloading.',
+ download_path
+ )
+ os.unlink(download_path)
+ return None
+ return download_path
+ return None
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py
new file mode 100644
index 00000000..096adcd6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py
@@ -0,0 +1,305 @@
+"""Exceptions used throughout package"""
+from __future__ import absolute_import
+
+from itertools import chain, groupby, repeat
+
+from pip._vendor.six import iteritems
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.req.req_install import InstallRequirement
+
+
+class PipError(Exception):
+ """Base pip exception"""
+
+
+class ConfigurationError(PipError):
+ """General exception in configuration"""
+
+
+class InstallationError(PipError):
+ """General exception during installation"""
+
+
+class UninstallationError(PipError):
+ """General exception during uninstallation"""
+
+
+class NoneMetadataError(PipError):
+ """
+ Raised when accessing "METADATA" or "PKG-INFO" metadata for a
+ pip._vendor.pkg_resources.Distribution object and
+ `dist.has_metadata('METADATA')` returns True but
+ `dist.get_metadata('METADATA')` returns None (and similarly for
+ "PKG-INFO").
+ """
+
+ def __init__(self, dist, metadata_name):
+ # type: (Distribution, str) -> None
+ """
+ :param dist: A Distribution object.
+ :param metadata_name: The name of the metadata being accessed
+ (can be "METADATA" or "PKG-INFO").
+ """
+ self.dist = dist
+ self.metadata_name = metadata_name
+
+ def __str__(self):
+ # type: () -> str
+ # Use `dist` in the error message because its stringification
+ # includes more information, like the version and location.
+ return (
+ 'None {} metadata found for distribution: {}'.format(
+ self.metadata_name, self.dist,
+ )
+ )
+
+
+class DistributionNotFound(InstallationError):
+ """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+ """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+ """Raised when the most up-to-date version of a package is already
+ installed."""
+
+
+class BadCommand(PipError):
+ """Raised when virtualenv or a command is not found"""
+
+
+class CommandError(PipError):
+ """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+ """Raised when there's a previous conflicting build directory"""
+
+
+class InvalidWheelFilename(InstallationError):
+ """Invalid wheel filename."""
+
+
+class UnsupportedWheel(InstallationError):
+ """Unsupported wheel."""
+
+
+class HashErrors(InstallationError):
+ """Multiple HashError instances rolled into one for reporting"""
+
+ def __init__(self):
+ self.errors = []
+
+ def append(self, error):
+ self.errors.append(error)
+
+ def __str__(self):
+ lines = []
+ self.errors.sort(key=lambda e: e.order)
+ for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
+ lines.append(cls.head)
+ lines.extend(e.body() for e in errors_of_cls)
+        if lines:
+            return '\n'.join(lines)
+        # __str__ must return a str, never None.
+        return ''
+
+ def __nonzero__(self):
+ return bool(self.errors)
+
+ def __bool__(self):
+ return self.__nonzero__()
+
+
+class HashError(InstallationError):
+ """
+ A failure to verify a package against known-good hashes
+
+ :cvar order: An int sorting hash exception classes by difficulty of
+ recovery (lower being harder), so the user doesn't bother fretting
+ about unpinned packages when he has deeper issues, like VCS
+ dependencies, to deal with. Also keeps error reports in a
+ deterministic order.
+ :cvar head: A section heading for display above potentially many
+ exceptions of this kind
+ :ivar req: The InstallRequirement that triggered this error. This is
+ pasted on after the exception is instantiated, because it's not
+ typically available earlier.
+
+ """
+ req = None # type: Optional[InstallRequirement]
+ head = ''
+
+ def body(self):
+ """Return a summary of me for display under the heading.
+
+ This default implementation simply prints a description of the
+ triggering requirement.
+
+ :param req: The InstallRequirement that provoked this error, with
+ populate_link() having already been called
+
+ """
+ return ' %s' % self._requirement_name()
+
+ def __str__(self):
+ return '%s\n%s' % (self.head, self.body())
+
+ def _requirement_name(self):
+ """Return a description of the requirement that triggered me.
+
+        This default implementation returns the long description of the req,
+        including line numbers.
+
+ """
+ return str(self.req) if self.req else 'unknown package'
+
+
+class VcsHashUnsupported(HashError):
+ """A hash was provided for a version-control-system-based requirement, but
+ we don't have a method for hashing those."""
+
+ order = 0
+ head = ("Can't verify hashes for these requirements because we don't "
+ "have a way to hash version control repositories:")
+
+
+class DirectoryUrlHashUnsupported(HashError):
+    """A hash was provided for a file:// requirement that points to a
+    directory, but we don't have a method for hashing those."""
+
+ order = 1
+ head = ("Can't verify hashes for these file:// requirements because they "
+ "point to directories:")
+
+
+class HashMissing(HashError):
+ """A hash was needed for a requirement but is absent."""
+
+ order = 2
+ head = ('Hashes are required in --require-hashes mode, but they are '
+ 'missing from some requirements. Here is a list of those '
+ 'requirements along with the hashes their downloaded archives '
+ 'actually had. Add lines like these to your requirements files to '
+ 'prevent tampering. (If you did not enable --require-hashes '
+ 'manually, note that it turns on automatically when any package '
+ 'has a hash.)')
+
+ def __init__(self, gotten_hash):
+ """
+ :param gotten_hash: The hash of the (possibly malicious) archive we
+ just downloaded
+ """
+ self.gotten_hash = gotten_hash
+
+ def body(self):
+ # Dodge circular import.
+ from pip._internal.utils.hashes import FAVORITE_HASH
+
+ package = None
+ if self.req:
+ # In the case of URL-based requirements, display the original URL
+ # seen in the requirements file rather than the package name,
+ # so the output can be directly copied into the requirements file.
+ package = (self.req.original_link if self.req.original_link
+ # In case someone feeds something downright stupid
+ # to InstallRequirement's constructor.
+ else getattr(self.req, 'req', None))
+ return ' %s --hash=%s:%s' % (package or 'unknown package',
+ FAVORITE_HASH,
+ self.gotten_hash)
+
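+# Illustrative output of HashMissing.body(), assuming FAVORITE_HASH is
+# 'sha256' and the archive came from a URL-based requirement:
+#
+#     https://example.com/pkg-1.0.tar.gz --hash=sha256:9f86d0...
+#
+# ('example.com' and the digest are placeholders.)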
+
+class HashUnpinned(HashError):
+ """A requirement had a hash specified but was not pinned to a specific
+ version."""
+
+ order = 3
+ head = ('In --require-hashes mode, all requirements must have their '
+ 'versions pinned with ==. These do not:')
+
+
+class HashMismatch(HashError):
+ """
+ Distribution file hash values don't match.
+
+ :ivar package_name: The name of the package that triggered the hash
+        mismatch. Feel free to write to this after the exception is raised to
+ improve its error message.
+
+ """
+ order = 4
+ head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
+ 'FILE. If you have updated the package versions, please update '
+ 'the hashes. Otherwise, examine the package contents carefully; '
+ 'someone may have tampered with them.')
+
+ def __init__(self, allowed, gots):
+ """
+ :param allowed: A dict of algorithm names pointing to lists of allowed
+ hex digests
+ :param gots: A dict of algorithm names pointing to hashes we
+ actually got from the files under suspicion
+ """
+ self.allowed = allowed
+ self.gots = gots
+
+ def body(self):
+ return ' %s:\n%s' % (self._requirement_name(),
+ self._hash_comparison())
+
+ def _hash_comparison(self):
+ """
+ Return a comparison of actual and expected hash values.
+
+ Example::
+
+ Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
+ or 123451234512345123451234512345123451234512345
+ Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
+
+ """
+ def hash_then_or(hash_name):
+ # For now, all the decent hashes have 6-char names, so we can get
+ # away with hard-coding space literals.
+ return chain([hash_name], repeat(' or'))
+
+ lines = []
+ for hash_name, expecteds in iteritems(self.allowed):
+ prefix = hash_then_or(hash_name)
+ lines.extend((' Expected %s %s' % (next(prefix), e))
+ for e in expecteds)
+ lines.append(' Got %s\n' %
+ self.gots[hash_name].hexdigest())
+ prefix = ' or'
+ return '\n'.join(lines)
+
+
+class UnsupportedPythonVersion(InstallationError):
+ """Unsupported python version according to Requires-Python package
+ metadata."""
+
+
+class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
+    """Raised when there are errors while loading a configuration file.
+    """
+
+ def __init__(self, reason="could not be loaded", fname=None, error=None):
+ super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
+ self.reason = reason
+ self.fname = fname
+ self.error = error
+
+ def __str__(self):
+ if self.fname is not None:
+ message_part = " in {}.".format(self.fname)
+ else:
+ assert self.error is not None
+ message_part = ".\n{}\n".format(self.error.message)
+ return "Configuration file {}{}".format(self.reason, message_part)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py
new file mode 100644
index 00000000..a1aaad59
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py
@@ -0,0 +1,1508 @@
+"""Routines related to PyPI, indexes"""
+from __future__ import absolute_import
+
+import cgi
+import itertools
+import logging
+import mimetypes
+import os
+import re
+
+from pip._vendor import html5lib, requests, six
+from pip._vendor.distlib.compat import unescape
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.download import is_url, url_to_path
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
+ UnsupportedWheel,
+)
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.compat import ipaddress
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, path_to_url,
+ redact_password_from_url,
+)
+from pip._internal.utils.packaging import check_requires_python
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import Wheel
+
+if MYPY_CHECK_RUNNING:
+ from logging import Logger
+ from typing import (
+ Any, Callable, FrozenSet, Iterable, Iterator, List, MutableMapping,
+ Optional, Sequence, Set, Text, Tuple, Union,
+ )
+ import xml.etree.ElementTree
+ from pip._vendor.packaging.version import _BaseVersion
+ from pip._vendor.requests import Response
+ from pip._internal.models.search_scope import SearchScope
+ from pip._internal.req import InstallRequirement
+ from pip._internal.download import PipSession
+ from pip._internal.pep425tags import Pep425Tag
+ from pip._internal.utils.hashes import Hashes
+
+ BuildTag = Tuple[Any, ...] # either empty tuple or Tuple[int, str]
+ CandidateSortingKey = (
+ Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]]
+ )
+ HTMLElement = xml.etree.ElementTree.Element
+ SecureOrigin = Tuple[str, str, Optional[str]]
+
+
+__all__ = ['FormatControl', 'FoundCandidates', 'PackageFinder']
+
+
+SECURE_ORIGINS = [
+ # protocol, hostname, port
+ # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
+ ("https", "*", "*"),
+ ("*", "localhost", "*"),
+ ("*", "127.0.0.0/8", "*"),
+ ("*", "::1/128", "*"),
+ ("file", "*", None),
+ # ssh is always secure.
+ ("ssh", "*", "*"),
+] # type: List[SecureOrigin]
+
+
+logger = logging.getLogger(__name__)
+
+
+def _match_vcs_scheme(url):
+ # type: (str) -> Optional[str]
+ """Look for VCS schemes in the URL.
+
+ Returns the matched VCS scheme, or None if there's no match.
+ """
+ from pip._internal.vcs import vcs
+ for scheme in vcs.schemes:
+ if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
+ return scheme
+ return None
+
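+# Illustrative matches: 'git+https://github.com/pypa/pip.git' returns
+# 'git' (the character following the scheme is '+'), and
+# 'svn://example.com/repo' returns 'svn'; a plain
+# 'https://pypi.org/simple/' returns None.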
+
+def _is_url_like_archive(url):
+ # type: (str) -> bool
+ """Return whether the URL looks like an archive.
+ """
+ filename = Link(url).filename
+ for bad_ext in ARCHIVE_EXTENSIONS:
+ if filename.endswith(bad_ext):
+ return True
+ return False
+
+
+class _NotHTML(Exception):
+ def __init__(self, content_type, request_desc):
+ # type: (str, str) -> None
+ super(_NotHTML, self).__init__(content_type, request_desc)
+ self.content_type = content_type
+ self.request_desc = request_desc
+
+
+def _ensure_html_header(response):
+ # type: (Response) -> None
+ """Check the Content-Type header to ensure the response contains HTML.
+
+ Raises `_NotHTML` if the content type is not text/html.
+ """
+ content_type = response.headers.get("Content-Type", "")
+ if not content_type.lower().startswith("text/html"):
+ raise _NotHTML(content_type, response.request.method)
+
+
+class _NotHTTP(Exception):
+ pass
+
+
+def _ensure_html_response(url, session):
+ # type: (str, PipSession) -> None
+ """Send a HEAD request to the URL, and ensure the response contains HTML.
+
+ Raises `_NotHTTP` if the URL is not available for a HEAD request, or
+ `_NotHTML` if the content type is not text/html.
+ """
+ scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
+ if scheme not in {'http', 'https'}:
+ raise _NotHTTP()
+
+ resp = session.head(url, allow_redirects=True)
+ resp.raise_for_status()
+
+ _ensure_html_header(resp)
+
+
+def _get_html_response(url, session):
+ # type: (str, PipSession) -> Response
+ """Access an HTML page with GET, and return the response.
+
+ This consists of three parts:
+
+ 1. If the URL looks suspiciously like an archive, send a HEAD first to
+ check the Content-Type is HTML, to avoid downloading a large file.
+ Raise `_NotHTTP` if the content type cannot be determined, or
+ `_NotHTML` if it is not HTML.
+ 2. Actually perform the request. Raise HTTP exceptions on network failures.
+ 3. Check the Content-Type header to make sure we got HTML, and raise
+ `_NotHTML` otherwise.
+ """
+ if _is_url_like_archive(url):
+ _ensure_html_response(url, session=session)
+
+ logger.debug('Getting page %s', redact_password_from_url(url))
+
+ resp = session.get(
+ url,
+ headers={
+ "Accept": "text/html",
+            # We don't want to blindly return cached data for
+            # /simple/, because authors generally expect that
+            # twine upload && pip install will function, but if
+            # they've done a pip install in the last ~10 minutes
+            # it won't. By setting this to zero we will not
+            # blindly use any cached data. The benefit of using
+            # max-age=0 instead of no-cache is that we still
+            # support conditional requests, so we still minimize
+            # traffic sent in cases where the page hasn't changed
+            # at all; we just always incur the round trip for the
+            # conditional GET now instead of only once per 10
+            # minutes.
+            # For more information, please see pypa/pip#5670.
+ "Cache-Control": "max-age=0",
+ },
+ )
+ resp.raise_for_status()
+
+ # The check for archives above only works if the url ends with
+ # something that looks like an archive. However that is not a
+    # requirement of a url. Unless we issue a HEAD request on every
+ # url we cannot know ahead of time for sure if something is HTML
+ # or not. However we can check after we've downloaded it.
+ _ensure_html_header(resp)
+
+ return resp
+
+
+def _handle_get_page_fail(
+ link, # type: Link
+ reason, # type: Union[str, Exception]
+ meth=None # type: Optional[Callable[..., None]]
+):
+ # type: (...) -> None
+ if meth is None:
+ meth = logger.debug
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+
+def _get_html_page(link, session=None):
+ # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
+ if session is None:
+ raise TypeError(
+ "_get_html_page() missing 1 required keyword argument: 'session'"
+ )
+
+ url = link.url.split('#', 1)[0]
+
+ # Check for VCS schemes that do not support lookup as web pages.
+ vcs_scheme = _match_vcs_scheme(url)
+ if vcs_scheme:
+ logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
+ return None
+
+ # Tack index.html onto file:// URLs that point to directories
+ scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
+ if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
+ # add trailing slash if not present so urljoin doesn't trim
+ # final segment
+ if not url.endswith('/'):
+ url += '/'
+ url = urllib_parse.urljoin(url, 'index.html')
+ logger.debug(' file: URL is directory, getting %s', url)
+
+ try:
+ resp = _get_html_response(url, session=session)
+ except _NotHTTP:
+ logger.debug(
+ 'Skipping page %s because it looks like an archive, and cannot '
+ 'be checked by HEAD.', link,
+ )
+ except _NotHTML as exc:
+ logger.debug(
+ 'Skipping page %s because the %s request got Content-Type: %s',
+ link, exc.request_desc, exc.content_type,
+ )
+ except HTTPError as exc:
+ _handle_get_page_fail(link, exc)
+ except RetryError as exc:
+ _handle_get_page_fail(link, exc)
+ except SSLError as exc:
+ reason = "There was a problem confirming the ssl certificate: "
+ reason += str(exc)
+ _handle_get_page_fail(link, reason, meth=logger.info)
+ except requests.ConnectionError as exc:
+ _handle_get_page_fail(link, "connection error: %s" % exc)
+ except requests.Timeout:
+ _handle_get_page_fail(link, "timed out")
+ else:
+ return HTMLPage(resp.content, resp.url, resp.headers)
+ return None
+
+
+def _check_link_requires_python(
+ link, # type: Link
+ version_info, # type: Tuple[int, int, int]
+ ignore_requires_python=False, # type: bool
+):
+ # type: (...) -> bool
+ """
+ Return whether the given Python version is compatible with a link's
+ "Requires-Python" value.
+
+ :param version_info: A 3-tuple of ints representing the Python
+ major-minor-micro version to check.
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
+ value if the given Python version isn't compatible.
+ """
+ try:
+ is_compatible = check_requires_python(
+ link.requires_python, version_info=version_info,
+ )
+ except specifiers.InvalidSpecifier:
+ logger.debug(
+ "Ignoring invalid Requires-Python (%r) for link: %s",
+ link.requires_python, link,
+ )
+ else:
+ if not is_compatible:
+ version = '.'.join(map(str, version_info))
+ if not ignore_requires_python:
+ logger.debug(
+ 'Link requires a different Python (%s not in: %r): %s',
+ version, link.requires_python, link,
+ )
+ return False
+
+ logger.debug(
+ 'Ignoring failed Requires-Python check (%s not in: %r) '
+ 'for link: %s',
+ version, link.requires_python, link,
+ )
+
+ return True
+
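+# Illustrative outcomes: for a link whose Requires-Python value is
+# '>=3.6' and version_info=(2, 7, 16), this returns False (or True,
+# with a debug log, when ignore_requires_python is set); an invalid
+# specifier such as '>=x' is ignored and the link is treated as
+# compatible.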
+
+class LinkEvaluator(object):
+
+ """
+ Responsible for evaluating links for a particular project.
+ """
+
+ _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also causes
+ # that decision to be made explicit in the calling code, which helps
+ # people when reading the code.
+ def __init__(
+ self,
+ project_name, # type: str
+ canonical_name, # type: str
+ formats, # type: FrozenSet
+ target_python, # type: TargetPython
+ allow_yanked, # type: bool
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> None
+ """
+ :param project_name: The user supplied package name.
+ :param canonical_name: The canonical package name.
+ :param formats: The formats allowed for this package. Should be a set
+ with 'binary' or 'source' or both in it.
+ :param target_python: The target Python interpreter to use when
+ evaluating link compatibility. This is used, for example, to
+ check wheel compatibility, as well as when checking the Python
+ version, e.g. the Python version embedded in a link filename
+ (or egg fragment) and against an HTML link's optional PEP 503
+ "data-requires-python" attribute.
+ :param allow_yanked: Whether files marked as yanked (in the sense
+ of PEP 592) are permitted to be candidates for install.
+ :param ignore_requires_python: Whether to ignore incompatible
+ PEP 503 "data-requires-python" values in HTML links. Defaults
+ to False.
+ """
+ if ignore_requires_python is None:
+ ignore_requires_python = False
+
+ self._allow_yanked = allow_yanked
+ self._canonical_name = canonical_name
+ self._ignore_requires_python = ignore_requires_python
+ self._formats = formats
+ self._target_python = target_python
+
+ self.project_name = project_name
+
+ def evaluate_link(self, link):
+ # type: (Link) -> Tuple[bool, Optional[Text]]
+ """
+ Determine whether a link is a candidate for installation.
+
+ :return: A tuple (is_candidate, result), where `result` is (1) a
+ version string if `is_candidate` is True, and (2) if
+ `is_candidate` is False, an optional string to log the reason
+ the link fails to qualify.
+ """
+ version = None
+ if link.is_yanked and not self._allow_yanked:
+ reason = link.yanked_reason or '<none given>'
+ # Mark this as a unicode string to prevent "UnicodeEncodeError:
+ # 'ascii' codec can't encode character" in Python 2 when
+ # the reason contains non-ascii characters.
+ return (False, u'yanked for reason: {}'.format(reason))
+
+ if link.egg_fragment:
+ egg_info = link.egg_fragment
+ ext = link.ext
+ else:
+ egg_info, ext = link.splitext()
+ if not ext:
+ return (False, 'not a file')
+ if ext not in SUPPORTED_EXTENSIONS:
+ return (False, 'unsupported archive format: %s' % ext)
+ if "binary" not in self._formats and ext == WHEEL_EXTENSION:
+ reason = 'No binaries permitted for %s' % self.project_name
+ return (False, reason)
+ if "macosx10" in link.path and ext == '.zip':
+ return (False, 'macosx10 one')
+ if ext == WHEEL_EXTENSION:
+ try:
+ wheel = Wheel(link.filename)
+ except InvalidWheelFilename:
+ return (False, 'invalid wheel filename')
+ if canonicalize_name(wheel.name) != self._canonical_name:
+ reason = 'wrong project name (not %s)' % self.project_name
+ return (False, reason)
+
+ supported_tags = self._target_python.get_tags()
+ if not wheel.supported(supported_tags):
+ # Include the wheel's tags in the reason string to
+ # simplify troubleshooting compatibility issues.
+ file_tags = wheel.get_formatted_file_tags()
+ reason = (
+ "none of the wheel's tags match: {}".format(
+ ', '.join(file_tags)
+ )
+ )
+ return (False, reason)
+
+ version = wheel.version
+
+ # This should be up by the self.ok_binary check, but see issue 2700.
+ if "source" not in self._formats and ext != WHEEL_EXTENSION:
+ return (False, 'No sources permitted for %s' % self.project_name)
+
+ if not version:
+ version = _extract_version_from_fragment(
+ egg_info, self._canonical_name,
+ )
+ if not version:
+ return (
+ False, 'Missing project version for %s' % self.project_name,
+ )
+
+ match = self._py_version_re.search(version)
+ if match:
+ version = version[:match.start()]
+ py_version = match.group(1)
+ if py_version != self._target_python.py_version:
+ return (False, 'Python version is incorrect')
+
+ supports_python = _check_link_requires_python(
+ link, version_info=self._target_python.py_version_info,
+ ignore_requires_python=self._ignore_requires_python,
+ )
+ if not supports_python:
+ # Return None for the reason text to suppress calling
+ # _log_skipped_link().
+ return (False, None)
+
+ logger.debug('Found link %s, version: %s', link, version)
+
+ return (True, version)
+
+
+def filter_unallowed_hashes(
+ candidates, # type: List[InstallationCandidate]
+ hashes, # type: Hashes
+ project_name, # type: str
+):
+ # type: (...) -> List[InstallationCandidate]
+ """
+ Filter out candidates whose hashes aren't allowed, and return a new
+ list of candidates.
+
+ If at least one candidate has an allowed hash, then all candidates with
+ either an allowed hash or no hash specified are returned. Otherwise,
+ the given candidates are returned.
+
+ Including the candidates with no hash specified when there is a match
+ allows a warning to be logged if there is a more preferred candidate
+ with no hash specified. Returning all candidates in the case of no
+ matches lets pip report the hash of the candidate that would otherwise
+ have been installed (e.g. permitting the user to more easily update
+ their requirements file with the desired hash).
+ """
+ if not hashes:
+ logger.debug(
+ 'Given no hashes to check %s links for project %r: '
+ 'discarding no candidates',
+ len(candidates),
+ project_name,
+ )
+ # Make sure we're not returning back the given value.
+ return list(candidates)
+
+ matches_or_no_digest = []
+ # Collect the non-matches for logging purposes.
+ non_matches = []
+ match_count = 0
+ for candidate in candidates:
+ link = candidate.link
+ if not link.has_hash:
+ pass
+ elif link.is_hash_allowed(hashes=hashes):
+ match_count += 1
+ else:
+ non_matches.append(candidate)
+ continue
+
+ matches_or_no_digest.append(candidate)
+
+ if match_count:
+ filtered = matches_or_no_digest
+ else:
+ # Make sure we're not returning back the given value.
+ filtered = list(candidates)
+
+ if len(filtered) == len(candidates):
+ discard_message = 'discarding no candidates'
+ else:
+ discard_message = 'discarding {} non-matches:\n {}'.format(
+ len(non_matches),
+ '\n '.join(str(candidate.link) for candidate in non_matches)
+ )
+
+ logger.debug(
+ 'Checked %s links for project %r against %s hashes '
+ '(%s matches, %s no digest): %s',
+ len(candidates),
+ project_name,
+ hashes.digest_count,
+ match_count,
+ len(matches_or_no_digest) - match_count,
+ discard_message
+ )
+
+ return filtered
+
+
+class CandidatePreferences(object):
+
+ """
+ Encapsulates some of the preferences for filtering and sorting
+ InstallationCandidate objects.
+ """
+
+ def __init__(
+ self,
+ prefer_binary=False, # type: bool
+ allow_all_prereleases=False, # type: bool
+ ):
+ # type: (...) -> None
+ """
+ :param allow_all_prereleases: Whether to allow all pre-releases.
+ """
+ self.allow_all_prereleases = allow_all_prereleases
+ self.prefer_binary = prefer_binary
+
+
+class CandidateEvaluator(object):
+
+ """
+ Responsible for filtering and sorting candidates for installation based
+ on what tags are valid.
+ """
+
+ @classmethod
+ def create(
+ cls,
+ project_name, # type: str
+ target_python=None, # type: Optional[TargetPython]
+ prefer_binary=False, # type: bool
+ allow_all_prereleases=False, # type: bool
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> CandidateEvaluator
+ """Create a CandidateEvaluator object.
+
+ :param target_python: The target Python interpreter to use when
+ checking compatibility. If None (the default), a TargetPython
+ object will be constructed from the running Python.
+ :param hashes: An optional collection of allowed hashes.
+ """
+ if target_python is None:
+ target_python = TargetPython()
+ if specifier is None:
+ specifier = specifiers.SpecifierSet()
+
+ supported_tags = target_python.get_tags()
+
+ return cls(
+ project_name=project_name,
+ supported_tags=supported_tags,
+ specifier=specifier,
+ prefer_binary=prefer_binary,
+ allow_all_prereleases=allow_all_prereleases,
+ hashes=hashes,
+ )
+
+ def __init__(
+ self,
+ project_name, # type: str
+ supported_tags, # type: List[Pep425Tag]
+ specifier, # type: specifiers.BaseSpecifier
+ prefer_binary=False, # type: bool
+ allow_all_prereleases=False, # type: bool
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> None
+ """
+ :param supported_tags: The PEP 425 tags supported by the target
+ Python in order of preference (most preferred first).
+ """
+ self._allow_all_prereleases = allow_all_prereleases
+ self._hashes = hashes
+ self._prefer_binary = prefer_binary
+ self._project_name = project_name
+ self._specifier = specifier
+ self._supported_tags = supported_tags
+
+ def get_applicable_candidates(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ ):
+ # type: (...) -> List[InstallationCandidate]
+ """
+ Return the applicable candidates from a list of candidates.
+ """
+ # Using None infers from the specifier instead.
+ allow_prereleases = self._allow_all_prereleases or None
+ specifier = self._specifier
+ versions = {
+ str(v) for v in specifier.filter(
+ # We turn the version object into a str here because otherwise
+ # when we're debundled but setuptools isn't, Python will see
+ # packaging.version.Version and
+ # pkg_resources._vendor.packaging.version.Version as different
+ # types. This way we'll use a str as a common data interchange
+ # format. If we stop using the pkg_resources provided specifier
+ # and start using our own, we can drop the cast to str().
+ (str(c.version) for c in candidates),
+ prereleases=allow_prereleases,
+ )
+ }
+
+ # Again, converting version to str to deal with debundling.
+ applicable_candidates = [
+ c for c in candidates if str(c.version) in versions
+ ]
+
+ return filter_unallowed_hashes(
+ candidates=applicable_candidates,
+ hashes=self._hashes,
+ project_name=self._project_name,
+ )
+
+ def make_found_candidates(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ ):
+ # type: (...) -> FoundCandidates
+ """
+ Create and return a `FoundCandidates` instance.
+
+ :param specifier: An optional object implementing `filter`
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+ versions.
+ """
+ applicable_candidates = self.get_applicable_candidates(candidates)
+
+ return FoundCandidates(
+ candidates,
+ applicable_candidates=applicable_candidates,
+ evaluator=self,
+ )
+
+ def _sort_key(self, candidate):
+ # type: (InstallationCandidate) -> CandidateSortingKey
+ """
+ Function to pass as the `key` argument to a call to sorted() to sort
+ InstallationCandidates by preference.
+
+ Returns a tuple such that tuples sorting as greater using Python's
+ default comparison operator are more preferred.
+
+ The preference is as follows:
+
+ First and foremost, candidates with allowed (matching) hashes are
+ always preferred over candidates without matching hashes. This is
+ because e.g. if the only candidate with an allowed hash is yanked,
+ we still want to use that candidate.
+
+ Second, excepting hash considerations, candidates that have been
+ yanked (in the sense of PEP 592) are always less preferred than
+ candidates that haven't been yanked. Then:
+
+ If not finding wheels, they are sorted by version only.
+ If finding wheels, then the sort order is by version, then:
+ 1. existing installs
+ 2. wheels ordered via Wheel.support_index_min(self._supported_tags)
+ 3. source archives
+ If prefer_binary was set, then all wheels are sorted above sources.
+
+ Note: it was considered to embed this logic into the Link
+ comparison operators, but then different sdist links
+          with the same version would have to be considered equal.
+ """
+ valid_tags = self._supported_tags
+ support_num = len(valid_tags)
+ build_tag = tuple() # type: BuildTag
+ binary_preference = 0
+ link = candidate.link
+ if link.is_wheel:
+ # can raise InvalidWheelFilename
+ wheel = Wheel(link.filename)
+ if not wheel.supported(valid_tags):
+ raise UnsupportedWheel(
+ "%s is not a supported wheel for this platform. It "
+ "can't be sorted." % wheel.filename
+ )
+ if self._prefer_binary:
+ binary_preference = 1
+ pri = -(wheel.support_index_min(valid_tags))
+ if wheel.build_tag is not None:
+ match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
+ build_tag_groups = match.groups()
+ build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+ else: # sdist
+ pri = -(support_num)
+ has_allowed_hash = int(link.is_hash_allowed(self._hashes))
+ yank_value = -1 * int(link.is_yanked) # -1 for yanked.
+ return (
+ has_allowed_hash, yank_value, binary_preference, candidate.version,
+ build_tag, pri,
+ )
+
+ def get_best_candidate(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ ):
+ # type: (...) -> Optional[InstallationCandidate]
+ """
+ Return the best candidate per the instance's sort order, or None if
+ no candidate is acceptable.
+ """
+ if not candidates:
+ return None
+
+ best_candidate = max(candidates, key=self._sort_key)
+
+ # Log a warning per PEP 592 if necessary before returning.
+ link = best_candidate.link
+ if link.is_yanked:
+ reason = link.yanked_reason or '<none given>'
+ msg = (
+ # Mark this as a unicode string to prevent
+ # "UnicodeEncodeError: 'ascii' codec can't encode character"
+ # in Python 2 when the reason contains non-ascii characters.
+ u'The candidate selected for download or install is a '
+ 'yanked version: {candidate}\n'
+ 'Reason for being yanked: {reason}'
+ ).format(candidate=best_candidate, reason=reason)
+ logger.warning(msg)
+
+ return best_candidate
+
+
+class FoundCandidates(object):
+ """A collection of candidates, returned by `PackageFinder.find_candidates`.
+
+ This class is only intended to be instantiated by CandidateEvaluator's
+ `make_found_candidates()` method.
+ """
+
+ def __init__(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ applicable_candidates, # type: List[InstallationCandidate]
+ evaluator, # type: CandidateEvaluator
+ ):
+ # type: (...) -> None
+ """
+ :param candidates: A sequence of all available candidates found.
+ :param applicable_candidates: The applicable candidates.
+ :param evaluator: A CandidateEvaluator object to sort applicable
+ candidates by order of preference.
+ """
+ self._applicable_candidates = applicable_candidates
+ self._candidates = candidates
+ self._evaluator = evaluator
+
+ def iter_all(self):
+ # type: () -> Iterable[InstallationCandidate]
+ """Iterate through all candidates.
+ """
+ return iter(self._candidates)
+
+ def iter_applicable(self):
+ # type: () -> Iterable[InstallationCandidate]
+ """Iterate through the applicable candidates.
+ """
+ return iter(self._applicable_candidates)
+
+ def get_best(self):
+ # type: () -> Optional[InstallationCandidate]
+ """Return the best candidate available, or None if no applicable
+ candidates are found.
+ """
+ candidates = list(self.iter_applicable())
+ return self._evaluator.get_best_candidate(candidates)
+
+
+class PackageFinder(object):
+ """This finds packages.
+
+ This is meant to match easy_install's technique for looking for
+ packages, by reading pages and looking for appropriate links.
+ """
+
+ def __init__(
+ self,
+ search_scope, # type: SearchScope
+ session, # type: PipSession
+ target_python, # type: TargetPython
+ allow_yanked, # type: bool
+ format_control=None, # type: Optional[FormatControl]
+ trusted_hosts=None, # type: Optional[List[str]]
+ candidate_prefs=None, # type: CandidatePreferences
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> None
+ """
+ This constructor is primarily meant to be used by the create() class
+ method and from tests.
+
+ :param session: The Session to use to make requests.
+ :param format_control: A FormatControl object, used to control
+ the selection of source packages / binary packages when consulting
+ the index and links.
+ :param candidate_prefs: Options to use when creating a
+ CandidateEvaluator object.
+ """
+ if trusted_hosts is None:
+ trusted_hosts = []
+ if candidate_prefs is None:
+ candidate_prefs = CandidatePreferences()
+
+ format_control = format_control or FormatControl(set(), set())
+
+ self._allow_yanked = allow_yanked
+ self._candidate_prefs = candidate_prefs
+ self._ignore_requires_python = ignore_requires_python
+ self._target_python = target_python
+
+ self.search_scope = search_scope
+ self.session = session
+ self.format_control = format_control
+ self.trusted_hosts = trusted_hosts
+
+ # These are boring links that have already been logged somehow.
+ self._logged_links = set() # type: Set[Link]
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also causes
+ # that decision to be made explicit in the calling code, which helps
+ # people when reading the code.
+ @classmethod
+ def create(
+ cls,
+ search_scope, # type: SearchScope
+ selection_prefs, # type: SelectionPreferences
+ trusted_hosts=None, # type: Optional[List[str]]
+ session=None, # type: Optional[PipSession]
+ target_python=None, # type: Optional[TargetPython]
+ ):
+ # type: (...) -> PackageFinder
+ """Create a PackageFinder.
+
+ :param selection_prefs: The candidate selection preferences, as a
+ SelectionPreferences object.
+ :param trusted_hosts: Domains not to emit warnings for when not using
+ HTTPS.
+ :param session: The Session to use to make requests.
+ :param target_python: The target Python interpreter to use when
+ checking compatibility. If None (the default), a TargetPython
+ object will be constructed from the running Python.
+ """
+ if session is None:
+ raise TypeError(
+ "PackageFinder.create() missing 1 required keyword argument: "
+ "'session'"
+ )
+ if target_python is None:
+ target_python = TargetPython()
+
+ candidate_prefs = CandidatePreferences(
+ prefer_binary=selection_prefs.prefer_binary,
+ allow_all_prereleases=selection_prefs.allow_all_prereleases,
+ )
+
+ return cls(
+ candidate_prefs=candidate_prefs,
+ search_scope=search_scope,
+ session=session,
+ target_python=target_python,
+ allow_yanked=selection_prefs.allow_yanked,
+ format_control=selection_prefs.format_control,
+ trusted_hosts=trusted_hosts,
+ ignore_requires_python=selection_prefs.ignore_requires_python,
+ )
+
+ @property
+ def find_links(self):
+ # type: () -> List[str]
+ return self.search_scope.find_links
+
+ @property
+ def index_urls(self):
+ # type: () -> List[str]
+ return self.search_scope.index_urls
+
+ @property
+ def allow_all_prereleases(self):
+ # type: () -> bool
+ return self._candidate_prefs.allow_all_prereleases
+
+ def set_allow_all_prereleases(self):
+ # type: () -> None
+ self._candidate_prefs.allow_all_prereleases = True
+
+ def add_trusted_host(self, host, source=None):
+ # type: (str, Optional[str]) -> None
+ """
+ :param source: An optional source string, for logging where the host
+ string came from.
+ """
+ # It is okay to add a previously added host because PipSession stores
+ # the resulting prefixes in a dict.
+ msg = 'adding trusted host: {!r}'.format(host)
+ if source is not None:
+ msg += ' (from {})'.format(source)
+ logger.info(msg)
+ self.session.add_insecure_host(host)
+ if host in self.trusted_hosts:
+ return
+
+ self.trusted_hosts.append(host)
+
+ def iter_secure_origins(self):
+ # type: () -> Iterator[SecureOrigin]
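+        # Yields the built-in secure origins, then one wildcard entry per
+        # trusted host, e.g. (illustrative) ('*', 'example.com', '*').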
+ for secure_origin in SECURE_ORIGINS:
+ yield secure_origin
+ for host in self.trusted_hosts:
+ yield ('*', host, '*')
+
+ @staticmethod
+ def _sort_locations(locations, expand_dir=False):
+ # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
+ """
+        Sort locations into "files" (archives) and "urls", and return
+        a pair of lists (files, urls).
+ """
+ files = []
+ urls = []
+
+ # puts the url for the given file path into the appropriate list
+ def sort_path(path):
+ url = path_to_url(path)
+ if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
+ urls.append(url)
+ else:
+ files.append(url)
+
+ for url in locations:
+
+ is_local_path = os.path.exists(url)
+ is_file_url = url.startswith('file:')
+
+ if is_local_path or is_file_url:
+ if is_local_path:
+ path = url
+ else:
+ path = url_to_path(url)
+ if os.path.isdir(path):
+ if expand_dir:
+ path = os.path.realpath(path)
+ for item in os.listdir(path):
+ sort_path(os.path.join(path, item))
+ elif is_file_url:
+ urls.append(url)
+ else:
+ logger.warning(
+ "Path '{0}' is ignored: "
+ "it is a directory.".format(path),
+ )
+ elif os.path.isfile(path):
+ sort_path(path)
+ else:
+ logger.warning(
+ "Url '%s' is ignored: it is neither a file "
+ "nor a directory.", url,
+ )
+ elif is_url(url):
+ # Only add url with clear scheme
+ urls.append(url)
+ else:
+ logger.warning(
+ "Url '%s' is ignored. It is either a non-existing "
+ "path or lacks a specific scheme.", url,
+ )
+
+ return files, urls
+
+ def _validate_secure_origin(self, logger, location):
+ # type: (Logger, Link) -> bool
+ # Determine if this url used a secure transport mechanism
+ parsed = urllib_parse.urlparse(str(location))
+ origin = (parsed.scheme, parsed.hostname, parsed.port)
+
+ # The protocol to use to see if the protocol matches.
+ # Don't count the repository type as part of the protocol: in
+ # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
+ # the last scheme.)
+ protocol = origin[0].rsplit('+', 1)[-1]
+
+ # Determine if our origin is a secure origin by looking through our
+ # hardcoded list of secure origins, as well as any additional ones
+ # configured on this PackageFinder instance.
+ for secure_origin in self.iter_secure_origins():
+ if protocol != secure_origin[0] and secure_origin[0] != "*":
+ continue
+
+ try:
+ # We need to do this decode dance to ensure that we have a
+ # unicode object, even on Python 2.x.
+ addr = ipaddress.ip_address(
+ origin[1]
+ if (
+ isinstance(origin[1], six.text_type) or
+ origin[1] is None
+ )
+ else origin[1].decode("utf8")
+ )
+ network = ipaddress.ip_network(
+ secure_origin[1]
+ if isinstance(secure_origin[1], six.text_type)
+ # setting secure_origin[1] to proper Union[bytes, str]
+ # creates problems in other places
+ else secure_origin[1].decode("utf8") # type: ignore
+ )
+ except ValueError:
+                # We don't have both a valid address and a valid network, so
+ # we'll check this origin against hostnames.
+ if (origin[1] and
+ origin[1].lower() != secure_origin[1].lower() and
+ secure_origin[1] != "*"):
+ continue
+ else:
+ # We have a valid address and network, so see if the address
+ # is contained within the network.
+ if addr not in network:
+ continue
+
+            # Check to see if the port matches
+ if (origin[2] != secure_origin[2] and
+ secure_origin[2] != "*" and
+ secure_origin[2] is not None):
+ continue
+
+ # If we've gotten here, then this origin matches the current
+ # secure origin and we should return True
+ return True
+
+ # If we've gotten to this point, then the origin isn't secure and we
+ # will not accept it as a valid location to search. We will however
+ # log a warning that we are ignoring it.
+ logger.warning(
+ "The repository located at %s is not a trusted or secure host and "
+ "is being ignored. If this repository is available via HTTPS we "
+ "recommend you use HTTPS instead, otherwise you may silence "
+ "this warning and allow it anyway with '--trusted-host %s'.",
+ parsed.hostname,
+ parsed.hostname,
+ )
+
+ return False
+
+ def make_link_evaluator(self, project_name):
+ # type: (str) -> LinkEvaluator
+ canonical_name = canonicalize_name(project_name)
+ formats = self.format_control.get_allowed_formats(canonical_name)
+
+ return LinkEvaluator(
+ project_name=project_name,
+ canonical_name=canonical_name,
+ formats=formats,
+ target_python=self._target_python,
+ allow_yanked=self._allow_yanked,
+ ignore_requires_python=self._ignore_requires_python,
+ )
+
+ def find_all_candidates(self, project_name):
+ # type: (str) -> List[InstallationCandidate]
+ """Find all available InstallationCandidate for project_name
+
+ This checks index_urls and find_links.
+ All versions found are returned as an InstallationCandidate list.
+
+ See LinkEvaluator.evaluate_link() for details on which files
+ are accepted.
+ """
+ search_scope = self.search_scope
+ index_locations = search_scope.get_index_urls_locations(project_name)
+ index_file_loc, index_url_loc = self._sort_locations(index_locations)
+ fl_file_loc, fl_url_loc = self._sort_locations(
+ self.find_links, expand_dir=True,
+ )
+
+ file_locations = (Link(url) for url in itertools.chain(
+ index_file_loc, fl_file_loc,
+ ))
+
+ # We trust every url that the user has given us whether it was given
+ # via --index-url or --find-links.
+        # We want to filter out anything that does not have a secure origin.
+ url_locations = [
+ link for link in itertools.chain(
+ (Link(url) for url in index_url_loc),
+ (Link(url) for url in fl_url_loc),
+ )
+ if self._validate_secure_origin(logger, link)
+ ]
+
+ logger.debug('%d location(s) to search for versions of %s:',
+ len(url_locations), project_name)
+
+ for location in url_locations:
+ logger.debug('* %s', location)
+
+ link_evaluator = self.make_link_evaluator(project_name)
+ find_links_versions = self._package_versions(
+ link_evaluator,
+ # We trust every directly linked archive in find_links
+ (Link(url, '-f') for url in self.find_links),
+ )
+
+ page_versions = []
+ for page in self._get_pages(url_locations, project_name):
+ logger.debug('Analyzing links from page %s', page.url)
+ with indent_log():
+ page_versions.extend(
+ self._package_versions(link_evaluator, page.iter_links())
+ )
+
+ file_versions = self._package_versions(link_evaluator, file_locations)
+ if file_versions:
+ file_versions.sort(reverse=True)
+ logger.debug(
+ 'Local files found: %s',
+ ', '.join([
+ url_to_path(candidate.link.url)
+ for candidate in file_versions
+ ])
+ )
+
+ # This is an intentional priority ordering
+ return file_versions + find_links_versions + page_versions
+
+ def make_candidate_evaluator(
+ self,
+ project_name, # type: str
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> CandidateEvaluator
+ """Create a CandidateEvaluator object to use.
+ """
+ candidate_prefs = self._candidate_prefs
+ return CandidateEvaluator.create(
+ project_name=project_name,
+ target_python=self._target_python,
+ prefer_binary=candidate_prefs.prefer_binary,
+ allow_all_prereleases=candidate_prefs.allow_all_prereleases,
+ specifier=specifier,
+ hashes=hashes,
+ )
+
+ def find_candidates(
+ self,
+ project_name, # type: str
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> FoundCandidates
+ """Find matches for the given project and specifier.
+
+ :param specifier: An optional object implementing `filter`
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+ versions.
+
+ :return: A `FoundCandidates` instance.
+ """
+ candidates = self.find_all_candidates(project_name)
+ candidate_evaluator = self.make_candidate_evaluator(
+ project_name=project_name,
+ specifier=specifier,
+ hashes=hashes,
+ )
+ return candidate_evaluator.make_found_candidates(candidates)
+
+ def find_requirement(self, req, upgrade):
+ # type: (InstallRequirement, bool) -> Optional[Link]
+ """Try to find a Link matching req
+
+        Expects req, an InstallRequirement, and upgrade, a boolean.
+        Returns a Link if found; raises DistributionNotFound or
+        BestVersionAlreadyInstalled otherwise.
+ """
+ hashes = req.hashes(trust_internet=False)
+ candidates = self.find_candidates(
+ req.name, specifier=req.specifier, hashes=hashes,
+ )
+ best_candidate = candidates.get_best()
+
+ installed_version = None # type: Optional[_BaseVersion]
+ if req.satisfied_by is not None:
+ installed_version = parse_version(req.satisfied_by.version)
+
+ def _format_versions(cand_iter):
+ # This repeated parse_version and str() conversion is needed to
+ # handle different vendoring sources from pip and pkg_resources.
+ # If we stop using the pkg_resources provided specifier and start
+ # using our own, we can drop the cast to str().
+ return ", ".join(sorted(
+ {str(c.version) for c in cand_iter},
+ key=parse_version,
+ )) or "none"
+
+ if installed_version is None and best_candidate is None:
+ logger.critical(
+ 'Could not find a version that satisfies the requirement %s '
+ '(from versions: %s)',
+ req,
+ _format_versions(candidates.iter_all()),
+ )
+
+ raise DistributionNotFound(
+ 'No matching distribution found for %s' % req
+ )
+
+ best_installed = False
+ if installed_version and (
+ best_candidate is None or
+ best_candidate.version <= installed_version):
+ best_installed = True
+
+ if not upgrade and installed_version is not None:
+ if best_installed:
+ logger.debug(
+ 'Existing installed version (%s) is most up-to-date and '
+ 'satisfies requirement',
+ installed_version,
+ )
+ else:
+ logger.debug(
+ 'Existing installed version (%s) satisfies requirement '
+ '(most up-to-date version is %s)',
+ installed_version,
+ best_candidate.version,
+ )
+ return None
+
+ if best_installed:
+            # We have an existing version, and it's the best version
+ logger.debug(
+ 'Installed version (%s) is most up-to-date (past versions: '
+ '%s)',
+ installed_version,
+ _format_versions(candidates.iter_applicable()),
+ )
+ raise BestVersionAlreadyInstalled
+
+ logger.debug(
+ 'Using version %s (newest of versions: %s)',
+ best_candidate.version,
+ _format_versions(candidates.iter_applicable()),
+ )
+ return best_candidate.link
+
+ def _get_pages(self, locations, project_name):
+ # type: (Iterable[Link], str) -> Iterable[HTMLPage]
+ """
+        Yields HTMLPage objects from the given locations, skipping
+        locations that have errors.
+ """
+ seen = set() # type: Set[Link]
+ for location in locations:
+ if location in seen:
+ continue
+ seen.add(location)
+
+ page = _get_html_page(location, session=self.session)
+ if page is None:
+ continue
+
+ yield page
+
+ def _sort_links(self, links):
+ # type: (Iterable[Link]) -> List[Link]
+ """
+ Returns elements of links in order, non-egg links first, egg links
+ second, while eliminating duplicates
+ """
+ eggs, no_eggs = [], []
+ seen = set() # type: Set[Link]
+ for link in links:
+ if link not in seen:
+ seen.add(link)
+ if link.egg_fragment:
+ eggs.append(link)
+ else:
+ no_eggs.append(link)
+ return no_eggs + eggs
+
+ def _log_skipped_link(self, link, reason):
+ # type: (Link, Text) -> None
+ if link not in self._logged_links:
+ # Mark this as a unicode string to prevent "UnicodeEncodeError:
+ # 'ascii' codec can't encode character" in Python 2 when
+ # the reason contains non-ascii characters.
+ # Also, put the link at the end so the reason is more visible
+ # and because the link string is usually very long.
+ logger.debug(u'Skipping link: %s: %s', reason, link)
+ self._logged_links.add(link)
+
+ def get_install_candidate(self, link_evaluator, link):
+ # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate]
+ """
+ If the link is a candidate for install, convert it to an
+ InstallationCandidate and return it. Otherwise, return None.
+ """
+ is_candidate, result = link_evaluator.evaluate_link(link)
+ if not is_candidate:
+ if result:
+ self._log_skipped_link(link, reason=result)
+ return None
+
+ return InstallationCandidate(
+ project=link_evaluator.project_name,
+ link=link,
+ # Convert the Text result to str since InstallationCandidate
+ # accepts str.
+ version=str(result),
+ )
+
+ def _package_versions(self, link_evaluator, links):
+ # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate]
+ result = []
+ for link in self._sort_links(links):
+ candidate = self.get_install_candidate(link_evaluator, link)
+ if candidate is not None:
+ result.append(candidate)
+ return result
+
+
+def _find_name_version_sep(fragment, canonical_name):
+ # type: (str, str) -> int
+ """Find the separator's index based on the package's canonical name.
+
+ :param fragment: A <package>+<version> filename "fragment" (stem) or
+ egg fragment.
+ :param canonical_name: The package's canonical name.
+
+ This function is needed since the canonicalized name does not necessarily
+ have the same length as the egg info's name part. An example::
+
+ >>> fragment = 'foo__bar-1.0'
+ >>> canonical_name = 'foo-bar'
+ >>> _find_name_version_sep(fragment, canonical_name)
+ 8
+ """
+ # Project name and version must be separated by one single dash. Find all
+ # occurrences of dashes; if the string in front of it matches the canonical
+ # name, this is the one separating the name and version parts.
+ for i, c in enumerate(fragment):
+ if c != "-":
+ continue
+ if canonicalize_name(fragment[:i]) == canonical_name:
+ return i
+ raise ValueError("{} does not match {}".format(fragment, canonical_name))
+
+
+def _extract_version_from_fragment(fragment, canonical_name):
+ # type: (str, str) -> Optional[str]
+ """Parse the version string from a <package>+<version> filename
+ "fragment" (stem) or egg fragment.
+
+ :param fragment: The string to parse. E.g. foo-2.1
+ :param canonical_name: The canonicalized name of the package this
+ belongs to.
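+
+    An illustrative example, mirroring _find_name_version_sep above::
+
+        >>> _extract_version_from_fragment('foo-2.1', 'foo')
+        '2.1'
+        >>> _extract_version_from_fragment('foo2.1', 'foo') is None
+        True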
+ """
+ try:
+ version_start = _find_name_version_sep(fragment, canonical_name) + 1
+ except ValueError:
+ return None
+ version = fragment[version_start:]
+ if not version:
+ return None
+ return version
+
+
+def _determine_base_url(document, page_url):
+ """Determine the HTML document's base URL.
+
+ This looks for a ``<base>`` tag in the HTML document. If present, its href
+ attribute denotes the base URL of anchor tags in the document. If there is
+ no such tag (or if it does not have a valid href attribute), the HTML
+ file's URL is used as the base URL.
+
+ :param document: An HTML document representation. The current
+ implementation expects the result of ``html5lib.parse()``.
+ :param page_url: The URL of the HTML document.
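+
+    An illustrative example (the mirror URL is hypothetical)::
+
+        >>> import html5lib
+        >>> doc = html5lib.parse(
+        ...     '<base href="https://mirror.example/">',
+        ...     namespaceHTMLElements=False,
+        ... )
+        >>> _determine_base_url(doc, 'https://pypi.org/simple/pip/')
+        'https://mirror.example/'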
+ """
+ for base in document.findall(".//base"):
+ href = base.get("href")
+ if href is not None:
+ return href
+ return page_url
+
+
+def _get_encoding_from_headers(headers):
+ """Determine if we have any encoding information in our headers.
+ """
+ if headers and "Content-Type" in headers:
+ content_type, params = cgi.parse_header(headers["Content-Type"])
+ if "charset" in params:
+ return params['charset']
+ return None
+
+
+def _clean_link(url):
+ # type: (str) -> str
+ """Makes sure a link is fully encoded. That is, if a ' ' shows up in
+ the link, it will be rewritten to %20 (while not over-quoting
+ % or other characters)."""
+ # Split the URL into parts according to the general structure
+ # `scheme://netloc/path;parameters?query#fragment`. Note that the
+ # `netloc` can be empty and the URI will then refer to a local
+ # filesystem path.
+ result = urllib_parse.urlparse(url)
+ # In both cases below we unquote prior to quoting to make sure
+ # nothing is double quoted.
+ if result.netloc == "":
+ # On Windows the path part might contain a drive letter which
+ # should not be quoted. On Linux where drive letters do not
+ # exist, the colon should be quoted. We rely on urllib.request
+ # to do the right thing here.
+ path = urllib_request.pathname2url(
+ urllib_request.url2pathname(result.path))
+ else:
+ # In addition to the `/` character we protect `@` so that
+ # revision strings in VCS URLs are properly parsed.
+ path = urllib_parse.quote(urllib_parse.unquote(result.path), safe="/@")
+ return urllib_parse.urlunparse(result._replace(path=path))
+
+
+def _create_link_from_element(
+ anchor, # type: HTMLElement
+ page_url, # type: str
+ base_url, # type: str
+):
+ # type: (...) -> Optional[Link]
+ """
+ Convert an anchor element in a simple repository page to a Link.
+ """
+ href = anchor.get("href")
+ if not href:
+ return None
+
+ url = _clean_link(urllib_parse.urljoin(base_url, href))
+ pyrequire = anchor.get('data-requires-python')
+ pyrequire = unescape(pyrequire) if pyrequire else None
+
+ yanked_reason = anchor.get('data-yanked')
+ if yanked_reason:
+ # This is a unicode string in Python 2 (and 3).
+ yanked_reason = unescape(yanked_reason)
+
+ link = Link(
+ url,
+ comes_from=page_url,
+ requires_python=pyrequire,
+ yanked_reason=yanked_reason,
+ )
+
+ return link
+
+
+class HTMLPage(object):
+ """Represents one page, along with its URL"""
+
+ def __init__(self, content, url, headers=None):
+ # type: (bytes, str, MutableMapping[str, str]) -> None
+ self.content = content
+ self.url = url
+ self.headers = headers
+
+ def __str__(self):
+ return redact_password_from_url(self.url)
+
+ def iter_links(self):
+ # type: () -> Iterable[Link]
+ """Yields all links in the page"""
+ document = html5lib.parse(
+ self.content,
+ transport_encoding=_get_encoding_from_headers(self.headers),
+ namespaceHTMLElements=False,
+ )
+ base_url = _determine_base_url(document, self.url)
+ for anchor in document.findall(".//a"):
+ link = _create_link_from_element(
+ anchor,
+ page_url=self.url,
+ base_url=base_url,
+ )
+ if link is None:
+ continue
+ yield link
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py
new file mode 100644
index 00000000..1d9229cb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py
@@ -0,0 +1,457 @@
+"""Dependency Resolution
+
+The dependency resolution in pip is performed as follows:
+
+for top-level requirements:
+    a. only one spec is allowed per project, whether or not the specs
+       conflict; otherwise a "double requirement" exception is raised
+    b. they override sub-dependency requirements.
+for sub-dependencies:
+ a. "first found, wins" (where the order is breadth first)
+"""
+
+import logging
+import sys
+from collections import defaultdict
+from itertools import chain
+
+from pip._vendor.packaging import specifiers
+
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
+ UnsupportedPythonVersion,
+)
+from pip._internal.req.constructors import install_req_from_req_string
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ dist_in_usersite, ensure_dir, normalize_version_info,
+)
+from pip._internal.utils.packaging import (
+ check_requires_python, get_requires_python,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import DefaultDict, List, Optional, Set, Tuple
+ from pip._vendor import pkg_resources
+
+ from pip._internal.cache import WheelCache
+ from pip._internal.distributions import AbstractDistribution
+ from pip._internal.download import PipSession
+ from pip._internal.index import PackageFinder
+ from pip._internal.operations.prepare import RequirementPreparer
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.req.req_set import RequirementSet
+
+logger = logging.getLogger(__name__)
+
+
+def _check_dist_requires_python(
+ dist, # type: pkg_resources.Distribution
+ version_info, # type: Tuple[int, int, int]
+ ignore_requires_python=False, # type: bool
+):
+ # type: (...) -> None
+ """
+ Check whether the given Python version is compatible with a distribution's
+ "Requires-Python" value.
+
+ :param version_info: A 3-tuple of ints representing the Python
+ major-minor-micro version to check.
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
+ value if the given Python version isn't compatible.
+
+ :raises UnsupportedPythonVersion: When the given Python version isn't
+ compatible.
+ """
+ requires_python = get_requires_python(dist)
+ try:
+ is_compatible = check_requires_python(
+ requires_python, version_info=version_info,
+ )
+ except specifiers.InvalidSpecifier as exc:
+ logger.warning(
+ "Package %r has an invalid Requires-Python: %s",
+ dist.project_name, exc,
+ )
+ return
+
+ if is_compatible:
+ return
+
+ version = '.'.join(map(str, version_info))
+ if ignore_requires_python:
+ logger.debug(
+ 'Ignoring failed Requires-Python check for package %r: '
+ '%s not in %r',
+ dist.project_name, version, requires_python,
+ )
+ return
+
+ raise UnsupportedPythonVersion(
+ 'Package {!r} requires a different Python: {} not in {!r}'.format(
+ dist.project_name, version, requires_python,
+ ))
+
+
+class Resolver(object):
+ """Resolves which packages need to be installed/uninstalled to perform \
+ the requested operation without breaking the requirements of any package.
+ """
+
+ _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+ def __init__(
+ self,
+ preparer, # type: RequirementPreparer
+ session, # type: PipSession
+ finder, # type: PackageFinder
+ wheel_cache, # type: Optional[WheelCache]
+ use_user_site, # type: bool
+ ignore_dependencies, # type: bool
+ ignore_installed, # type: bool
+ ignore_requires_python, # type: bool
+ force_reinstall, # type: bool
+ isolated, # type: bool
+ upgrade_strategy, # type: str
+ use_pep517=None, # type: Optional[bool]
+ py_version_info=None, # type: Optional[Tuple[int, ...]]
+ ):
+ # type: (...) -> None
+ super(Resolver, self).__init__()
+ assert upgrade_strategy in self._allowed_strategies
+
+ if py_version_info is None:
+ py_version_info = sys.version_info[:3]
+ else:
+ py_version_info = normalize_version_info(py_version_info)
+
+ self._py_version_info = py_version_info
+
+ self.preparer = preparer
+ self.finder = finder
+ self.session = session
+
+ # NOTE: This would eventually be replaced with a cache that can give
+ # information about both sdist and wheels transparently.
+ self.wheel_cache = wheel_cache
+
+ # This is set in resolve
+ self.require_hashes = None # type: Optional[bool]
+
+ self.upgrade_strategy = upgrade_strategy
+ self.force_reinstall = force_reinstall
+ self.isolated = isolated
+ self.ignore_dependencies = ignore_dependencies
+ self.ignore_installed = ignore_installed
+ self.ignore_requires_python = ignore_requires_python
+ self.use_user_site = use_user_site
+ self.use_pep517 = use_pep517
+
+ self._discovered_dependencies = \
+ defaultdict(list) # type: DefaultDict[str, List]
+
+ def resolve(self, requirement_set):
+ # type: (RequirementSet) -> None
+ """Resolve what operations need to be done
+
+ As a side-effect of this method, the packages (and their dependencies)
+ are downloaded, unpacked and prepared for installation. This
+ preparation is done by ``pip.operations.prepare``.
+
+ Once PyPI has static dependency metadata available, it would be
+ possible to move the preparation to become a step separated from
+ dependency resolution.
+ """
+ # make the wheelhouse
+ if self.preparer.wheel_download_dir:
+ ensure_dir(self.preparer.wheel_download_dir)
+
+ # If any top-level requirement has a hash specified, enter
+ # hash-checking mode, which requires hashes from all.
+ root_reqs = (
+ requirement_set.unnamed_requirements +
+ list(requirement_set.requirements.values())
+ )
+ self.require_hashes = (
+ requirement_set.require_hashes or
+ any(req.has_hash_options for req in root_reqs)
+ )
+
+ # Display where finder is looking for packages
+ search_scope = self.finder.search_scope
+ locations = search_scope.get_formatted_locations()
+ if locations:
+ logger.info(locations)
+
+ # Actually prepare the files, and collect any exceptions. Most hash
+ # exceptions cannot be checked ahead of time, because
+ # req.populate_link() needs to be called before we can make decisions
+ # based on link type.
+ discovered_reqs = [] # type: List[InstallRequirement]
+ hash_errors = HashErrors()
+ for req in chain(root_reqs, discovered_reqs):
+ try:
+ discovered_reqs.extend(
+ self._resolve_one(requirement_set, req)
+ )
+ except HashError as exc:
+ exc.req = req
+ hash_errors.append(exc)
+
+ if hash_errors:
+ raise hash_errors
+
+ def _is_upgrade_allowed(self, req):
+ # type: (InstallRequirement) -> bool
+ if self.upgrade_strategy == "to-satisfy-only":
+ return False
+ elif self.upgrade_strategy == "eager":
+ return True
+ else:
+ assert self.upgrade_strategy == "only-if-needed"
+ return req.is_direct
+
+ def _set_req_to_reinstall(self, req):
+ # type: (InstallRequirement) -> None
+ """
+        Set a requirement to be reinstalled.
+ """
+ # Don't uninstall the conflict if doing a user install and the
+ # conflict is not a user install.
+ if not self.use_user_site or dist_in_usersite(req.satisfied_by):
+ req.conflicts_with = req.satisfied_by
+ req.satisfied_by = None
+
+ # XXX: Stop passing requirement_set for options
+ def _check_skip_installed(self, req_to_install):
+ # type: (InstallRequirement) -> Optional[str]
+ """Check if req_to_install should be skipped.
+
+ This will check if the req is installed, and whether we should upgrade
+ or reinstall it, taking into account all the relevant user options.
+
+        After calling this, req_to_install will only have satisfied_by set to
+ None if the req_to_install is to be upgraded/reinstalled etc. Any
+ other value will be a dist recording the current thing installed that
+ satisfies the requirement.
+
+ Note that for vcs urls and the like we can't assess skipping in this
+ routine - we simply identify that we need to pull the thing down,
+ then later on it is pulled down and introspected to assess upgrade/
+ reinstalls etc.
+
+ :return: A text reason for why it was skipped, or None.
+ """
+ if self.ignore_installed:
+ return None
+
+ req_to_install.check_if_exists(self.use_user_site)
+ if not req_to_install.satisfied_by:
+ return None
+
+ if self.force_reinstall:
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ if not self._is_upgrade_allowed(req_to_install):
+ if self.upgrade_strategy == "only-if-needed":
+ return 'already satisfied, skipping upgrade'
+ return 'already satisfied'
+
+ # Check for the possibility of an upgrade. For link-based
+ # requirements we have to pull the tree down and inspect to assess
+ # the version #, so it's handled way down.
+ if not req_to_install.link:
+ try:
+ self.finder.find_requirement(req_to_install, upgrade=True)
+ except BestVersionAlreadyInstalled:
+ # Then the best version is installed.
+ return 'already up-to-date'
+ except DistributionNotFound:
+ # No distribution found, so we squash the error. It will
+ # be raised later when we re-try later to do the install.
+ # Why don't we just raise here?
+ pass
+
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ def _get_abstract_dist_for(self, req):
+ # type: (InstallRequirement) -> AbstractDistribution
+ """Takes a InstallRequirement and returns a single AbstractDist \
+ representing a prepared variant of the same.
+ """
+ assert self.require_hashes is not None, (
+ "require_hashes should have been set in Resolver.resolve()"
+ )
+
+ if req.editable:
+ return self.preparer.prepare_editable_requirement(
+ req, self.require_hashes, self.use_user_site, self.finder,
+ )
+
+ # satisfied_by is only evaluated by calling _check_skip_installed,
+ # so it must be None here.
+ assert req.satisfied_by is None
+ skip_reason = self._check_skip_installed(req)
+
+ if req.satisfied_by:
+ return self.preparer.prepare_installed_requirement(
+ req, self.require_hashes, skip_reason
+ )
+
+ upgrade_allowed = self._is_upgrade_allowed(req)
+ abstract_dist = self.preparer.prepare_linked_requirement(
+ req, self.session, self.finder, upgrade_allowed,
+ self.require_hashes
+ )
+
+ # NOTE
+ # The following portion is for determining if a certain package is
+ # going to be re-installed/upgraded or not and reporting to the user.
+ # This should probably get cleaned up in a future refactor.
+
+ # req.req is only avail after unpack for URL
+ # pkgs repeat check_if_exists to uninstall-on-upgrade
+ # (#14)
+ if not self.ignore_installed:
+ req.check_if_exists(self.use_user_site)
+
+ if req.satisfied_by:
+ should_modify = (
+ self.upgrade_strategy != "to-satisfy-only" or
+ self.force_reinstall or
+ self.ignore_installed or
+ req.link.scheme == 'file'
+ )
+ if should_modify:
+ self._set_req_to_reinstall(req)
+ else:
+ logger.info(
+ 'Requirement already satisfied (use --upgrade to upgrade):'
+ ' %s', req,
+ )
+
+ return abstract_dist
+
+ def _resolve_one(
+ self,
+ requirement_set, # type: RequirementSet
+ req_to_install # type: InstallRequirement
+ ):
+ # type: (...) -> List[InstallRequirement]
+ """Prepare a single requirements file.
+
+ :return: A list of additional InstallRequirements to also install.
+ """
+ # Tell user what we are doing for this requirement:
+ # obtain (editable), skipping, processing (local url), collecting
+ # (remote url or package name)
+ if req_to_install.constraint or req_to_install.prepared:
+ return []
+
+ req_to_install.prepared = True
+
+ # register tmp src for cleanup in case something goes wrong
+ requirement_set.reqs_to_cleanup.append(req_to_install)
+
+ abstract_dist = self._get_abstract_dist_for(req_to_install)
+
+ # Parse and return dependencies
+ dist = abstract_dist.get_pkg_resources_distribution()
+ # This will raise UnsupportedPythonVersion if the given Python
+ # version isn't compatible with the distribution's Requires-Python.
+ _check_dist_requires_python(
+ dist, version_info=self._py_version_info,
+ ignore_requires_python=self.ignore_requires_python,
+ )
+
+ more_reqs = [] # type: List[InstallRequirement]
+
+ def add_req(subreq, extras_requested):
+ sub_install_req = install_req_from_req_string(
+ str(subreq),
+ req_to_install,
+ isolated=self.isolated,
+ wheel_cache=self.wheel_cache,
+ use_pep517=self.use_pep517
+ )
+ parent_req_name = req_to_install.name
+ to_scan_again, add_to_parent = requirement_set.add_requirement(
+ sub_install_req,
+ parent_req_name=parent_req_name,
+ extras_requested=extras_requested,
+ )
+ if parent_req_name and add_to_parent:
+ self._discovered_dependencies[parent_req_name].append(
+ add_to_parent
+ )
+ more_reqs.extend(to_scan_again)
+
+ with indent_log():
+ # We add req_to_install before its dependencies, so that we
+ # can refer to it when adding dependencies.
+ if not requirement_set.has_requirement(req_to_install.name):
+ # 'unnamed' requirements will get added here
+ req_to_install.is_direct = True
+ requirement_set.add_requirement(
+ req_to_install, parent_req_name=None,
+ )
+
+ if not self.ignore_dependencies:
+ if req_to_install.extras:
+ logger.debug(
+ "Installing extra requirements: %r",
+ ','.join(req_to_install.extras),
+ )
+ missing_requested = sorted(
+ set(req_to_install.extras) - set(dist.extras)
+ )
+ for missing in missing_requested:
+ logger.warning(
+ '%s does not provide the extra \'%s\'',
+ dist, missing
+ )
+
+ available_requested = sorted(
+ set(dist.extras) & set(req_to_install.extras)
+ )
+ for subreq in dist.requires(available_requested):
+ add_req(subreq, extras_requested=available_requested)
+
+ if not req_to_install.editable and not req_to_install.satisfied_by:
+ # XXX: --no-install leads this to report 'Successfully
+ # downloaded' for only non-editable reqs, even though we took
+ # action on them.
+ requirement_set.successfully_downloaded.append(req_to_install)
+
+ return more_reqs
+
+ def get_installation_order(self, req_set):
+ # type: (RequirementSet) -> List[InstallRequirement]
+ """Create the installation order.
+
+ The installation order is topological - requirements are installed
+ before the requiring thing. We break cycles at an arbitrary point,
+ and make no other guarantees.
+ """
+        # The current implementation, which we may change at any point,
+        # installs the user-specified things in the order given, except when
+        # dependencies must come earlier to achieve topological order.
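+        # Illustrative: if A depends on B and B depends on C, scheduling A
+        # recurses into B and then C, so the resulting order is [C, B, A].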
+ order = []
+ ordered_reqs = set() # type: Set[InstallRequirement]
+
+ def schedule(req):
+ if req.satisfied_by or req in ordered_reqs:
+ return
+ if req.constraint:
+ return
+ ordered_reqs.add(req)
+ for dep in self._discovered_dependencies[req.name]:
+ schedule(dep)
+ order.append(req)
+
+ for install_req in req_set.requirements.values():
+ schedule(install_req)
+ return order
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py
new file mode 100644
index 00000000..5f843d79
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py
@@ -0,0 +1,142 @@
+"""Locations where we look for configs, install stuff, etc"""
+from __future__ import absolute_import
+
+import os
+import os.path
+import platform
+import site
+import sys
+import sysconfig
+from distutils import sysconfig as distutils_sysconfig
+from distutils.command.install import SCHEME_KEYS # type: ignore
+
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Union, Dict, List, Optional
+
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+
+def get_src_prefix():
+ if running_under_virtualenv():
+ src_prefix = os.path.join(sys.prefix, 'src')
+ else:
+ # FIXME: keep src in cwd for now (it is not a temporary folder)
+ try:
+ src_prefix = os.path.join(os.getcwd(), 'src')
+ except OSError:
+ # In case the current working directory has been renamed or deleted
+ sys.exit(
+ "The folder you are executing pip from can no longer be found."
+ )
+
+ # under macOS + virtualenv sys.prefix is not properly resolved
+ # it is something like /path/to/python/bin/..
+ return os.path.abspath(src_prefix)
+
+
+# FIXME doesn't account for venv linked to global site-packages
+
+site_packages = sysconfig.get_path("purelib") # type: Optional[str]
+
+# This is because of a bug in PyPy's sysconfig module, see
+# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
+# for more information.
+if platform.python_implementation().lower() == "pypy":
+ site_packages = distutils_sysconfig.get_python_lib()
+try:
+ # Use getusersitepackages if this is present, as it ensures that the
+ # value is initialised properly.
+ user_site = site.getusersitepackages()
+except AttributeError:
+ user_site = site.USER_SITE
+
+if WINDOWS:
+ bin_py = os.path.join(sys.prefix, 'Scripts')
+ bin_user = os.path.join(user_site, 'Scripts')
+ # buildout uses 'bin' on Windows too?
+ if not os.path.exists(bin_py):
+ bin_py = os.path.join(sys.prefix, 'bin')
+ bin_user = os.path.join(user_site, 'bin')
+else:
+ bin_py = os.path.join(sys.prefix, 'bin')
+ bin_user = os.path.join(user_site, 'bin')
+
+ # Forcing to use /usr/local/bin for standard macOS framework installs
+ # Also log to ~/Library/Logs/ for use with the Console.app log viewer
+ if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
+ bin_py = '/usr/local/bin'
+
+
+def distutils_scheme(dist_name, user=False, home=None, root=None,
+ isolated=False, prefix=None):
+ # type:(str, bool, str, str, bool, str) -> dict
+ """
+ Return a distutils install scheme
+ """
+ from distutils.dist import Distribution
+
+ scheme = {}
+
+ if isolated:
+ extra_dist_args = {"script_args": ["--no-user-cfg"]}
+ else:
+ extra_dist_args = {}
+ dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]]
+ dist_args.update(extra_dist_args)
+
+ d = Distribution(dist_args)
+ # Ignoring, typeshed issue reported python/typeshed/issues/2567
+ d.parse_config_files()
+ # NOTE: Ignoring type since mypy can't find attributes on 'Command'
+ i = d.get_command_obj('install', create=True) # type: Any
+ assert i is not None
+ # NOTE: setting user or home has the side-effect of creating the home dir
+ # or user base for installations during finalize_options()
+ # ideally, we'd prefer a scheme class that has no side-effects.
+ assert not (user and prefix), "user={} prefix={}".format(user, prefix)
+ assert not (home and prefix), "home={} prefix={}".format(home, prefix)
+ i.user = user or i.user
+ if user or home:
+ i.prefix = ""
+ i.prefix = prefix or i.prefix
+ i.home = home or i.home
+ i.root = root or i.root
+ i.finalize_options()
+ for key in SCHEME_KEYS:
+ scheme[key] = getattr(i, 'install_' + key)
+
+ # install_lib specified in setup.cfg should install *everything*
+ # into there (i.e. it takes precedence over both purelib and
+ # platlib). Note, i.install_lib is *always* set after
+ # finalize_options(); we only want to override here if the user
+    # has explicitly requested it, hence going back to the config.
+
+ # Ignoring, typeshed issue reported python/typeshed/issues/2567
+ if 'install_lib' in d.get_option_dict('install'): # type: ignore
+ scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
+
+ if running_under_virtualenv():
+ scheme['headers'] = os.path.join(
+ sys.prefix,
+ 'include',
+ 'site',
+ 'python' + sys.version[:3],
+ dist_name,
+ )
+
+ if root is not None:
+ path_no_drive = os.path.splitdrive(
+ os.path.abspath(scheme["headers"]))[1]
+ scheme["headers"] = os.path.join(
+ root,
+ path_no_drive[1:],
+ )
+
+ return scheme
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py
new file mode 100644
index 00000000..7855226e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py
@@ -0,0 +1,2 @@
+"""A package that contains models that represent entities.
+"""
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..0c0e1078
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pyc
new file mode 100644
index 00000000..8c36fe7c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pyc
new file mode 100644
index 00000000..e74488f2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pyc
new file mode 100644
index 00000000..9286478b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pyc
new file mode 100644
index 00000000..921bce9c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pyc
new file mode 100644
index 00000000..13792322
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc
new file mode 100644
index 00000000..f40b57f7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pyc
new file mode 100644
index 00000000..d198a509
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 00000000..1b99690f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,36 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._vendor.packaging.version import _BaseVersion
+ from pip._internal.models.link import Link
+ from typing import Any
+
+
+class InstallationCandidate(KeyBasedCompareMixin):
+ """Represents a potential "candidate" for installation.
+ """
+
+ def __init__(self, project, version, link):
+ # type: (Any, str, Link) -> None
+ self.project = project
+ self.version = parse_version(version) # type: _BaseVersion
+ self.link = link
+
+ super(InstallationCandidate, self).__init__(
+ key=(self.project, self.version, self.link),
+ defining_class=InstallationCandidate
+ )
+
+ def __repr__(self):
+ # type: () -> str
+ return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
+ self.project, self.version, self.link,
+ )
+
+ def __str__(self):
+ return '{!r} candidate (version {} at {})'.format(
+ self.project, self.version, self.link,
+ )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py
new file mode 100644
index 00000000..53138e48
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py
@@ -0,0 +1,73 @@
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Set, FrozenSet
+
+
+class FormatControl(object):
+ """Helper for managing formats from which a package can be installed.
+ """
+
+ def __init__(self, no_binary=None, only_binary=None):
+ # type: (Optional[Set], Optional[Set]) -> None
+ if no_binary is None:
+ no_binary = set()
+ if only_binary is None:
+ only_binary = set()
+
+ self.no_binary = no_binary
+ self.only_binary = only_binary
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return "{}({}, {})".format(
+ self.__class__.__name__,
+ self.no_binary,
+ self.only_binary
+ )
+
+ @staticmethod
+ def handle_mutual_excludes(value, target, other):
+ # type: (str, Optional[Set], Optional[Set]) -> None
+ new = value.split(',')
+ while ':all:' in new:
+ other.clear()
+ target.clear()
+ target.add(':all:')
+ del new[:new.index(':all:') + 1]
+ # If no :none: follows the :all:, the remaining entries are
+ # redundant (":all:" already covers them), so stop here.
+ if ':none:' not in new:
+ return
+ for name in new:
+ if name == ':none:':
+ target.clear()
+ continue
+ name = canonicalize_name(name)
+ other.discard(name)
+ target.add(name)
+
+ def get_allowed_formats(self, canonical_name):
+ # type: (str) -> FrozenSet
+ result = {"binary", "source"}
+ if canonical_name in self.only_binary:
+ result.discard('source')
+ elif canonical_name in self.no_binary:
+ result.discard('binary')
+ elif ':all:' in self.only_binary:
+ result.discard('source')
+ elif ':all:' in self.no_binary:
+ result.discard('binary')
+ return frozenset(result)
+
+ def disallow_binaries(self):
+ # type: () -> None
+ self.handle_mutual_excludes(
+ ':all:', self.no_binary, self.only_binary,
+ )
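The two sets mirror pip's --no-binary / --only-binary options. A small sketch of the mutual-exclude handling, with made-up package names:

    from pip._internal.models.format_control import FormatControl

    fc = FormatControl()
    # Simulate `--no-binary :all:`: both sets are cleared, then
    # everything is marked source-only.
    fc.handle_mutual_excludes(':all:', fc.no_binary, fc.only_binary)
    print(fc.get_allowed_formats('simplejson'))  # frozenset({'source'})

    # Simulate `--no-binary :none:,lxml`: :none: resets the set, after
    # which only lxml is barred from binary installs.
    fc.handle_mutual_excludes(':none:,lxml', fc.no_binary, fc.only_binary)
    print(fc.get_allowed_formats('lxml'))                 # frozenset({'source'})
    print(sorted(fc.get_allowed_formats('simplejson')))   # ['binary', 'source']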
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py
new file mode 100644
index 00000000..ead1efbd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,31 @@
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+
+class PackageIndex(object):
+ """Represents a Package Index and provides easier access to endpoints
+ """
+
+ def __init__(self, url, file_storage_domain):
+ # type: (str, str) -> None
+ super(PackageIndex, self).__init__()
+ self.url = url
+ self.netloc = urllib_parse.urlsplit(url).netloc
+ self.simple_url = self._url_for_path('simple')
+ self.pypi_url = self._url_for_path('pypi')
+
+ # This is part of a temporary hack used to block installs of PyPI
+ # packages which depend on external URLs; it is only necessary until
+ # PyPI can block such packages itself.
+ self.file_storage_domain = file_storage_domain
+
+ def _url_for_path(self, path):
+ # type: (str) -> str
+ return urllib_parse.urljoin(self.url, path)
+
+
+PyPI = PackageIndex(
+ 'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
+)
+TestPyPI = PackageIndex(
+ 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
+)
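The derived endpoints come straight from urljoin; for instance (the mirror URL below is hypothetical):

    from pip._internal.models.index import PackageIndex, PyPI

    print(PyPI.simple_url)  # https://pypi.org/simple
    print(PyPI.pypi_url)    # https://pypi.org/pypi
    print(PyPI.netloc)      # pypi.org

    # A private mirror gets the same derived endpoints.
    mirror = PackageIndex('https://pypi.example.org/', file_storage_domain='')
    print(mirror.simple_url)  # https://pypi.example.org/simple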
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py
new file mode 100644
index 00000000..d42be28c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,213 @@
+import posixpath
+import re
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.utils.misc import (
+ WHEEL_EXTENSION, path_to_url, redact_password_from_url,
+ split_auth_from_netloc, splitext,
+)
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Text, Tuple, Union
+ from pip._internal.index import HTMLPage
+ from pip._internal.utils.hashes import Hashes
+
+
+class Link(KeyBasedCompareMixin):
+ """Represents a parsed link from a Package Index's simple URL
+ """
+
+ def __init__(
+ self,
+ url, # type: str
+ comes_from=None, # type: Optional[Union[str, HTMLPage]]
+ requires_python=None, # type: Optional[str]
+ yanked_reason=None, # type: Optional[Text]
+ ):
+ # type: (...) -> None
+ """
+ :param url: url of the resource pointed to (href of the link)
+ :param comes_from: instance of HTMLPage where the link was found,
+ or string.
+ :param requires_python: String containing the `Requires-Python`
+ metadata field, specified in PEP 345. This may be specified by
+ a data-requires-python attribute in the HTML link tag, as
+ described in PEP 503.
+ :param yanked_reason: the reason the file has been yanked, if the
+ file has been yanked, or None if the file hasn't been yanked.
+ This is the value of the "data-yanked" attribute, if present, in
+ a simple repository HTML link. If the file has been yanked but
+ no reason was provided, this should be the empty string. See
+ PEP 592 for more information and the specification.
+ """
+
+ # url can be a UNC windows share
+ if url.startswith('\\\\'):
+ url = path_to_url(url)
+
+ self._parsed_url = urllib_parse.urlsplit(url)
+ # Store the url as a private attribute to prevent accidentally
+ # trying to set a new value.
+ self._url = url
+
+ self.comes_from = comes_from
+ self.requires_python = requires_python if requires_python else None
+ self.yanked_reason = yanked_reason
+
+ super(Link, self).__init__(key=url, defining_class=Link)
+
+ def __str__(self):
+ if self.requires_python:
+ rp = ' (requires-python:%s)' % self.requires_python
+ else:
+ rp = ''
+ if self.comes_from:
+ return '%s (from %s)%s' % (redact_password_from_url(self._url),
+ self.comes_from, rp)
+ else:
+ return redact_password_from_url(str(self._url))
+
+ def __repr__(self):
+ return '<Link %s>' % self
+
+ @property
+ def url(self):
+ # type: () -> str
+ return self._url
+
+ @property
+ def filename(self):
+ # type: () -> str
+ path = self.path.rstrip('/')
+ name = posixpath.basename(path)
+ if not name:
+ # Make sure we don't leak auth information if the netloc
+ # includes a username and password.
+ netloc, user_pass = split_auth_from_netloc(self.netloc)
+ return netloc
+
+ name = urllib_parse.unquote(name)
+ assert name, ('URL %r produced no filename' % self._url)
+ return name
+
+ @property
+ def scheme(self):
+ # type: () -> str
+ return self._parsed_url.scheme
+
+ @property
+ def netloc(self):
+ # type: () -> str
+ """
+ This can contain auth information.
+ """
+ return self._parsed_url.netloc
+
+ @property
+ def path(self):
+ # type: () -> str
+ return urllib_parse.unquote(self._parsed_url.path)
+
+ def splitext(self):
+ # type: () -> Tuple[str, str]
+ return splitext(posixpath.basename(self.path.rstrip('/')))
+
+ @property
+ def ext(self):
+ # type: () -> str
+ return self.splitext()[1]
+
+ @property
+ def url_without_fragment(self):
+ # type: () -> str
+ scheme, netloc, path, query, fragment = self._parsed_url
+ return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
+
+ _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
+
+ @property
+ def egg_fragment(self):
+ # type: () -> Optional[str]
+ match = self._egg_fragment_re.search(self._url)
+ if not match:
+ return None
+ return match.group(1)
+
+ _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
+
+ @property
+ def subdirectory_fragment(self):
+ # type: () -> Optional[str]
+ match = self._subdirectory_fragment_re.search(self._url)
+ if not match:
+ return None
+ return match.group(1)
+
+ _hash_re = re.compile(
+ r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
+ )
+
+ @property
+ def hash(self):
+ # type: () -> Optional[str]
+ match = self._hash_re.search(self._url)
+ if match:
+ return match.group(2)
+ return None
+
+ @property
+ def hash_name(self):
+ # type: () -> Optional[str]
+ match = self._hash_re.search(self._url)
+ if match:
+ return match.group(1)
+ return None
+
+ @property
+ def show_url(self):
+ # type: () -> Optional[str]
+ return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
+
+ @property
+ def is_wheel(self):
+ # type: () -> bool
+ return self.ext == WHEEL_EXTENSION
+
+ @property
+ def is_artifact(self):
+ # type: () -> bool
+ """
+ Determines if this points to an actual artifact (e.g. a tarball) or if
+ it points to an "abstract" thing like a path or a VCS location.
+ """
+ from pip._internal.vcs import vcs
+
+ if self.scheme in vcs.all_schemes:
+ return False
+
+ return True
+
+ @property
+ def is_yanked(self):
+ # type: () -> bool
+ return self.yanked_reason is not None
+
+ @property
+ def has_hash(self):
+ return self.hash_name is not None
+
+ def is_hash_allowed(self, hashes):
+ # type: (Optional[Hashes]) -> bool
+ """
+ Return True if the link has a hash and it is allowed.
+ """
+ if hashes is None or not self.has_hash:
+ return False
+ # Assert non-None so mypy knows self.hash_name and self.hash are str.
+ assert self.hash_name is not None
+ assert self.hash is not None
+
+ return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
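A short sketch of the fragment and extension parsing; the file-hosting URL and hash digest are invented:

    from pip._internal.models.link import Link

    link = Link(
        'https://files.example.com/packages/'
        'requests-2.22.0-py2.py3-none-any.whl#sha256=' + 'ab' * 32
    )
    print(link.filename)              # requests-2.22.0-py2.py3-none-any.whl
    print(link.is_wheel)              # True
    print(link.hash_name, link.hash)  # sha256 abab...ab
    print(link.url_without_fragment)

    # VCS URLs are "abstract" locations rather than artifacts; #egg=
    # names the project they provide.
    vcs_link = Link('git+https://github.com/pypa/pip.git#egg=pip')
    print(vcs_link.egg_fragment)  # pip
    print(vcs_link.is_artifact)   # False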
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py
new file mode 100644
index 00000000..62152449
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py
@@ -0,0 +1,113 @@
+import itertools
+import logging
+import os
+import posixpath
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import HAS_TLS
+from pip._internal.utils.misc import normalize_path, redact_password_from_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchScope(object):
+
+ """
+ Encapsulates the locations that pip is configured to search.
+ """
+
+ @classmethod
+ def create(
+ cls,
+ find_links, # type: List[str]
+ index_urls, # type: List[str]
+ ):
+ # type: (...) -> SearchScope
+ """
+ Create a SearchScope object after normalizing the `find_links`.
+ """
+ # Build find_links. If an argument starts with ~, it may be
+ # a local file relative to a home directory. So try normalizing
+ # it and if it exists, use the normalized version.
+ # This is deliberately conservative - it might be fine just to
+ # blindly normalize anything starting with a ~...
+ built_find_links = [] # type: List[str]
+ for link in find_links:
+ if link.startswith('~'):
+ new_link = normalize_path(link)
+ if os.path.exists(new_link):
+ link = new_link
+ built_find_links.append(link)
+
+ # If we don't have TLS enabled, warn if any of the places we're
+ # looking relies on TLS.
+ if not HAS_TLS:
+ for link in itertools.chain(index_urls, built_find_links):
+ parsed = urllib_parse.urlparse(link)
+ if parsed.scheme == 'https':
+ logger.warning(
+ 'pip is configured with locations that require '
+ 'TLS/SSL, however the ssl module in Python is not '
+ 'available.'
+ )
+ break
+
+ return cls(
+ find_links=built_find_links,
+ index_urls=index_urls,
+ )
+
+ def __init__(
+ self,
+ find_links, # type: List[str]
+ index_urls, # type: List[str]
+ ):
+ # type: (...) -> None
+ self.find_links = find_links
+ self.index_urls = index_urls
+
+ def get_formatted_locations(self):
+ # type: () -> str
+ lines = []
+ if self.index_urls and self.index_urls != [PyPI.simple_url]:
+ lines.append(
+ 'Looking in indexes: {}'.format(', '.join(
+ redact_password_from_url(url) for url in self.index_urls))
+ )
+ if self.find_links:
+ lines.append(
+ 'Looking in links: {}'.format(', '.join(
+ redact_password_from_url(url) for url in self.find_links))
+ )
+ return '\n'.join(lines)
+
+ def get_index_urls_locations(self, project_name):
+ # type: (str) -> List[str]
+ """Returns the locations found via self.index_urls
+
+ Checks the url_name on the main (first in the list) index and
+ uses this url_name to produce all locations.
+ """
+
+ def mkurl_pypi_url(url):
+ loc = posixpath.join(
+ url,
+ urllib_parse.quote(canonicalize_name(project_name)))
+ # For maximum compatibility with easy_install, ensure the path
+ # ends in a trailing slash. Although this isn't in the spec
+ # (and PyPI can handle it without the slash) some other index
+ # implementations might break if they relied on easy_install's
+ # behavior.
+ if not loc.endswith('/'):
+ loc = loc + '/'
+ return loc
+
+ return [mkurl_pypi_url(url) for url in self.index_urls]
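A sketch of how a scope resolves project locations; the wheelhouse path is hypothetical and is only normalized if it actually exists:

    from pip._internal.models.search_scope import SearchScope

    scope = SearchScope.create(
        find_links=['~/wheelhouse'],
        index_urls=['https://pypi.org/simple'],
    )
    # Each index URL yields one normalized "simple" location per project:
    print(scope.get_index_urls_locations('Django'))
    # ['https://pypi.org/simple/django/']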
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py
new file mode 100644
index 00000000..f58fdce9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py
@@ -0,0 +1,47 @@
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+ from pip._internal.models.format_control import FormatControl
+
+
+class SelectionPreferences(object):
+
+ """
+ Encapsulates the candidate selection preferences for downloading
+ and installing files.
+ """
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also makes
+ # the decision explicit in the calling code, which helps anyone
+ # reading it.
+ def __init__(
+ self,
+ allow_yanked, # type: bool
+ allow_all_prereleases=False, # type: bool
+ format_control=None, # type: Optional[FormatControl]
+ prefer_binary=False, # type: bool
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> None
+ """Create a SelectionPreferences object.
+
+ :param allow_yanked: Whether files marked as yanked (in the sense
+ of PEP 592) are permitted to be candidates for install.
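+ :param allow_all_prereleases: Whether to treat pre-release and
+ development versions as eligible install candidates.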
+ :param format_control: A FormatControl object or None. Used to control
+ the selection of source packages / binary packages when consulting
+ the index and links.
+ :param prefer_binary: Whether to prefer an old, but valid, binary
+ dist over a new source dist.
+ :param ignore_requires_python: Whether to ignore incompatible
+ "Requires-Python" values in links. Defaults to False.
+ """
+ if ignore_requires_python is None:
+ ignore_requires_python = False
+
+ self.allow_yanked = allow_yanked
+ self.allow_all_prereleases = allow_all_prereleases
+ self.format_control = format_control
+ self.prefer_binary = prefer_binary
+ self.ignore_requires_python = ignore_requires_python
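Since allow_yanked has no default, every call site must state it; a minimal construction might look like:

    from pip._internal.models.selection_prefs import SelectionPreferences

    prefs = SelectionPreferences(
        allow_yanked=False,
        prefer_binary=True,
    )
    print(prefs.ignore_requires_python)  # False (None is normalized)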
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py
new file mode 100644
index 00000000..a23b79c4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py
@@ -0,0 +1,106 @@
+import sys
+
+from pip._internal.pep425tags import get_supported, version_info_to_nodot
+from pip._internal.utils.misc import normalize_version_info
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Tuple
+ from pip._internal.pep425tags import Pep425Tag
+
+
+class TargetPython(object):
+
+ """
+ Encapsulates the properties of a Python interpreter one is targeting
+ for a package install, download, etc.
+ """
+
+ def __init__(
+ self,
+ platform=None, # type: Optional[str]
+ py_version_info=None, # type: Optional[Tuple[int, ...]]
+ abi=None, # type: Optional[str]
+ implementation=None, # type: Optional[str]
+ ):
+ # type: (...) -> None
+ """
+ :param platform: A string or None. If None, searches for packages
+ that are supported by the current system. Otherwise, will find
+ packages that can be built on the platform passed in. These
+ packages will only be downloaded for distribution: they will
+ not be built locally.
+ :param py_version_info: An optional tuple of ints representing the
+ Python version information to use (e.g. `sys.version_info[:3]`).
+ This can have length 1, 2, or 3 when provided.
+ :param abi: A string or None. This is passed to pep425tags.py's
+ get_supported() function as is.
+ :param implementation: A string or None. This is passed to
+ pep425tags.py's get_supported() function as is.
+ """
+ # Store the given py_version_info for when we call get_supported().
+ self._given_py_version_info = py_version_info
+
+ if py_version_info is None:
+ py_version_info = sys.version_info[:3]
+ else:
+ py_version_info = normalize_version_info(py_version_info)
+
+ py_version = '.'.join(map(str, py_version_info[:2]))
+
+ self.abi = abi
+ self.implementation = implementation
+ self.platform = platform
+ self.py_version = py_version
+ self.py_version_info = py_version_info
+
+ # This is used to cache the return value of get_tags().
+ self._valid_tags = None # type: Optional[List[Pep425Tag]]
+
+ def format_given(self):
+ # type: () -> str
+ """
+ Format the given, non-None attributes for display.
+ """
+ display_version = None
+ if self._given_py_version_info is not None:
+ display_version = '.'.join(
+ str(part) for part in self._given_py_version_info
+ )
+
+ key_values = [
+ ('platform', self.platform),
+ ('version_info', display_version),
+ ('abi', self.abi),
+ ('implementation', self.implementation),
+ ]
+ return ' '.join(
+ '{}={!r}'.format(key, value) for key, value in key_values
+ if value is not None
+ )
+
+ def get_tags(self):
+ # type: () -> List[Pep425Tag]
+ """
+ Return the supported PEP 425 tags to check wheel candidates against.
+
+ The tags are returned in order of preference (most preferred first).
+ """
+ if self._valid_tags is None:
+ # Pass versions=None if no py_version_info was given since
+ # versions=None uses special default logic.
+ py_version_info = self._given_py_version_info
+ if py_version_info is None:
+ versions = None
+ else:
+ versions = [version_info_to_nodot(py_version_info)]
+
+ tags = get_supported(
+ versions=versions,
+ platform=self.platform,
+ abi=self.abi,
+ impl=self.implementation,
+ )
+ self._valid_tags = tags
+
+ return self._valid_tags
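A sketch of targeting both the running interpreter and a hypothetical cross-platform download target:

    from pip._internal.models.target_python import TargetPython

    # Default: describe the running interpreter.
    current = TargetPython()
    print(current.py_version, len(current.get_tags()))

    # Cross-target: wheels for CPython 3.6 on 64-bit manylinux1.
    # Unspecified fields (abi, implementation) fall back to the local
    # interpreter's values inside get_supported().
    target = TargetPython(platform='manylinux1_x86_64', py_version_info=(3, 6))
    print(target.format_given())
    # platform='manylinux1_x86_64' version_info='3.6'
    print(target.get_tags()[0])  # e.g. ('cp36', <local abi>, 'manylinux1_x86_64')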
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..2020fda1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pyc
new file mode 100644
index 00000000..28338255
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc
new file mode 100644
index 00000000..710c275d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc
new file mode 100644
index 00000000..48ba3172
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py
new file mode 100644
index 00000000..7b8b369f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py
@@ -0,0 +1,159 @@
+"""Validation of dependencies of packages
+"""
+
+import logging
+from collections import namedtuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import RequirementParseError
+
+from pip._internal.distributions import (
+ make_distribution_for_install_requirement,
+)
+from pip._internal.utils.misc import get_installed_distributions
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+logger = logging.getLogger(__name__)
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.req.req_install import InstallRequirement
+ from typing import (
+ Any, Callable, Dict, Optional, Set, Tuple, List
+ )
+
+ # Shorthands
+ PackageSet = Dict[str, 'PackageDetails']
+ Missing = Tuple[str, Any]
+ Conflicting = Tuple[str, str, Any]
+
+ MissingDict = Dict[str, List[Missing]]
+ ConflictingDict = Dict[str, List[Conflicting]]
+ CheckResult = Tuple[MissingDict, ConflictingDict]
+
+PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
+
+
+def create_package_set_from_installed(**kwargs):
+ # type: (**Any) -> Tuple[PackageSet, bool]
+ """Converts a list of distributions into a PackageSet.
+ """
+ # Default to using all packages installed on the system
+ if kwargs == {}:
+ kwargs = {"local_only": False, "skip": ()}
+
+ package_set = {}
+ problems = False
+ for dist in get_installed_distributions(**kwargs):
+ name = canonicalize_name(dist.project_name)
+ try:
+ package_set[name] = PackageDetails(dist.version, dist.requires())
+ except RequirementParseError as e:
+ # Don't crash on broken metadata
+ logger.warning("Error parsing requirements for %s: %s", name, e)
+ problems = True
+ return package_set, problems
+
+
+def check_package_set(package_set, should_ignore=None):
+ # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
+ """Check if a package set is consistent
+
+ If should_ignore is passed, it should be a callable that takes a
+ package name and returns a boolean.
+ """
+ if should_ignore is None:
+ def should_ignore(name):
+ return False
+
+ missing = dict()
+ conflicting = dict()
+
+ for package_name in package_set:
+ # Info about dependencies of package_name
+ missing_deps = set() # type: Set[Missing]
+ conflicting_deps = set() # type: Set[Conflicting]
+
+ if should_ignore(package_name):
+ continue
+
+ for req in package_set[package_name].requires:
+ name = canonicalize_name(req.project_name) # type: str
+
+ # Check if it's missing
+ if name not in package_set:
+ missed = True
+ if req.marker is not None:
+ missed = req.marker.evaluate()
+ if missed:
+ missing_deps.add((name, req))
+ continue
+
+ # Check if there's a conflict
+ version = package_set[name].version # type: str
+ if not req.specifier.contains(version, prereleases=True):
+ conflicting_deps.add((name, version, req))
+
+ if missing_deps:
+ missing[package_name] = sorted(missing_deps, key=str)
+ if conflicting_deps:
+ conflicting[package_name] = sorted(conflicting_deps, key=str)
+
+ return missing, conflicting
+
+
+def check_install_conflicts(to_install):
+ # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
+ """For checking if the dependency graph would be consistent after \
+ installing given requirements
+ """
+ # Start from the current state
+ package_set, _ = create_package_set_from_installed()
+ # Install packages
+ would_be_installed = _simulate_installation_of(to_install, package_set)
+
+ # Only warn about directly-dependent packages; create a whitelist of them
+ whitelist = _create_whitelist(would_be_installed, package_set)
+
+ return (
+ package_set,
+ check_package_set(
+ package_set, should_ignore=lambda name: name not in whitelist
+ )
+ )
+
+
+def _simulate_installation_of(to_install, package_set):
+ # type: (List[InstallRequirement], PackageSet) -> Set[str]
+ """Computes the version of packages after installing to_install.
+ """
+
+ # Keep track of packages that were installed
+ installed = set()
+
+ # Modify it as installing requirement_set would (assuming no errors)
+ for inst_req in to_install:
+ abstract_dist = make_distribution_for_install_requirement(inst_req)
+ dist = abstract_dist.get_pkg_resources_distribution()
+
+ name = canonicalize_name(dist.key)
+ package_set[name] = PackageDetails(dist.version, dist.requires())
+
+ installed.add(name)
+
+ return installed
+
+
+def _create_whitelist(would_be_installed, package_set):
+ # type: (Set[str], PackageSet) -> Set[str]
+ packages_affected = set(would_be_installed)
+
+ for package_name in package_set:
+ if package_name in packages_affected:
+ continue
+
+ for req in package_set[package_name].requires:
+ if canonicalize_name(req.name) in packages_affected:
+ packages_affected.add(package_name)
+ break
+
+ return packages_affected
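These helpers underpin the `pip check` command; run against the live environment, they can be driven directly as a sketch like this:

    from pip._internal.operations.check import (
        check_package_set, create_package_set_from_installed,
    )

    package_set, parse_problems = create_package_set_from_installed()
    missing, conflicting = check_package_set(package_set)

    for project, deps in missing.items():
        for dep_name, req in deps:
            print('%s requires %s, which is not installed' % (project, req))
    for project, deps in conflicting.items():
        for dep_name, version, req in deps:
            print('%s requires %s, but %s %s is installed'
                  % (project, req, dep_name, version))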
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py
new file mode 100644
index 00000000..6f5a3dd9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py
@@ -0,0 +1,253 @@
+from __future__ import absolute_import
+
+import collections
+import logging
+import os
+import re
+
+from pip._vendor import six
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import RequirementParseError
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.req.constructors import (
+ install_req_from_editable, install_req_from_line,
+)
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.misc import (
+ dist_is_editable, get_installed_distributions,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
+ )
+ from pip._internal.cache import WheelCache
+ from pip._vendor.pkg_resources import (
+ Distribution, Requirement
+ )
+
+ RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
+
+
+logger = logging.getLogger(__name__)
+
+
+def freeze(
+ requirement=None, # type: Optional[List[str]]
+ find_links=None, # type: Optional[List[str]]
+ local_only=None, # type: Optional[bool]
+ user_only=None, # type: Optional[bool]
+ paths=None, # type: Optional[List[str]]
+ skip_regex=None, # type: Optional[str]
+ isolated=False, # type: bool
+ wheel_cache=None, # type: Optional[WheelCache]
+ exclude_editable=False, # type: bool
+ skip=() # type: Container[str]
+):
+ # type: (...) -> Iterator[str]
+ find_links = find_links or []
+ skip_match = None
+
+ if skip_regex:
+ skip_match = re.compile(skip_regex).search
+
+ for link in find_links:
+ yield '-f %s' % link
+ installations = {} # type: Dict[str, FrozenRequirement]
+ for dist in get_installed_distributions(local_only=local_only,
+ skip=(),
+ user_only=user_only,
+ paths=paths):
+ try:
+ req = FrozenRequirement.from_dist(dist)
+ except RequirementParseError as exc:
+ # We include dist rather than dist.project_name because the
+ # dist string includes more information, like the version and
+ # location. We also include the exception message to aid
+ # troubleshooting.
+ logger.warning(
+ 'Could not generate requirement for distribution %r: %s',
+ dist, exc
+ )
+ continue
+ if exclude_editable and req.editable:
+ continue
+ installations[req.name] = req
+
+ if requirement:
+ # the options that don't get turned into an InstallRequirement
+ # should only be emitted once, even if the same option is in multiple
+ # requirements files, so we need to keep track of what has been emitted
+ # so that we don't emit it again if it's seen again
+ emitted_options = set() # type: Set[str]
+ # keep track of which files a requirement is in so that we can
+ # give an accurate warning if a requirement appears multiple times.
+ req_files = collections.defaultdict(list) # type: Dict[str, List[str]]
+ for req_file_path in requirement:
+ with open(req_file_path) as req_file:
+ for line in req_file:
+ if (not line.strip() or
+ line.strip().startswith('#') or
+ (skip_match and skip_match(line)) or
+ line.startswith((
+ '-r', '--requirement',
+ '-Z', '--always-unzip',
+ '-f', '--find-links',
+ '-i', '--index-url',
+ '--pre',
+ '--trusted-host',
+ '--process-dependency-links',
+ '--extra-index-url'))):
+ line = line.rstrip()
+ if line not in emitted_options:
+ emitted_options.add(line)
+ yield line
+ continue
+
+ if line.startswith('-e') or line.startswith('--editable'):
+ if line.startswith('-e'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--editable'):].strip().lstrip('=')
+ line_req = install_req_from_editable(
+ line,
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+ else:
+ line_req = install_req_from_line(
+ COMMENT_RE.sub('', line).strip(),
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+
+ if not line_req.name:
+ logger.info(
+ "Skipping line in requirement file [%s] because "
+ "it's not clear what it would install: %s",
+ req_file_path, line.strip(),
+ )
+ logger.info(
+ " (add #egg=PackageName to the URL to avoid"
+ " this warning)"
+ )
+ elif line_req.name not in installations:
+ # either it's not installed, or it is installed
+ # but has been processed already
+ if not req_files[line_req.name]:
+ logger.warning(
+ "Requirement file [%s] contains %s, but "
+ "package %r is not installed",
+ req_file_path,
+ COMMENT_RE.sub('', line).strip(), line_req.name
+ )
+ else:
+ req_files[line_req.name].append(req_file_path)
+ else:
+ yield str(installations[line_req.name]).rstrip()
+ del installations[line_req.name]
+ req_files[line_req.name].append(req_file_path)
+
+ # Warn about requirements that were included multiple times (in a
+ # single requirements file or in different requirements files).
+ for name, files in six.iteritems(req_files):
+ if len(files) > 1:
+ logger.warning("Requirement %s included multiple times [%s]",
+ name, ', '.join(sorted(set(files))))
+
+ yield (
+ '## The following requirements were added by '
+ 'pip freeze:'
+ )
+ for installation in sorted(
+ installations.values(), key=lambda x: x.name.lower()):
+ if canonicalize_name(installation.name) not in skip:
+ yield str(installation).rstrip()
+
+
+def get_requirement_info(dist):
+ # type: (Distribution) -> RequirementInfo
+ """
+ Compute and return values (req, editable, comments) for use in
+ FrozenRequirement.from_dist().
+ """
+ if not dist_is_editable(dist):
+ return (None, False, [])
+
+ location = os.path.normcase(os.path.abspath(dist.location))
+
+ from pip._internal.vcs import vcs, RemoteNotFoundError
+ vcs_backend = vcs.get_backend_for_dir(location)
+
+ if vcs_backend is None:
+ req = dist.as_requirement()
+ logger.debug(
+ 'No VCS found for editable requirement "%s" in: %r', req,
+ location,
+ )
+ comments = [
+ '# Editable install with no version control ({})'.format(req)
+ ]
+ return (location, True, comments)
+
+ try:
+ req = vcs_backend.get_src_requirement(location, dist.project_name)
+ except RemoteNotFoundError:
+ req = dist.as_requirement()
+ comments = [
+ '# Editable {} install with no remote ({})'.format(
+ type(vcs_backend).__name__, req,
+ )
+ ]
+ return (location, True, comments)
+
+ except BadCommand:
+ logger.warning(
+ 'cannot determine version of editable source in %s '
+ '(%s command not found in path)',
+ location,
+ vcs_backend.name,
+ )
+ return (None, True, [])
+
+ except InstallationError as exc:
+ logger.warning(
+ "Error when trying to get requirement for VCS system %s, "
+ "falling back to uneditable format", exc
+ )
+ else:
+ if req is not None:
+ return (req, True, [])
+
+ logger.warning(
+ 'Could not determine repository location of %s', location
+ )
+ comments = ['## !! Could not determine repository location']
+
+ return (None, False, comments)
+
+
+class FrozenRequirement(object):
+ def __init__(self, name, req, editable, comments=()):
+ # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
+ self.name = name
+ self.req = req
+ self.editable = editable
+ self.comments = comments
+
+ @classmethod
+ def from_dist(cls, dist):
+ # type: (Distribution) -> FrozenRequirement
+ req, editable, comments = get_requirement_info(dist)
+ if req is None:
+ req = dist.as_requirement()
+
+ return cls(dist.project_name, req, editable, comments=comments)
+
+ def __str__(self):
+ req = self.req
+ if self.editable:
+ req = '-e %s' % req
+ return '\n'.join(list(self.comments) + [str(req)]) + '\n'
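Driving the generator directly is roughly equivalent to running `pip freeze`:

    from pip._internal.operations.freeze import freeze

    # Skip the packaging tools, as the `pip freeze` command does
    # by default.
    for line in freeze(skip=('pip', 'setuptools', 'wheel')):
        print(line)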
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py
new file mode 100644
index 00000000..6cf5f0ed
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py
@@ -0,0 +1,287 @@
+"""Prepares a distribution for installation
+"""
+
+import logging
+import os
+
+from pip._vendor import requests
+
+from pip._internal.distributions import (
+ make_distribution_for_install_requirement,
+)
+from pip._internal.distributions.installed import InstalledDistribution
+from pip._internal.download import (
+ is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
+)
+from pip._internal.exceptions import (
+ DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
+ PreviousBuildDirError, VcsHashUnsupported,
+)
+from pip._internal.utils.compat import expanduser
+from pip._internal.utils.hashes import MissingHashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import display_path, normalize_path
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+
+ from pip._internal.distributions import AbstractDistribution
+ from pip._internal.download import PipSession
+ from pip._internal.index import PackageFinder
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.req.req_tracker import RequirementTracker
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementPreparer(object):
+ """Prepares a Requirement
+ """
+
+ def __init__(
+ self,
+ build_dir, # type: str
+ download_dir, # type: Optional[str]
+ src_dir, # type: str
+ wheel_download_dir, # type: Optional[str]
+ progress_bar, # type: str
+ build_isolation, # type: bool
+ req_tracker # type: RequirementTracker
+ ):
+ # type: (...) -> None
+ super(RequirementPreparer, self).__init__()
+
+ self.src_dir = src_dir
+ self.build_dir = build_dir
+ self.req_tracker = req_tracker
+
+ # Where still-packed archives should be written to. If None, they are
+ # not saved, and are deleted immediately after unpacking.
+ self.download_dir = download_dir
+
+ # Where still-packed .whl files should be written to. If None, they are
+ # written to the download_dir parameter. Kept separate from download_dir
+ # to permit keeping only wheel archives for `pip wheel`.
+ if wheel_download_dir:
+ wheel_download_dir = normalize_path(wheel_download_dir)
+ self.wheel_download_dir = wheel_download_dir
+
+ # NOTE
+ # download_dir and wheel_download_dir overlap semantically and may
+ # be combined if we're willing to have non-wheel archives present in
+ # the wheelhouse output by 'pip wheel'.
+
+ self.progress_bar = progress_bar
+
+ # Is build isolation allowed?
+ self.build_isolation = build_isolation
+
+ @property
+ def _download_should_save(self):
+ # type: () -> bool
+ # TODO: Modify to reduce indentation needed
+ if self.download_dir:
+ self.download_dir = expanduser(self.download_dir)
+ if os.path.exists(self.download_dir):
+ return True
+ else:
+ logger.critical('Could not find download directory')
+ raise InstallationError(
+ "Could not find or access download directory '%s'"
+ % display_path(self.download_dir))
+ return False
+
+ def prepare_linked_requirement(
+ self,
+ req, # type: InstallRequirement
+ session, # type: PipSession
+ finder, # type: PackageFinder
+ upgrade_allowed, # type: bool
+ require_hashes # type: bool
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare a requirement that would be obtained from req.link
+ """
+ # TODO: Breakup into smaller functions
+ if req.link and req.link.scheme == 'file':
+ path = url_to_path(req.link.url)
+ logger.info('Processing %s', display_path(path))
+ else:
+ logger.info('Collecting %s', req)
+
+ with indent_log():
+ # @@ if filesystem packages are not marked
+ # editable in a req, a non deterministic error
+ # occurs when the script attempts to unpack the
+ # build directory
+ req.ensure_has_source_dir(self.build_dir)
+ # If a checkout exists, it's unwise to keep going. Version
+ # inconsistencies are logged later, but do not fail the
+ # installation.
+ # FIXME: this won't upgrade when there's an existing
+ # package unpacked in `req.source_dir`
+ if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
+ raise PreviousBuildDirError(
+ "pip can't proceed with requirements '%s' due to a"
+ " pre-existing build directory (%s). This is "
+ "likely due to a previous installation that failed"
+ ". pip is being responsible and not assuming it "
+ "can delete this. Please delete it and try again."
+ % (req, req.source_dir)
+ )
+ req.populate_link(finder, upgrade_allowed, require_hashes)
+
+ # We can't hit this spot and have populate_link return None.
+ # req.satisfied_by is None here (because we're
+ # guarded) and upgrade has no impact except when satisfied_by
+ # is not None.
+ # Then inside find_requirement existing_applicable -> False
+ # If no new versions are found, DistributionNotFound is raised,
+ # otherwise a result is guaranteed.
+ assert req.link
+ link = req.link
+
+ # Now that we have the real link, we can tell what kind of
+ # requirements we have and raise some more informative errors
+ # than otherwise. (For example, we can raise VcsHashUnsupported
+ # for a VCS URL rather than HashMissing.)
+ if require_hashes:
+ # We could check these first 2 conditions inside
+ # unpack_url and save repetition of conditions, but then
+ # we would report less-useful error messages for
+ # unhashable requirements, complaining that there's no
+ # hash provided.
+ if is_vcs_url(link):
+ raise VcsHashUnsupported()
+ elif is_file_url(link) and is_dir_url(link):
+ raise DirectoryUrlHashUnsupported()
+ if not req.original_link and not req.is_pinned:
+ # Unpinned packages are asking for trouble when a new
+ # version is uploaded. This isn't a security check, but
+ # it saves users a surprising hash mismatch in the
+ # future.
+ #
+ # file:/// URLs aren't pinnable, so don't complain
+ # about them not being pinned.
+ raise HashUnpinned()
+
+ hashes = req.hashes(trust_internet=not require_hashes)
+ if require_hashes and not hashes:
+ # Known-good hashes are missing for this requirement, so
+ # shim it with a facade object that will provoke hash
+ # computation and then raise a HashMissing exception
+ # showing the user what the hash should be.
+ hashes = MissingHashes()
+
+ try:
+ download_dir = self.download_dir
+ # We always delete unpacked sdists after pip runs.
+ autodelete_unpacked = True
+ if req.link.is_wheel and self.wheel_download_dir:
+ # when doing 'pip wheel' we download wheels to a
+ # dedicated dir.
+ download_dir = self.wheel_download_dir
+ if req.link.is_wheel:
+ if download_dir:
+ # When downloading, we only unpack wheels to get
+ # metadata.
+ autodelete_unpacked = True
+ else:
+ # When installing a wheel, we use the unpacked
+ # wheel.
+ autodelete_unpacked = False
+ unpack_url(
+ req.link, req.source_dir,
+ download_dir, autodelete_unpacked,
+ session=session, hashes=hashes,
+ progress_bar=self.progress_bar
+ )
+ except requests.HTTPError as exc:
+ logger.critical(
+ 'Could not install requirement %s because of error %s',
+ req,
+ exc,
+ )
+ raise InstallationError(
+ 'Could not install requirement %s because of HTTP '
+ 'error %s for URL %s' %
+ (req, exc, req.link)
+ )
+ abstract_dist = make_distribution_for_install_requirement(req)
+ with self.req_tracker.track(req):
+ abstract_dist.prepare_distribution_metadata(
+ finder, self.build_isolation,
+ )
+ if self._download_should_save:
+ # Make a .zip of the source_dir we already created.
+ if not req.link.is_artifact:
+ req.archive(self.download_dir)
+ return abstract_dist
+
+ def prepare_editable_requirement(
+ self,
+ req, # type: InstallRequirement
+ require_hashes, # type: bool
+ use_user_site, # type: bool
+ finder # type: PackageFinder
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare an editable requirement
+ """
+ assert req.editable, "cannot prepare a non-editable req as editable"
+
+ logger.info('Obtaining %s', req)
+
+ with indent_log():
+ if require_hashes:
+ raise InstallationError(
+ 'The editable requirement %s cannot be installed when '
+ 'requiring hashes, because there is no single file to '
+ 'hash.' % req
+ )
+ req.ensure_has_source_dir(self.src_dir)
+ req.update_editable(not self._download_should_save)
+
+ abstract_dist = make_distribution_for_install_requirement(req)
+ with self.req_tracker.track(req):
+ abstract_dist.prepare_distribution_metadata(
+ finder, self.build_isolation,
+ )
+
+ if self._download_should_save:
+ req.archive(self.download_dir)
+ req.check_if_exists(use_user_site)
+
+ return abstract_dist
+
+ def prepare_installed_requirement(
+ self,
+ req, # type: InstallRequirement
+ require_hashes, # type: bool
+ skip_reason # type: str
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare an already-installed requirement
+ """
+ assert req.satisfied_by, "req should have been satisfied but isn't"
+ assert skip_reason is not None, (
+ "did not get skip reason skipped but req.satisfied_by "
+ "is set to %r" % (req.satisfied_by,)
+ )
+ logger.info(
+ 'Requirement %s: %s (%s)',
+ skip_reason, req, req.satisfied_by.version
+ )
+ with indent_log():
+ if require_hashes:
+ logger.debug(
+ 'Since it is already installed, we are trusting this '
+ 'package without checking its hash. To ensure a '
+ 'completely repeatable environment, install into an '
+ 'empty virtualenv.'
+ )
+ abstract_dist = InstalledDistribution(req)
+
+ return abstract_dist
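Constructing a preparer in isolation is straightforward, though the prepare_* methods need a resolver, session, and finder around them. In the sketch below the scratch directories are hypothetical, and it assumes RequirementTracker() is constructible with no arguments in this vintage of pip:

    import tempfile

    from pip._internal.operations.prepare import RequirementPreparer
    from pip._internal.req.req_tracker import RequirementTracker

    preparer = RequirementPreparer(
        build_dir=tempfile.mkdtemp(prefix='pip-build-'),
        download_dir=None,   # don't keep downloaded archives
        src_dir=tempfile.mkdtemp(prefix='pip-src-'),
        wheel_download_dir=None,
        progress_bar='on',
        build_isolation=True,
        # assumed: RequirementTracker() takes no args in this pip vintage
        req_tracker=RequirementTracker(),
    )
    print(preparer._download_should_save)  # False, since download_dir is None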
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py
new file mode 100644
index 00000000..07dc148e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py
@@ -0,0 +1,387 @@
+"""Generate and work with PEP 425 Compatibility Tags."""
+from __future__ import absolute_import
+
+import distutils.util
+import logging
+import platform
+import re
+import sys
+import sysconfig
+import warnings
+from collections import OrderedDict
+
+import pip._internal.utils.glibc
+from pip._internal.utils.compat import get_extension_suffixes
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Tuple, Callable, List, Optional, Union, Dict
+ )
+
+ Pep425Tag = Tuple[str, str, str]
+
+logger = logging.getLogger(__name__)
+
+_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
+
+
+def get_config_var(var):
+ # type: (str) -> Optional[str]
+ try:
+ return sysconfig.get_config_var(var)
+ except IOError as e: # Issue #1074
+ warnings.warn("{}".format(e), RuntimeWarning)
+ return None
+
+
+def get_abbr_impl():
+ # type: () -> str
+ """Return abbreviated implementation name."""
+ if hasattr(sys, 'pypy_version_info'):
+ pyimpl = 'pp'
+ elif sys.platform.startswith('java'):
+ pyimpl = 'jy'
+ elif sys.platform == 'cli':
+ pyimpl = 'ip'
+ else:
+ pyimpl = 'cp'
+ return pyimpl
+
+
+def version_info_to_nodot(version_info):
+ # type: (Tuple[int, ...]) -> str
+ # Only use up to the first two numbers.
+ return ''.join(map(str, version_info[:2]))
+
+
+def get_impl_ver():
+ # type: () -> str
+ """Return implementation version."""
+ impl_ver = get_config_var("py_version_nodot")
+ if not impl_ver or get_abbr_impl() == 'pp':
+ impl_ver = ''.join(map(str, get_impl_version_info()))
+ return impl_ver
+
+
+def get_impl_version_info():
+ # type: () -> Tuple[int, ...]
+ """Return sys.version_info-like tuple for use in decrementing the minor
+ version."""
+ if get_abbr_impl() == 'pp':
+ # as per https://github.com/pypa/pip/issues/2882
+ # attrs exist only on pypy
+ return (sys.version_info[0],
+ sys.pypy_version_info.major, # type: ignore
+ sys.pypy_version_info.minor) # type: ignore
+ else:
+ return sys.version_info[0], sys.version_info[1]
+
+
+def get_impl_tag():
+ # type: () -> str
+ """
+ Returns the Tag for this specific implementation.
+ """
+ return "{}{}".format(get_abbr_impl(), get_impl_ver())
+
+
+def get_flag(var, fallback, expected=True, warn=True):
+ # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool
+ """Use a fallback method for determining SOABI flags if the needed config
+ var is unset or unavailable."""
+ val = get_config_var(var)
+ if val is None:
+ if warn:
+ logger.debug("Config variable '%s' is unset, Python ABI tag may "
+ "be incorrect", var)
+ return fallback()
+ return val == expected
+
+
+def get_abi_tag():
+ # type: () -> Optional[str]
+ """Return the ABI tag based on SOABI (if available) or emulate SOABI
+ (CPython 2, PyPy)."""
+ soabi = get_config_var('SOABI')
+ impl = get_abbr_impl()
+ if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
+ d = ''
+ m = ''
+ u = ''
+ if get_flag('Py_DEBUG',
+ lambda: hasattr(sys, 'gettotalrefcount'),
+ warn=(impl == 'cp')):
+ d = 'd'
+ if get_flag('WITH_PYMALLOC',
+ lambda: impl == 'cp',
+ warn=(impl == 'cp')):
+ m = 'm'
+ if get_flag('Py_UNICODE_SIZE',
+ lambda: sys.maxunicode == 0x10ffff,
+ expected=4,
+ warn=(impl == 'cp' and
+ sys.version_info < (3, 3))) \
+ and sys.version_info < (3, 3):
+ u = 'u'
+ abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
+ elif soabi and soabi.startswith('cpython-'):
+ abi = 'cp' + soabi.split('-')[1]
+ elif soabi:
+ abi = soabi.replace('.', '_').replace('-', '_')
+ else:
+ abi = None
+ return abi
+
+
+def _is_running_32bit():
+ # type: () -> bool
+ return sys.maxsize == 2147483647
+
+
+def get_platform():
+ # type: () -> str
+ """Return our platform name 'win32', 'linux_x86_64'"""
+ if sys.platform == 'darwin':
+ # distutils.util.get_platform() returns the release based on the value
+ # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
+ # be significantly older than the user's current machine.
+ release, _, machine = platform.mac_ver()
+ split_ver = release.split('.')
+
+ if machine == "x86_64" and _is_running_32bit():
+ machine = "i386"
+ elif machine == "ppc64" and _is_running_32bit():
+ machine = "ppc"
+
+ return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
+
+ # XXX remove distutils dependency
+ result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
+ if result == "linux_x86_64" and _is_running_32bit():
+ # 32 bit Python program (running on a 64 bit Linux): pip should only
+ # install and run 32 bit compiled extensions in that case.
+ result = "linux_i686"
+
+ return result
+
+
+def is_manylinux1_compatible():
+ # type: () -> bool
+ # Only Linux, and only x86-64 / i686
+ if get_platform() not in {"linux_x86_64", "linux_i686"}:
+ return False
+
+ # Check for presence of _manylinux module
+ try:
+ import _manylinux
+ return bool(_manylinux.manylinux1_compatible)
+ except (ImportError, AttributeError):
+ # Fall through to heuristic check below
+ pass
+
+ # Check glibc version. CentOS 5 uses glibc 2.5.
+ return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
+
+
+def is_manylinux2010_compatible():
+ # type: () -> bool
+ # Only Linux, and only x86-64 / i686
+ if get_platform() not in {"linux_x86_64", "linux_i686"}:
+ return False
+
+ # Check for presence of _manylinux module
+ try:
+ import _manylinux
+ return bool(_manylinux.manylinux2010_compatible)
+ except (ImportError, AttributeError):
+ # Fall through to heuristic check below
+ pass
+
+ # Check glibc version. CentOS 6 uses glibc 2.12.
+ return pip._internal.utils.glibc.have_compatible_glibc(2, 12)
+
+
+def get_darwin_arches(major, minor, machine):
+ # type: (int, int, str) -> List[str]
+ """Return a list of supported arches (including group arches) for
+ the given major, minor and machine architecture of a macOS machine.
+ """
+ arches = []
+
+ def _supports_arch(major, minor, arch):
+ # type: (int, int, str) -> bool
+ # Looking at the application support for macOS versions in the chart
+ # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
+ # our timeline looks roughly like:
+ #
+ # 10.0 - Introduces ppc support.
+ # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
+ # and x86_64 support is CLI only, and cannot be used for GUI
+ # applications.
+ # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
+ # 10.6 - Drops support for ppc64
+ # 10.7 - Drops support for ppc
+ #
+ # Given that we do not know if we're installing a CLI or a GUI
+ # application, we must be conservative and assume it might be a GUI
+ # application and behave as if ppc64 and x86_64 support did not occur
+ # until 10.5.
+ #
+ # Note: The above information is taken from the "Application support"
+ # column in the chart not the "Processor support" since I believe
+ # that we care about what instruction sets an application can use
+ # not which processors the OS supports.
+ if arch == 'ppc':
+ return (major, minor) <= (10, 5)
+ if arch == 'ppc64':
+ return (major, minor) == (10, 5)
+ if arch == 'i386':
+ return (major, minor) >= (10, 4)
+ if arch == 'x86_64':
+ return (major, minor) >= (10, 5)
+ if arch in groups:
+ for garch in groups[arch]:
+ if _supports_arch(major, minor, garch):
+ return True
+ return False
+
+ groups = OrderedDict([
+ ("fat", ("i386", "ppc")),
+ ("intel", ("x86_64", "i386")),
+ ("fat64", ("x86_64", "ppc64")),
+ ("fat32", ("x86_64", "i386", "ppc")),
+ ]) # type: Dict[str, Tuple[str, ...]]
+
+ if _supports_arch(major, minor, machine):
+ arches.append(machine)
+
+ for garch in groups:
+ if machine in groups[garch] and _supports_arch(major, minor, garch):
+ arches.append(garch)
+
+ arches.append('universal')
+
+ return arches
+
+
+def get_all_minor_versions_as_strings(version_info):
+ # type: (Tuple[int, ...]) -> List[str]
+ versions = []
+ major = version_info[:-1]
+ # Support all previous minor Python versions.
+ for minor in range(version_info[-1], -1, -1):
+ versions.append(''.join(map(str, major + (minor,))))
+ return versions
+
+
+def get_supported(
+ versions=None, # type: Optional[List[str]]
+ noarch=False, # type: bool
+ platform=None, # type: Optional[str]
+ impl=None, # type: Optional[str]
+ abi=None # type: Optional[str]
+):
+ # type: (...) -> List[Pep425Tag]
+ """Return a list of supported tags for each version specified in
+ `versions`.
+
+ :param versions: a list of string versions, of the form ["33", "32"],
+ or None. The first version will be assumed to support our ABI.
+ :param platform: specify the exact platform you want valid
+ tags for, or None. If None, use the local system platform.
+ :param impl: specify the exact implementation you want valid
+ tags for, or None. If None, use the local interpreter impl.
+ :param abi: specify the exact abi you want valid
+ tags for, or None. If None, use the local interpreter abi.
+ """
+ supported = []
+
+ # Versions must be given in order of preference
+ if versions is None:
+ version_info = get_impl_version_info()
+ versions = get_all_minor_versions_as_strings(version_info)
+
+ impl = impl or get_abbr_impl()
+
+ abis = [] # type: List[str]
+
+ abi = abi or get_abi_tag()
+ if abi:
+ abis[0:0] = [abi]
+
+ abi3s = set()
+ for suffix in get_extension_suffixes():
+ if suffix.startswith('.abi'):
+ abi3s.add(suffix.split('.', 2)[1])
+
+ abis.extend(sorted(list(abi3s)))
+
+ abis.append('none')
+
+ if not noarch:
+ arch = platform or get_platform()
+ arch_prefix, arch_sep, arch_suffix = arch.partition('_')
+ if arch.startswith('macosx'):
+ # support macosx-10.6-intel on macosx-10.9-x86_64
+ match = _osx_arch_pat.match(arch)
+ if match:
+ name, major, minor, actual_arch = match.groups()
+ tpl = '{}_{}_%i_%s'.format(name, major)
+ arches = []
+ for m in reversed(range(int(minor) + 1)):
+ for a in get_darwin_arches(int(major), m, actual_arch):
+ arches.append(tpl % (m, a))
+ else:
+ # arch pattern didn't match (?!)
+ arches = [arch]
+ elif arch_prefix == 'manylinux2010':
+ # manylinux1 wheels run on most manylinux2010 systems with the
+ # exception of wheels depending on ncurses. PEP 571 states
+ # manylinux1 wheels should be considered manylinux2010 wheels:
+ # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
+ arches = [arch, 'manylinux1' + arch_sep + arch_suffix]
+ elif platform is None:
+ arches = []
+ if is_manylinux2010_compatible():
+ arches.append('manylinux2010' + arch_sep + arch_suffix)
+ if is_manylinux1_compatible():
+ arches.append('manylinux1' + arch_sep + arch_suffix)
+ arches.append(arch)
+ else:
+ arches = [arch]
+
+ # Current version, current API (built specifically for our Python):
+ for abi in abis:
+ for arch in arches:
+ supported.append(('%s%s' % (impl, versions[0]), abi, arch))
+
+ # abi3 modules compatible with older version of Python
+ for version in versions[1:]:
+ # abi3 was introduced in Python 3.2
+ if version in {'31', '30'}:
+ break
+ for abi in abi3s: # empty set if not Python 3
+ for arch in arches:
+ supported.append(("%s%s" % (impl, version), abi, arch))
+
+ # Has binaries, does not use the Python API:
+ for arch in arches:
+ supported.append(('py%s' % (versions[0][0]), 'none', arch))
+
+ # No abi / arch, but requires our implementation:
+ supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
+ # Tagged specifically as being cross-version compatible
+ # (with just the major version specified)
+ supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
+
+ # No abi / arch, generic Python
+ for i, version in enumerate(versions):
+ supported.append(('py%s' % (version,), 'none', 'any'))
+ if i == 0:
+ supported.append(('py%s' % (version[0]), 'none', 'any'))
+
+ return supported
+
+
+implementation_tag = get_impl_tag()
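A quick way to see the tag set for the running interpreter:

    from pip._internal.pep425tags import get_impl_tag, get_platform, get_supported

    print(get_impl_tag())  # e.g. 'cp37' on CPython 3.7
    print(get_platform())  # e.g. 'linux_x86_64' or 'macosx_10_14_x86_64'

    # Tags come back most-preferred first; a wheel is installable if
    # any of its tags appears in this list.
    for tag in get_supported()[:5]:
        print('-'.join(tag))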
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py
new file mode 100644
index 00000000..43efbed4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py
@@ -0,0 +1,171 @@
+from __future__ import absolute_import
+
+import io
+import os
+import sys
+
+from pip._vendor import pytoml, six
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Tuple, Optional, List
+
+
+def _is_list_of_str(obj):
+ # type: (Any) -> bool
+ return (
+ isinstance(obj, list) and
+ all(isinstance(item, six.string_types) for item in obj)
+ )
+
+
+def make_pyproject_path(setup_py_dir):
+ # type: (str) -> str
+ path = os.path.join(setup_py_dir, 'pyproject.toml')
+
+ # Python2 __file__ should not be unicode
+ if six.PY2 and isinstance(path, six.text_type):
+ path = path.encode(sys.getfilesystemencoding())
+
+ return path
+
+
+def load_pyproject_toml(
+ use_pep517, # type: Optional[bool]
+ pyproject_toml, # type: str
+ setup_py, # type: str
+ req_name # type: str
+):
+ # type: (...) -> Optional[Tuple[List[str], str, List[str]]]
+ """Load the pyproject.toml file.
+
+ Parameters:
+ use_pep517 - Has the user requested PEP 517 processing? None
+ means the user hasn't explicitly specified.
+ pyproject_toml - Location of the project's pyproject.toml file
+ setup_py - Location of the project's setup.py file
+ req_name - The name of the requirement we're processing (for
+ error reporting)
+
+ Returns:
+ None if we should use the legacy code path, otherwise a tuple
+ (
+ requirements from pyproject.toml,
+ name of PEP 517 backend,
+ requirements we should check are installed after setting
+ up the build environment
+ )
+ """
+ has_pyproject = os.path.isfile(pyproject_toml)
+ has_setup = os.path.isfile(setup_py)
+
+ if has_pyproject:
+ with io.open(pyproject_toml, encoding="utf-8") as f:
+ pp_toml = pytoml.load(f)
+ build_system = pp_toml.get("build-system")
+ else:
+ build_system = None
+
+ # The following cases must use PEP 517
+ # We check for use_pep517 being non-None and falsey because that means
+ # the user explicitly requested --no-use-pep517. The value 0 as
+ # opposed to False can occur when the value is provided via an
+ # environment variable or config file option (due to the quirk of
+ # strtobool() returning an integer in pip's configuration code).
+ if has_pyproject and not has_setup:
+ if use_pep517 is not None and not use_pep517:
+ raise InstallationError(
+ "Disabling PEP 517 processing is invalid: "
+ "project does not have a setup.py"
+ )
+ use_pep517 = True
+ elif build_system and "build-backend" in build_system:
+ if use_pep517 is not None and not use_pep517:
+ raise InstallationError(
+ "Disabling PEP 517 processing is invalid: "
+ "project specifies a build backend of {} "
+ "in pyproject.toml".format(
+ build_system["build-backend"]
+ )
+ )
+ use_pep517 = True
+
+ # If we haven't worked out whether to use PEP 517 yet,
+ # and the user hasn't explicitly stated a preference,
+ # we do so if the project has a pyproject.toml file.
+ elif use_pep517 is None:
+ use_pep517 = has_pyproject
+
+ # At this point, we know whether we're going to use PEP 517.
+ assert use_pep517 is not None
+
+ # If we're using the legacy code path, there is nothing further
+ # for us to do here.
+ if not use_pep517:
+ return None
+
+ if build_system is None:
+ # Either the user has a pyproject.toml with no build-system
+ # section, or the user has no pyproject.toml, but has opted in
+ # explicitly via --use-pep517.
+ # In the absence of any explicit backend specification, we
+ # assume the setuptools backend that most closely emulates the
+ # traditional direct setup.py execution, and require wheel and
+ # a version of setuptools that supports that backend.
+
+ build_system = {
+ "requires": ["setuptools>=40.8.0", "wheel"],
+ "build-backend": "setuptools.build_meta:__legacy__",
+ }
+
+ # If we're using PEP 517, we have build system information (either
+ # from pyproject.toml, or defaulted by the code above).
+ # Note that at this point, we do not know if the user has actually
+ # specified a backend, though.
+ assert build_system is not None
+
+ # Ensure that the build-system section in pyproject.toml conforms
+ # to PEP 518.
+ error_template = (
+ "{package} has a pyproject.toml file that does not comply "
+ "with PEP 518: {reason}"
+ )
+
+ # Specifying the build-system table but not the requires key is invalid
+ if "requires" not in build_system:
+ raise InstallationError(
+ error_template.format(package=req_name, reason=(
+ "it has a 'build-system' table but not "
+ "'build-system.requires' which is mandatory in the table"
+ ))
+ )
+
+ # Error out if requires is not a list of strings
+ requires = build_system["requires"]
+ if not _is_list_of_str(requires):
+ raise InstallationError(error_template.format(
+ package=req_name,
+ reason="'build-system.requires' is not a list of strings.",
+ ))
+
+ backend = build_system.get("build-backend")
+ check = [] # type: List[str]
+ if backend is None:
+ # If the user didn't specify a backend, we assume they want to use
+ # the setuptools backend. But we can't be sure they have included
+ # a version of setuptools which supplies the backend, or wheel
+ # (which is needed by the backend) in their requirements. So we
+ # make a note to check that those requirements are present once
+ # we have set up the environment.
+ # This is quite a lot of work to check for a very specific case. But
+ # the problem is, that case is potentially quite common - projects that
+ # adopted PEP 518 early for the ability to specify requirements to
+ # execute setup.py, but never considered needing to mention the build
+ # tools themselves. The original PEP 518 code had a similar check (but
+ # implemented in a different way).
+ backend = "setuptools.build_meta:__legacy__"
+ check = ["setuptools>=40.8.0", "wheel"]
+
+ return (requires, backend, check)
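+
+
+# Example (illustrative; the paths and requirement name below are made up):
+# a caller resolves the file locations first and then interprets the result:
+#
+#   result = load_pyproject_toml(
+#       use_pep517=None,
+#       pyproject_toml='/src/pkg/pyproject.toml',
+#       setup_py='/src/pkg/setup.py',
+#       req_name='pkg',
+#   )
+#   if result is None:
+#       ...  # legacy setup.py code path
+#   else:
+#       requires, backend, check = result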
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py
new file mode 100644
index 00000000..c39f63fa
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import
+
+import logging
+
+from .req_install import InstallRequirement
+from .req_set import RequirementSet
+from .req_file import parse_requirements
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, List, Sequence
+
+__all__ = [
+ "RequirementSet", "InstallRequirement",
+ "parse_requirements", "install_given_reqs",
+]
+
+logger = logging.getLogger(__name__)
+
+
+def install_given_reqs(
+ to_install, # type: List[InstallRequirement]
+ install_options, # type: List[str]
+ global_options=(), # type: Sequence[str]
+ *args, # type: Any
+ **kwargs # type: Any
+):
+ # type: (...) -> List[InstallRequirement]
+ """
+ Install everything in the given list.
+
+ (to be called after having downloaded and unpacked the packages)
+ """
+
+ if to_install:
+ logger.info(
+ 'Installing collected packages: %s',
+ ', '.join([req.name for req in to_install]),
+ )
+
+ with indent_log():
+ for requirement in to_install:
+ if requirement.conflicts_with:
+ logger.info(
+ 'Found existing installation: %s',
+ requirement.conflicts_with,
+ )
+ with indent_log():
+ uninstalled_pathset = requirement.uninstall(
+ auto_confirm=True
+ )
+ try:
+ requirement.install(
+ install_options,
+ global_options,
+ *args,
+ **kwargs
+ )
+ except Exception:
+ should_rollback = (
+ requirement.conflicts_with and
+ not requirement.install_succeeded
+ )
+ # if install did not succeed, rollback previous uninstall
+ if should_rollback:
+ uninstalled_pathset.rollback()
+ raise
+ else:
+ should_commit = (
+ requirement.conflicts_with and
+ requirement.install_succeeded
+ )
+ if should_commit:
+ uninstalled_pathset.commit()
+ requirement.remove_temporary_source()
+
+ return to_install
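+
+
+# Example flow (illustrative): if 'foo 1.0' is already installed and
+# 'foo==2.0' is being installed, the existing copy is uninstalled first;
+# a failed install rolls that uninstall back, while a successful install
+# commits it and removes the temporary source tree.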
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..fee124c0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
new file mode 100644
index 00000000..7c20dade
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
new file mode 100644
index 00000000..c6146ec7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
new file mode 100644
index 00000000..f7b8a6ef
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
new file mode 100644
index 00000000..36a3106d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
new file mode 100644
index 00000000..eec20425
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
new file mode 100644
index 00000000..83068d48
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py
new file mode 100644
index 00000000..cd0ab504
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py
@@ -0,0 +1,349 @@
+"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's
+size, so moving them and their support code out of that class makes the
+rest of the code easier to understand.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+"""
+
+import logging
+import os
+import re
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.specifiers import Specifier
+from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
+
+from pip._internal.download import is_archive_file, is_url, url_to_path
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.index import PyPI, TestPyPI
+from pip._internal.models.link import Link
+from pip._internal.pyproject import make_pyproject_path
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.misc import is_installable_dir, path_to_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.vcs import vcs
+from pip._internal.wheel import Wheel
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Optional, Set, Tuple, Union,
+ )
+ from pip._internal.cache import WheelCache
+
+
+__all__ = [
+ "install_req_from_editable", "install_req_from_line",
+ "parse_editable"
+]
+
+logger = logging.getLogger(__name__)
+operators = Specifier._operators.keys()
+
+
+def _strip_extras(path):
+ # type: (str) -> Tuple[str, Optional[str]]
+ m = re.match(r'^(.+)(\[[^\]]+\])$', path)
+ extras = None
+ if m:
+ path_no_extras = m.group(1)
+ extras = m.group(2)
+ else:
+ path_no_extras = path
+
+ return path_no_extras, extras
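+
+# For example (illustrative):
+#   _strip_extras('./proj[dev,test]')  ->  ('./proj', '[dev,test]')
+#   _strip_extras('./proj')            ->  ('./proj', None)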
+
+
+def parse_editable(editable_req):
+ # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
+ """Parses an editable requirement into:
+ - a requirement name
+ - an URL
+ - extras
+ - editable options
+ Accepted requirements:
+ svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+ .[some_extra]
+ """
+
+ url = editable_req
+
+ # If a file path is specified with extras, strip off the extras.
+ url_no_extras, extras = _strip_extras(url)
+
+ if os.path.isdir(url_no_extras):
+ if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
+ msg = (
+ 'File "setup.py" not found. Directory cannot be installed '
+ 'in editable mode: {}'.format(os.path.abspath(url_no_extras))
+ )
+ pyproject_path = make_pyproject_path(url_no_extras)
+ if os.path.isfile(pyproject_path):
+ msg += (
+ '\n(A "pyproject.toml" file was found, but editable '
+ 'mode currently requires a setup.py based build.)'
+ )
+ raise InstallationError(msg)
+
+ # Treating it as code that has already been checked out
+ url_no_extras = path_to_url(url_no_extras)
+
+ if url_no_extras.lower().startswith('file:'):
+ package_name = Link(url_no_extras).egg_fragment
+ if extras:
+ return (
+ package_name,
+ url_no_extras,
+ Requirement("placeholder" + extras.lower()).extras,
+ )
+ else:
+ return package_name, url_no_extras, None
+
+ for version_control in vcs:
+ if url.lower().startswith('%s:' % version_control):
+ url = '%s+%s' % (version_control, url)
+ break
+
+ if '+' not in url:
+ raise InstallationError(
+ '{} is not a valid editable requirement. '
+ 'It should either be a path to a local project or a VCS URL '
+ '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
+ )
+
+ vc_type = url.split('+', 1)[0].lower()
+
+ if not vcs.get_backend(vc_type):
+ error_message = 'For --editable=%s only ' % editable_req + \
+ ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
+ ' is currently supported'
+ raise InstallationError(error_message)
+
+ package_name = Link(url).egg_fragment
+ if not package_name:
+ raise InstallationError(
+ "Could not detect requirement name for '%s', please specify one "
+ "with #egg=your_package_name" % editable_req
+ )
+ return package_name, url, None
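+
+# Example results (illustrative; the URLs and paths are made up):
+#   parse_editable('git+https://example.com/repo.git#egg=Foo')
+#       -> ('Foo', 'git+https://example.com/repo.git#egg=Foo', None)
+#   parse_editable('.[some_extra]')  # a local setup.py project
+#       -> (None, 'file:///abs/path/to/project', {'some_extra'})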
+
+
+def deduce_helpful_msg(req):
+ # type: (str) -> str
+ """Returns helpful msg in case requirements file does not exist,
+ or cannot be parsed.
+
+ :params req: Requirements file path
+ """
+ msg = ""
+ if os.path.exists(req):
+ msg = " It does exist."
+ # Try to parse and check if it is a requirements file.
+ try:
+ with open(req, 'r') as fp:
+ # parse first line only
+ next(parse_requirements(fp.read()))
+ msg += " The argument you provided " + \
+ "(%s) appears to be a" % (req) + \
+ " requirements file. If that is the" + \
+ " case, use the '-r' flag to install" + \
+ " the packages specified within it."
+ except RequirementParseError:
+ logger.debug("Cannot parse '%s' as requirements \
+ file" % (req), exc_info=True)
+ else:
+ msg += " File '%s' does not exist." % (req)
+ return msg
+
+
+# ---- The actual constructors follow ----
+
+
+def install_req_from_editable(
+ editable_req, # type: str
+ comes_from=None, # type: Optional[str]
+ use_pep517=None, # type: Optional[bool]
+ isolated=False, # type: bool
+ options=None, # type: Optional[Dict[str, Any]]
+ wheel_cache=None, # type: Optional[WheelCache]
+ constraint=False # type: bool
+):
+ # type: (...) -> InstallRequirement
+ name, url, extras_override = parse_editable(editable_req)
+ if url.startswith('file:'):
+ source_dir = url_to_path(url)
+ else:
+ source_dir = None
+
+ if name is not None:
+ try:
+ req = Requirement(name)
+ except InvalidRequirement:
+ raise InstallationError("Invalid requirement: '%s'" % name)
+ else:
+ req = None
+ return InstallRequirement(
+ req, comes_from, source_dir=source_dir,
+ editable=True,
+ link=Link(url),
+ constraint=constraint,
+ use_pep517=use_pep517,
+ isolated=isolated,
+ options=options if options else {},
+ wheel_cache=wheel_cache,
+ extras=extras_override or (),
+ )
+
+
+def install_req_from_line(
+ name, # type: str
+ comes_from=None, # type: Optional[Union[str, InstallRequirement]]
+ use_pep517=None, # type: Optional[bool]
+ isolated=False, # type: bool
+ options=None, # type: Optional[Dict[str, Any]]
+ wheel_cache=None, # type: Optional[WheelCache]
+ constraint=False, # type: bool
+ line_source=None, # type: Optional[str]
+):
+ # type: (...) -> InstallRequirement
+ """Creates an InstallRequirement from a name, which might be a
+ requirement, directory containing 'setup.py', filename, or URL.
+
+ :param line_source: An optional string describing where the line is from,
+ for logging purposes in case of an error.
+ """
+ if is_url(name):
+ marker_sep = '; '
+ else:
+ marker_sep = ';'
+ if marker_sep in name:
+ name, markers_as_string = name.split(marker_sep, 1)
+ markers_as_string = markers_as_string.strip()
+ if not markers_as_string:
+ markers = None
+ else:
+ markers = Marker(markers_as_string)
+ else:
+ markers = None
+ name = name.strip()
+ req_as_string = None
+ path = os.path.normpath(os.path.abspath(name))
+ link = None
+ extras_as_string = None
+
+ if is_url(name):
+ link = Link(name)
+ else:
+ p, extras_as_string = _strip_extras(path)
+ looks_like_dir = os.path.isdir(p) and (
+ os.path.sep in name or
+ (os.path.altsep is not None and os.path.altsep in name) or
+ name.startswith('.')
+ )
+ if looks_like_dir:
+ if not is_installable_dir(p):
+ raise InstallationError(
+ "Directory %r is not installable. Neither 'setup.py' "
+ "nor 'pyproject.toml' found." % name
+ )
+ link = Link(path_to_url(p))
+ elif is_archive_file(p):
+ if not os.path.isfile(p):
+ logger.warning(
+ 'Requirement %r looks like a filename, but the '
+ 'file does not exist',
+ name
+ )
+ link = Link(path_to_url(p))
+
+ # it's a local file, dir, or url
+ if link:
+ # Handle relative file URLs
+ if link.scheme == 'file' and re.search(r'\.\./', link.url):
+ link = Link(
+ path_to_url(os.path.normpath(os.path.abspath(link.path))))
+ # wheel file
+ if link.is_wheel:
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
+ req_as_string = "%s==%s" % (wheel.name, wheel.version)
+ else:
+            # Set the req to the egg fragment. When it's not there, this
+            # will become an 'unnamed' requirement.
+ req_as_string = link.egg_fragment
+
+ # a requirement specifier
+ else:
+ req_as_string = name
+
+ if extras_as_string:
+ extras = Requirement("placeholder" + extras_as_string.lower()).extras
+ else:
+ extras = ()
+ if req_as_string is not None:
+ try:
+ req = Requirement(req_as_string)
+ except InvalidRequirement:
+ if os.path.sep in req_as_string:
+ add_msg = "It looks like a path."
+ add_msg += deduce_helpful_msg(req_as_string)
+ elif ('=' in req_as_string and
+ not any(op in req_as_string for op in operators)):
+ add_msg = "= is not a valid operator. Did you mean == ?"
+ else:
+ add_msg = ''
+ if line_source is None:
+ source = ''
+ else:
+ source = ' (from {})'.format(line_source)
+ msg = (
+ 'Invalid requirement: {!r}{}'.format(req_as_string, source)
+ )
+ if add_msg:
+ msg += '\nHint: {}'.format(add_msg)
+ raise InstallationError(msg)
+ else:
+ req = None
+
+ return InstallRequirement(
+ req, comes_from, link=link, markers=markers,
+ use_pep517=use_pep517, isolated=isolated,
+ options=options if options else {},
+ wheel_cache=wheel_cache,
+ constraint=constraint,
+ extras=extras,
+ )
+
+
+def install_req_from_req_string(
+ req_string, # type: str
+ comes_from=None, # type: Optional[InstallRequirement]
+ isolated=False, # type: bool
+ wheel_cache=None, # type: Optional[WheelCache]
+ use_pep517=None # type: Optional[bool]
+):
+ # type: (...) -> InstallRequirement
+ try:
+ req = Requirement(req_string)
+ except InvalidRequirement:
+ raise InstallationError("Invalid requirement: '%s'" % req_string)
+
+ domains_not_allowed = [
+ PyPI.file_storage_domain,
+ TestPyPI.file_storage_domain,
+ ]
+ if (req.url and comes_from and comes_from.link and
+ comes_from.link.netloc in domains_not_allowed):
+ # Explicitly disallow pypi packages that depend on external urls
+ raise InstallationError(
+ "Packages installed from PyPI cannot depend on packages "
+ "which are not also hosted on PyPI.\n"
+ "%s depends on %s " % (comes_from.name, req)
+ )
+
+ return InstallRequirement(
+ req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
+ use_pep517=use_pep517
+ )
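+
+
+# Example calls (illustrative): install_req_from_line() accepts a plain
+# specifier, a local path or a URL, while install_req_from_req_string()
+# accepts only a specifier string:
+#
+#   install_req_from_line('requests>=2.0; python_version >= "3"')
+#   install_req_from_line('./downloads/requests-2.0.tar.gz')
+#   install_req_from_req_string('requests>=2.0')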
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py
new file mode 100644
index 00000000..5a9920fe
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py
@@ -0,0 +1,399 @@
+"""
+Requirements file parsing
+"""
+
+from __future__ import absolute_import
+
+import optparse
+import os
+import re
+import shlex
+import sys
+
+from pip._vendor.six.moves import filterfalse
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.cli import cmdoptions
+from pip._internal.download import get_file_content
+from pip._internal.exceptions import RequirementsFileParseError
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.req.constructors import (
+ install_req_from_editable, install_req_from_line,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple,
+ )
+ from pip._internal.req import InstallRequirement
+ from pip._internal.cache import WheelCache
+ from pip._internal.index import PackageFinder
+ from pip._internal.download import PipSession
+
+ ReqFileLines = Iterator[Tuple[int, Text]]
+
+__all__ = ['parse_requirements']
+
+SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
+COMMENT_RE = re.compile(r'(^|\s+)#.*$')
+
+# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
+# variable name consisting of only uppercase letters, digits or the '_'
+# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
+# 2013 Edition.
+ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
+
+SUPPORTED_OPTIONS = [
+ cmdoptions.constraints,
+ cmdoptions.editable,
+ cmdoptions.requirements,
+ cmdoptions.no_index,
+ cmdoptions.index_url,
+ cmdoptions.find_links,
+ cmdoptions.extra_index_url,
+ cmdoptions.always_unzip,
+ cmdoptions.no_binary,
+ cmdoptions.only_binary,
+ cmdoptions.pre,
+ cmdoptions.trusted_host,
+ cmdoptions.require_hashes,
+] # type: List[Callable[..., optparse.Option]]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ = [
+ cmdoptions.install_options,
+ cmdoptions.global_options,
+ cmdoptions.hash,
+] # type: List[Callable[..., optparse.Option]]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+
+
+def parse_requirements(
+ filename, # type: str
+ finder=None, # type: Optional[PackageFinder]
+ comes_from=None, # type: Optional[str]
+ options=None, # type: Optional[optparse.Values]
+ session=None, # type: Optional[PipSession]
+ constraint=False, # type: bool
+ wheel_cache=None, # type: Optional[WheelCache]
+ use_pep517=None # type: Optional[bool]
+):
+ # type: (...) -> Iterator[InstallRequirement]
+ """Parse a requirements file and yield InstallRequirement instances.
+
+ :param filename: Path or url of requirements file.
+ :param finder: Instance of pip.index.PackageFinder.
+ :param comes_from: Origin description of requirements.
+ :param options: cli options.
+ :param session: Instance of pip.download.PipSession.
+ :param constraint: If true, parsing a constraint file rather than
+ requirements file.
+ :param wheel_cache: Instance of pip.wheel.WheelCache
+ :param use_pep517: Value of the --use-pep517 option.
+ """
+ if session is None:
+ raise TypeError(
+ "parse_requirements() missing 1 required keyword argument: "
+ "'session'"
+ )
+
+ _, content = get_file_content(
+ filename, comes_from=comes_from, session=session
+ )
+
+ lines_enum = preprocess(content, options)
+
+ for line_number, line in lines_enum:
+ req_iter = process_line(line, filename, line_number, finder,
+ comes_from, options, session, wheel_cache,
+ use_pep517=use_pep517, constraint=constraint)
+ for req in req_iter:
+ yield req
+
+
+def preprocess(content, options):
+ # type: (Text, Optional[optparse.Values]) -> ReqFileLines
+ """Split, filter, and join lines, and return a line iterator
+
+ :param content: the content of the requirements file
+ :param options: cli options
+ """
+ lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
+ lines_enum = join_lines(lines_enum)
+ lines_enum = ignore_comments(lines_enum)
+ lines_enum = skip_regex(lines_enum, options)
+ lines_enum = expand_env_variables(lines_enum)
+ return lines_enum
+
+
+def process_line(
+ line, # type: Text
+ filename, # type: str
+ line_number, # type: int
+ finder=None, # type: Optional[PackageFinder]
+ comes_from=None, # type: Optional[str]
+ options=None, # type: Optional[optparse.Values]
+ session=None, # type: Optional[PipSession]
+ wheel_cache=None, # type: Optional[WheelCache]
+ use_pep517=None, # type: Optional[bool]
+ constraint=False, # type: bool
+):
+ # type: (...) -> Iterator[InstallRequirement]
+ """Process a single requirements line; This can result in creating/yielding
+ requirements, or updating the finder.
+
+ For lines that contain requirements, the only options that have an effect
+ are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+ requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+ ignored.
+
+ For lines that do not contain requirements, the only options that have an
+ effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+ be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and all are parsed and
+ affect the finder.
+
+ :param constraint: If True, parsing a constraints file.
+ :param options: OptionParser options that we may update
+ """
+ parser = build_parser(line)
+ defaults = parser.get_default_values()
+ defaults.index_url = None
+ if finder:
+ defaults.format_control = finder.format_control
+ args_str, options_str = break_args_options(line)
+ # Prior to 2.7.3, shlex cannot deal with unicode entries
+ if sys.version_info < (2, 7, 3):
+ # https://github.com/python/mypy/issues/1174
+ options_str = options_str.encode('utf8') # type: ignore
+ # https://github.com/python/mypy/issues/1174
+ opts, _ = parser.parse_args(
+ shlex.split(options_str), defaults) # type: ignore
+
+ # preserve for the nested code path
+ line_comes_from = '%s %s (line %s)' % (
+ '-c' if constraint else '-r', filename, line_number,
+ )
+
+ # yield a line requirement
+ if args_str:
+ isolated = options.isolated_mode if options else False
+ if options:
+ cmdoptions.check_install_build_global(options, opts)
+ # get the options that apply to requirements
+ req_options = {}
+ for dest in SUPPORTED_OPTIONS_REQ_DEST:
+ if dest in opts.__dict__ and opts.__dict__[dest]:
+ req_options[dest] = opts.__dict__[dest]
+ line_source = 'line {} of {}'.format(line_number, filename)
+ yield install_req_from_line(
+ args_str,
+ comes_from=line_comes_from,
+ use_pep517=use_pep517,
+ isolated=isolated,
+ options=req_options,
+ wheel_cache=wheel_cache,
+ constraint=constraint,
+ line_source=line_source,
+ )
+
+ # yield an editable requirement
+ elif opts.editables:
+ isolated = options.isolated_mode if options else False
+ yield install_req_from_editable(
+ opts.editables[0], comes_from=line_comes_from,
+ use_pep517=use_pep517,
+ constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
+ )
+
+ # parse a nested requirements file
+ elif opts.requirements or opts.constraints:
+ if opts.requirements:
+ req_path = opts.requirements[0]
+ nested_constraint = False
+ else:
+ req_path = opts.constraints[0]
+ nested_constraint = True
+ # original file is over http
+ if SCHEME_RE.search(filename):
+ # do a url join so relative paths work
+ req_path = urllib_parse.urljoin(filename, req_path)
+ # original file and nested file are paths
+ elif not SCHEME_RE.search(req_path):
+ # do a join so relative paths work
+ req_path = os.path.join(os.path.dirname(filename), req_path)
+ # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
+ parsed_reqs = parse_requirements(
+ req_path, finder, comes_from, options, session,
+ constraint=nested_constraint, wheel_cache=wheel_cache
+ )
+ for req in parsed_reqs:
+ yield req
+
+ # percolate hash-checking option upward
+ elif opts.require_hashes:
+ options.require_hashes = opts.require_hashes
+
+ # set finder options
+ elif finder:
+ find_links = finder.find_links
+ index_urls = finder.index_urls
+ if opts.index_url:
+ index_urls = [opts.index_url]
+ if opts.no_index is True:
+ index_urls = []
+ if opts.extra_index_urls:
+ index_urls.extend(opts.extra_index_urls)
+ if opts.find_links:
+ # FIXME: it would be nice to keep track of the source
+ # of the find_links: support a find-links local path
+ # relative to a requirements file.
+ value = opts.find_links[0]
+ req_dir = os.path.dirname(os.path.abspath(filename))
+ relative_to_reqs_file = os.path.join(req_dir, value)
+ if os.path.exists(relative_to_reqs_file):
+ value = relative_to_reqs_file
+ find_links.append(value)
+
+ search_scope = SearchScope(
+ find_links=find_links,
+ index_urls=index_urls,
+ )
+ finder.search_scope = search_scope
+
+ if opts.pre:
+ finder.set_allow_all_prereleases()
+ for host in opts.trusted_hosts or []:
+ source = 'line {} of {}'.format(line_number, filename)
+ finder.add_trusted_host(host, source=source)
+
+
+def break_args_options(line):
+ # type: (Text) -> Tuple[str, Text]
+ """Break up the line into an args and options string. We only want to shlex
+ (and then optparse) the options, not the args. args can contain markers
+ which are corrupted by shlex.
+ """
+ tokens = line.split(' ')
+ args = []
+ options = tokens[:]
+ for token in tokens:
+        if token.startswith('-'):  # also covers '--' options
+ break
+ else:
+ args.append(token)
+ options.pop(0)
+ return ' '.join(args), ' '.join(options) # type: ignore
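+
+# For example (illustrative):
+#   break_args_options('SomeProject >= 1.2 --hash=sha256:...')
+#       -> ('SomeProject >= 1.2', '--hash=sha256:...')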
+
+
+def build_parser(line):
+ # type: (Text) -> optparse.OptionParser
+ """
+ Return a parser for parsing requirement lines
+ """
+ parser = optparse.OptionParser(add_help_option=False)
+
+ option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
+ for option_factory in option_factories:
+ option = option_factory()
+ parser.add_option(option)
+
+ # By default optparse sys.exits on parsing errors. We want to wrap
+ # that in our own exception.
+ def parser_exit(self, msg):
+ # type: (Any, str) -> NoReturn
+ # add offending line
+ msg = 'Invalid requirement: %s\n%s' % (line, msg)
+ raise RequirementsFileParseError(msg)
+ # NOTE: mypy disallows assigning to a method
+ # https://github.com/python/mypy/issues/2427
+ parser.exit = parser_exit # type: ignore
+
+ return parser
+
+
+def join_lines(lines_enum):
+ # type: (ReqFileLines) -> ReqFileLines
+ """Joins a line ending in '\' with the previous line (except when following
+ comments). The joined line takes on the index of the first line.
+ """
+ primary_line_number = None
+ new_line = [] # type: List[Text]
+ for line_number, line in lines_enum:
+ if not line.endswith('\\') or COMMENT_RE.match(line):
+ if COMMENT_RE.match(line):
+ # this ensures comments are always matched later
+ line = ' ' + line
+ if new_line:
+ new_line.append(line)
+ yield primary_line_number, ''.join(new_line)
+ new_line = []
+ else:
+ yield line_number, line
+ else:
+ if not new_line:
+ primary_line_number = line_number
+ new_line.append(line.strip('\\'))
+
+ # last line contains \
+ if new_line:
+ yield primary_line_number, ''.join(new_line)
+
+ # TODO: handle space after '\'.
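+
+# For example (illustrative): the physical lines
+#   'requests \'
+#   '    >= 2.0'
+# are yielded as the single logical line 'requests     >= 2.0', carrying
+# the first line's number.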
+
+
+def ignore_comments(lines_enum):
+ # type: (ReqFileLines) -> ReqFileLines
+ """
+ Strips comments and filter empty lines.
+ """
+ for line_number, line in lines_enum:
+ line = COMMENT_RE.sub('', line)
+ line = line.strip()
+ if line:
+ yield line_number, line
+
+
+def skip_regex(lines_enum, options):
+ # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines
+ """
+ Skip lines that match '--skip-requirements-regex' pattern
+
+ Note: the regex pattern is only built once
+ """
+ skip_regex = options.skip_requirements_regex if options else None
+ if skip_regex:
+ pattern = re.compile(skip_regex)
+ lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
+ return lines_enum
+
+
+def expand_env_variables(lines_enum):
+ # type: (ReqFileLines) -> ReqFileLines
+ """Replace all environment variables that can be retrieved via `os.getenv`.
+
+ The only allowed format for environment variables defined in the
+ requirement file is `${MY_VARIABLE_1}` to ensure two things:
+
+ 1. Strings that contain a `$` aren't accidentally (partially) expanded.
+ 2. Ensure consistency across platforms for requirement files.
+
+ These points are the result of a discussion on the `github pull
+ request #3514 <https://github.com/pypa/pip/pull/3514>`_.
+
+ Valid characters in variable names follow the `POSIX standard
+ <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
+    to uppercase letters, digits and the `_` (underscore).
+ """
+ for line_number, line in lines_enum:
+ for env_var, var_name in ENV_VAR_RE.findall(line):
+ value = os.getenv(var_name)
+ if not value:
+ continue
+
+ line = line.replace(env_var, value)
+
+ yield line_number, line
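+
+
+# Example (illustrative; MY_TOKEN is a made-up variable name): with
+# MY_TOKEN=abc123 in the environment, the requirement line
+#   'https://user:${MY_TOKEN}@example.com/simple'
+# is yielded as
+#   'https://user:abc123@example.com/simple';
+# unset or empty variables are left unexpanded.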
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py
new file mode 100644
index 00000000..f5c93504
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py
@@ -0,0 +1,1035 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import shutil
+import sys
+import sysconfig
+import zipfile
+from distutils.util import change_root
+
+from pip._vendor import pkg_resources, six
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal import wheel
+from pip._internal.build_env import NoOpBuildEnvironment
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.link import Link
+from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
+from pip._internal.req.req_uninstall import UninstallPathSet
+from pip._internal.utils.compat import native_str
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.marker_files import PIP_DELETE_MARKER_FILENAME
+from pip._internal.utils.misc import (
+ _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
+ display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
+ get_installed_version, redact_password_from_url, rmtree,
+)
+from pip._internal.utils.packaging import get_metadata
+from pip._internal.utils.setuptools_build import make_setuptools_shim_args
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+from pip._internal.utils.virtualenv import running_under_virtualenv
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, Mapping, Optional, Sequence, Union,
+ )
+ from pip._internal.build_env import BuildEnvironment
+ from pip._internal.cache import WheelCache
+ from pip._internal.index import PackageFinder
+ from pip._vendor.pkg_resources import Distribution
+ from pip._vendor.packaging.specifiers import SpecifierSet
+ from pip._vendor.packaging.markers import Marker
+
+
+logger = logging.getLogger(__name__)
+
+
+class InstallRequirement(object):
+ """
+    Represents something that may be installed later on, may carry
+    information about where to fetch the relevant requirement, and contains
+    logic for installing said requirement.
+ """
+
+ def __init__(
+ self,
+ req, # type: Optional[Requirement]
+ comes_from, # type: Optional[Union[str, InstallRequirement]]
+ source_dir=None, # type: Optional[str]
+ editable=False, # type: bool
+ link=None, # type: Optional[Link]
+ update=True, # type: bool
+ markers=None, # type: Optional[Marker]
+ use_pep517=None, # type: Optional[bool]
+ isolated=False, # type: bool
+ options=None, # type: Optional[Dict[str, Any]]
+ wheel_cache=None, # type: Optional[WheelCache]
+ constraint=False, # type: bool
+ extras=() # type: Iterable[str]
+ ):
+ # type: (...) -> None
+ assert req is None or isinstance(req, Requirement), req
+ self.req = req
+ self.comes_from = comes_from
+ self.constraint = constraint
+ if source_dir is None:
+ self.source_dir = None # type: Optional[str]
+ else:
+ self.source_dir = os.path.normpath(os.path.abspath(source_dir))
+ self.editable = editable
+
+ self._wheel_cache = wheel_cache
+ if link is None and req and req.url:
+ # PEP 508 URL requirement
+ link = Link(req.url)
+ self.link = self.original_link = link
+
+ if extras:
+ self.extras = extras
+ elif req:
+ self.extras = {
+ pkg_resources.safe_extra(extra) for extra in req.extras
+ }
+ else:
+ self.extras = set()
+ if markers is None and req:
+ markers = req.marker
+ self.markers = markers
+
+ self._egg_info_path = None # type: Optional[str]
+ # This holds the pkg_resources.Distribution object if this requirement
+ # is already available:
+ self.satisfied_by = None
+        # This holds the pkg_resources.Distribution object if this requirement
+ # conflicts with another installed distribution:
+ self.conflicts_with = None
+ # Temporary build location
+ self._temp_build_dir = TempDirectory(kind="req-build")
+ # Used to store the global directory where the _temp_build_dir should
+ # have been created. Cf _correct_build_location method.
+ self._ideal_build_dir = None # type: Optional[str]
+ # True if the editable should be updated:
+ self.update = update
+ # Set to True after successful installation
+ self.install_succeeded = None # type: Optional[bool]
+ # UninstallPathSet of uninstalled distribution (for possible rollback)
+ self.uninstalled_pathset = None
+ self.options = options if options else {}
+ # Set to True after successful preparation of this requirement
+ self.prepared = False
+ self.is_direct = False
+
+ self.isolated = isolated
+ self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment
+
+ # For PEP 517, the directory where we request the project metadata
+ # gets stored. We need this to pass to build_wheel, so the backend
+ # can ensure that the wheel matches the metadata (see the PEP for
+ # details).
+ self.metadata_directory = None # type: Optional[str]
+
+ # The static build requirements (from pyproject.toml)
+ self.pyproject_requires = None # type: Optional[List[str]]
+
+ # Build requirements that we will check are available
+ self.requirements_to_check = [] # type: List[str]
+
+ # The PEP 517 backend we should use to build the project
+ self.pep517_backend = None # type: Optional[Pep517HookCaller]
+
+ # Are we using PEP 517 for this requirement?
+ # After pyproject.toml has been loaded, the only valid values are True
+ # and False. Before loading, None is valid (meaning "use the default").
+ # Setting an explicit value before loading pyproject.toml is supported,
+ # but after loading this flag should be treated as read only.
+ self.use_pep517 = use_pep517
+
+ def __str__(self):
+ # type: () -> str
+ if self.req:
+ s = str(self.req)
+ if self.link:
+ s += ' from %s' % redact_password_from_url(self.link.url)
+ elif self.link:
+ s = redact_password_from_url(self.link.url)
+ else:
+ s = '<InstallRequirement>'
+ if self.satisfied_by is not None:
+ s += ' in %s' % display_path(self.satisfied_by.location)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from # type: Optional[str]
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += ' (from %s)' % comes_from
+ return s
+
+ def __repr__(self):
+ # type: () -> str
+ return '<%s object: %s editable=%r>' % (
+ self.__class__.__name__, str(self), self.editable)
+
+ def format_debug(self):
+ # type: () -> str
+ """An un-tested helper for getting state, for debugging.
+ """
+ attributes = vars(self)
+ names = sorted(attributes)
+
+ state = (
+ "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
+ )
+ return '<{name} object: {{{state}}}>'.format(
+ name=self.__class__.__name__,
+ state=", ".join(state),
+ )
+
+ def populate_link(self, finder, upgrade, require_hashes):
+ # type: (PackageFinder, bool, bool) -> None
+ """Ensure that if a link can be found for this, that it is found.
+
+ Note that self.link may still be None - if Upgrade is False and the
+ requirement is already installed.
+
+ If require_hashes is True, don't use the wheel cache, because cached
+ wheels, always built locally, have different hashes than the files
+ downloaded from the index server and thus throw false hash mismatches.
+        Furthermore, cached wheels at present have nondeterministic contents due
+ to file modification times.
+ """
+ if self.link is None:
+ self.link = finder.find_requirement(self, upgrade)
+ if self._wheel_cache is not None and not require_hashes:
+ old_link = self.link
+ self.link = self._wheel_cache.get(self.link, self.name)
+ if old_link != self.link:
+ logger.debug('Using cached wheel link: %s', self.link)
+
+ # Things that are valid for all kinds of requirements?
+ @property
+ def name(self):
+ # type: () -> Optional[str]
+ if self.req is None:
+ return None
+ return native_str(pkg_resources.safe_name(self.req.name))
+
+ @property
+ def specifier(self):
+ # type: () -> SpecifierSet
+ return self.req.specifier
+
+ @property
+ def is_pinned(self):
+ # type: () -> bool
+ """Return whether I am pinned to an exact version.
+
+ For example, some-package==1.2 is pinned; some-package>1.2 is not.
+ """
+ specifiers = self.specifier
+ return (len(specifiers) == 1 and
+ next(iter(specifiers)).operator in {'==', '==='})
+
+ @property
+ def installed_version(self):
+ # type: () -> Optional[str]
+ return get_installed_version(self.name)
+
+ def match_markers(self, extras_requested=None):
+ # type: (Optional[Iterable[str]]) -> bool
+ if not extras_requested:
+ # Provide an extra to safely evaluate the markers
+ # without matching any extra
+ extras_requested = ('',)
+ if self.markers is not None:
+ return any(
+ self.markers.evaluate({'extra': extra})
+ for extra in extras_requested)
+ else:
+ return True
+
+ @property
+ def has_hash_options(self):
+ # type: () -> bool
+ """Return whether any known-good hashes are specified as options.
+
+ These activate --require-hashes mode; hashes specified as part of a
+ URL do not.
+
+ """
+ return bool(self.options.get('hashes', {}))
+
+ def hashes(self, trust_internet=True):
+ # type: (bool) -> Hashes
+ """Return a hash-comparer that considers my option- and URL-based
+ hashes to be known-good.
+
+ Hashes in URLs--ones embedded in the requirements file, not ones
+ downloaded from an index server--are almost peers with ones from
+ flags. They satisfy --require-hashes (whether it was implicitly or
+ explicitly activated) but do not activate it. md5 and sha224 are not
+ allowed in flags, which should nudge people toward good algos. We
+ always OR all hashes together, even ones from URLs.
+
+ :param trust_internet: Whether to trust URL-based (#md5=...) hashes
+ downloaded from the internet, as by populate_link()
+
+ """
+ good_hashes = self.options.get('hashes', {}).copy()
+ link = self.link if trust_internet else self.original_link
+ if link and link.hash:
+ good_hashes.setdefault(link.hash_name, []).append(link.hash)
+ return Hashes(good_hashes)
+
+ def from_path(self):
+ # type: () -> Optional[str]
+ """Format a nice indicator to show where this "comes from"
+ """
+ if self.req is None:
+ return None
+ s = str(self.req)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += '->' + comes_from
+ return s
+
+ def build_location(self, build_dir):
+ # type: (str) -> str
+ assert build_dir is not None
+ if self._temp_build_dir.path is not None:
+ return self._temp_build_dir.path
+ if self.req is None:
+            # for a requirement via a path to a directory: the name of the
+            # package is not available yet, so we create a temp directory.
+            # Once run_egg_info has run, we'll be able to fix it via
+            # _correct_build_location.
+ # Some systems have /tmp as a symlink which confuses custom
+ # builds (such as numpy). Thus, we ensure that the real path
+ # is returned.
+ self._temp_build_dir.create()
+ self._ideal_build_dir = build_dir
+
+ return self._temp_build_dir.path
+ if self.editable:
+ name = self.name.lower()
+ else:
+ name = self.name
+ # FIXME: Is there a better place to create the build_dir? (hg and bzr
+ # need this)
+ if not os.path.exists(build_dir):
+ logger.debug('Creating directory %s', build_dir)
+ _make_build_dir(build_dir)
+ return os.path.join(build_dir, name)
+
+ def _correct_build_location(self):
+ # type: () -> None
+ """Move self._temp_build_dir to self._ideal_build_dir/self.req.name
+
+ For some requirements (e.g. a path to a directory), the name of the
+ package is not available until we run egg_info, so the build_location
+ will return a temporary directory and store the _ideal_build_dir.
+
+ This is only called by self.run_egg_info to fix the temporary build
+ directory.
+ """
+ if self.source_dir is not None:
+ return
+ assert self.req is not None
+ assert self._temp_build_dir.path
+ assert (self._ideal_build_dir is not None and
+ self._ideal_build_dir.path) # type: ignore
+ old_location = self._temp_build_dir.path
+ self._temp_build_dir.path = None
+
+ new_location = self.build_location(self._ideal_build_dir)
+ if os.path.exists(new_location):
+ raise InstallationError(
+ 'A package already exists in %s; please remove it to continue'
+ % display_path(new_location))
+ logger.debug(
+ 'Moving package %s from %s to new location %s',
+ self, display_path(old_location), display_path(new_location),
+ )
+ shutil.move(old_location, new_location)
+ self._temp_build_dir.path = new_location
+ self._ideal_build_dir = None
+ self.source_dir = os.path.normpath(os.path.abspath(new_location))
+ self._egg_info_path = None
+
+ # Correct the metadata directory, if it exists
+ if self.metadata_directory:
+ old_meta = self.metadata_directory
+ rel = os.path.relpath(old_meta, start=old_location)
+ new_meta = os.path.join(new_location, rel)
+ new_meta = os.path.normpath(os.path.abspath(new_meta))
+ self.metadata_directory = new_meta
+
+ def remove_temporary_source(self):
+ # type: () -> None
+ """Remove the source files from this requirement, if they are marked
+ for deletion"""
+ if self.source_dir and os.path.exists(
+ os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
+ logger.debug('Removing source in %s', self.source_dir)
+ rmtree(self.source_dir)
+ self.source_dir = None
+ self._temp_build_dir.cleanup()
+ self.build_env.cleanup()
+
+ def check_if_exists(self, use_user_site):
+ # type: (bool) -> bool
+ """Find an installed distribution that satisfies or conflicts
+ with this requirement, and set self.satisfied_by or
+ self.conflicts_with appropriately.
+ """
+ if self.req is None:
+ return False
+ try:
+ # get_distribution() will resolve the entire list of requirements
+ # anyway, and we've already determined that we need the requirement
+ # in question, so strip the marker so that we don't try to
+ # evaluate it.
+ no_marker = Requirement(str(self.req))
+ no_marker.marker = None
+ self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
+ if self.editable and self.satisfied_by:
+ self.conflicts_with = self.satisfied_by
+ # when installing editables, nothing pre-existing should ever
+ # satisfy
+ self.satisfied_by = None
+ return True
+ except pkg_resources.DistributionNotFound:
+ return False
+ except pkg_resources.VersionConflict:
+ existing_dist = pkg_resources.get_distribution(
+ self.req.name
+ )
+ if use_user_site:
+ if dist_in_usersite(existing_dist):
+ self.conflicts_with = existing_dist
+ elif (running_under_virtualenv() and
+ dist_in_site_packages(existing_dist)):
+ raise InstallationError(
+ "Will not install to the user site because it will "
+ "lack sys.path precedence to %s in %s" %
+ (existing_dist.project_name, existing_dist.location)
+ )
+ else:
+ self.conflicts_with = existing_dist
+ return True
+
+ # Things valid for wheels
+ @property
+ def is_wheel(self):
+ # type: () -> bool
+ if not self.link:
+ return False
+ return self.link.is_wheel
+
+ def move_wheel_files(
+ self,
+ wheeldir, # type: str
+ root=None, # type: Optional[str]
+ home=None, # type: Optional[str]
+ prefix=None, # type: Optional[str]
+ warn_script_location=True, # type: bool
+ use_user_site=False, # type: bool
+ pycompile=True # type: bool
+ ):
+ # type: (...) -> None
+ wheel.move_wheel_files(
+ self.name, self.req, wheeldir,
+ user=use_user_site,
+ home=home,
+ root=root,
+ prefix=prefix,
+ pycompile=pycompile,
+ isolated=self.isolated,
+ warn_script_location=warn_script_location,
+ )
+
+ # Things valid for sdists
+ @property
+ def setup_py_dir(self):
+ # type: () -> str
+ return os.path.join(
+ self.source_dir,
+ self.link and self.link.subdirectory_fragment or '')
+
+ @property
+ def setup_py_path(self):
+ # type: () -> str
+ assert self.source_dir, "No source dir for %s" % self
+
+ setup_py = os.path.join(self.setup_py_dir, 'setup.py')
+
+ # Python2 __file__ should not be unicode
+ if six.PY2 and isinstance(setup_py, six.text_type):
+ setup_py = setup_py.encode(sys.getfilesystemencoding())
+
+ return setup_py
+
+ @property
+ def pyproject_toml_path(self):
+ # type: () -> str
+ assert self.source_dir, "No source dir for %s" % self
+
+ return make_pyproject_path(self.setup_py_dir)
+
+ def load_pyproject_toml(self):
+ # type: () -> None
+ """Load the pyproject.toml file.
+
+ After calling this routine, all of the attributes related to PEP 517
+ processing for this requirement have been set. In particular, the
+ use_pep517 attribute can be used to determine whether we should
+ follow the PEP 517 or legacy (setup.py) code path.
+ """
+ pyproject_toml_data = load_pyproject_toml(
+ self.use_pep517,
+ self.pyproject_toml_path,
+ self.setup_py_path,
+ str(self)
+ )
+
+ self.use_pep517 = (pyproject_toml_data is not None)
+
+ if not self.use_pep517:
+ return
+
+ requires, backend, check = pyproject_toml_data
+ self.requirements_to_check = check
+ self.pyproject_requires = requires
+ self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend)
+
+ # Use a custom function to call subprocesses
+ self.spin_message = ""
+
+ def runner(
+ cmd, # type: List[str]
+ cwd=None, # type: Optional[str]
+ extra_environ=None # type: Optional[Mapping[str, Any]]
+ ):
+ # type: (...) -> None
+ with open_spinner(self.spin_message) as spinner:
+ call_subprocess(
+ cmd,
+ cwd=cwd,
+ extra_environ=extra_environ,
+ spinner=spinner
+ )
+ self.spin_message = ""
+
+ self.pep517_backend._subprocess_runner = runner
+
+ def prepare_metadata(self):
+ # type: () -> None
+ """Ensure that project metadata is available.
+
+ Under PEP 517, call the backend hook to prepare the metadata.
+ Under legacy processing, call setup.py egg-info.
+ """
+ assert self.source_dir
+
+ with indent_log():
+ if self.use_pep517:
+ self.prepare_pep517_metadata()
+ else:
+ self.run_egg_info()
+
+ if not self.req:
+ if isinstance(parse_version(self.metadata["Version"]), Version):
+ op = "=="
+ else:
+ op = "==="
+ self.req = Requirement(
+ "".join([
+ self.metadata["Name"],
+ op,
+ self.metadata["Version"],
+ ])
+ )
+ self._correct_build_location()
+ else:
+ metadata_name = canonicalize_name(self.metadata["Name"])
+ if canonicalize_name(self.req.name) != metadata_name:
+ logger.warning(
+ 'Generating metadata for package %s '
+ 'produced metadata for project name %s. Fix your '
+ '#egg=%s fragments.',
+ self.name, metadata_name, self.name
+ )
+ self.req = Requirement(metadata_name)
+
+ def prepare_pep517_metadata(self):
+ # type: () -> None
+ assert self.pep517_backend is not None
+
+ metadata_dir = os.path.join(
+ self.setup_py_dir,
+ 'pip-wheel-metadata'
+ )
+ ensure_dir(metadata_dir)
+
+ with self.build_env:
+ # Note that Pep517HookCaller implements a fallback for
+ # prepare_metadata_for_build_wheel, so we don't have to
+ # consider the possibility that this hook doesn't exist.
+ backend = self.pep517_backend
+ self.spin_message = "Preparing wheel metadata"
+ distinfo_dir = backend.prepare_metadata_for_build_wheel(
+ metadata_dir
+ )
+
+ self.metadata_directory = os.path.join(metadata_dir, distinfo_dir)
+
+ def run_egg_info(self):
+ # type: () -> None
+ if self.name:
+ logger.debug(
+ 'Running setup.py (path:%s) egg_info for package %s',
+ self.setup_py_path, self.name,
+ )
+ else:
+ logger.debug(
+ 'Running setup.py (path:%s) egg_info for package from %s',
+ self.setup_py_path, self.link,
+ )
+ base_cmd = make_setuptools_shim_args(self.setup_py_path)
+ if self.isolated:
+ base_cmd += ["--no-user-cfg"]
+ egg_info_cmd = base_cmd + ['egg_info']
+ # We can't put the .egg-info files at the root, because then the
+ # source code will be mistaken for an installed egg, causing
+ # problems
+ if self.editable:
+ egg_base_option = [] # type: List[str]
+ else:
+ egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
+ ensure_dir(egg_info_dir)
+ egg_base_option = ['--egg-base', 'pip-egg-info']
+ with self.build_env:
+ call_subprocess(
+ egg_info_cmd + egg_base_option,
+ cwd=self.setup_py_dir,
+ command_desc='python setup.py egg_info')
+
+ @property
+ def egg_info_path(self):
+ # type: () -> str
+ if self._egg_info_path is None:
+ if self.editable:
+ base = self.source_dir
+ else:
+ base = os.path.join(self.setup_py_dir, 'pip-egg-info')
+ filenames = os.listdir(base)
+ if self.editable:
+ filenames = []
+ for root, dirs, files in os.walk(base):
+ for dir in vcs.dirnames:
+ if dir in dirs:
+ dirs.remove(dir)
+ # Iterate over a copy of ``dirs``, since mutating
+ # a list while iterating over it can cause trouble.
+ # (See https://github.com/pypa/pip/pull/462.)
+ for dir in list(dirs):
+ # Don't search in anything that looks like a virtualenv
+ # environment
+ if (
+ os.path.lexists(
+ os.path.join(root, dir, 'bin', 'python')
+ ) or
+ os.path.exists(
+ os.path.join(
+ root, dir, 'Scripts', 'Python.exe'
+ )
+ )):
+ dirs.remove(dir)
+ # Also don't search through tests
+ elif dir == 'test' or dir == 'tests':
+ dirs.remove(dir)
+ filenames.extend([os.path.join(root, dir)
+ for dir in dirs])
+ filenames = [f for f in filenames if f.endswith('.egg-info')]
+
+ if not filenames:
+ raise InstallationError(
+ "Files/directories not found in %s" % base
+ )
+ # if we have more than one match, we pick the toplevel one. This
+ # can easily be the case if there is a dist folder which contains
+ # an extracted tarball for testing purposes.
+ if len(filenames) > 1:
+ filenames.sort(
+ key=lambda x: x.count(os.path.sep) +
+ (os.path.altsep and x.count(os.path.altsep) or 0)
+ )
+ self._egg_info_path = os.path.join(base, filenames[0])
+ return self._egg_info_path
+
+ @property
+ def metadata(self):
+ # type: () -> Any
+ if not hasattr(self, '_metadata'):
+ self._metadata = get_metadata(self.get_dist())
+
+ return self._metadata
+
+ def get_dist(self):
+ # type: () -> Distribution
+ """Return a pkg_resources.Distribution for this requirement"""
+ if self.metadata_directory:
+ dist_dir = self.metadata_directory
+ dist_cls = pkg_resources.DistInfoDistribution
+ else:
+ dist_dir = self.egg_info_path.rstrip(os.path.sep)
+ # https://github.com/python/mypy/issues/1174
+ dist_cls = pkg_resources.Distribution # type: ignore
+
+ # dist_dir_name can be of the form "<project>.dist-info" or
+ # e.g. "<project>.egg-info".
+ base_dir, dist_dir_name = os.path.split(dist_dir)
+ dist_name = os.path.splitext(dist_dir_name)[0]
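+        # e.g. a hypothetical 'example_pkg-1.0.dist-info' gives the
+        # dist_name 'example_pkg-1.0'.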
+ metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
+
+ return dist_cls(
+ base_dir,
+ project_name=dist_name,
+ metadata=metadata,
+ )
+
+ def assert_source_matches_version(self):
+ # type: () -> None
+ assert self.source_dir
+ version = self.metadata['version']
+ if self.req.specifier and version not in self.req.specifier:
+ logger.warning(
+ 'Requested %s, but installing version %s',
+ self,
+ version,
+ )
+ else:
+ logger.debug(
+ 'Source in %s has version %s, which satisfies requirement %s',
+ display_path(self.source_dir),
+ version,
+ self,
+ )
+
+ # For both source distributions and editables
+ def ensure_has_source_dir(self, parent_dir):
+ # type: (str) -> str
+ """Ensure that a source_dir is set.
+
+ This will create a temporary build dir if the name of the requirement
+ isn't known yet.
+
+ :param parent_dir: The ideal pip parent_dir for the source_dir.
+ Generally src_dir for editables and build_dir for sdists.
+ :return: self.source_dir
+ """
+ if self.source_dir is None:
+ self.source_dir = self.build_location(parent_dir)
+ return self.source_dir
+
+ # For editable installations
+ def install_editable(
+ self,
+ install_options, # type: List[str]
+ global_options=(), # type: Sequence[str]
+ prefix=None # type: Optional[str]
+ ):
+ # type: (...) -> None
+ logger.info('Running setup.py develop for %s', self.name)
+
+ if self.isolated:
+ global_options = list(global_options) + ["--no-user-cfg"]
+
+ if prefix:
+ prefix_param = ['--prefix={}'.format(prefix)]
+ install_options = list(install_options) + prefix_param
+
+ with indent_log():
+ # FIXME: should we do --install-headers here too?
+ with self.build_env:
+ call_subprocess(
+ make_setuptools_shim_args(self.setup_py_path) +
+ list(global_options) +
+ ['develop', '--no-deps'] +
+ list(install_options),
+
+ cwd=self.setup_py_dir,
+ )
+
+ self.install_succeeded = True
+
+ def update_editable(self, obtain=True):
+ # type: (bool) -> None
+ if not self.link:
+ logger.debug(
+ "Cannot update repository at %s; repository location is "
+ "unknown",
+ self.source_dir,
+ )
+ return
+ assert self.editable
+ assert self.source_dir
+ if self.link.scheme == 'file':
+ # Static paths don't get updated
+ return
+ assert '+' in self.link.url, "bad url: %r" % self.link.url
+ if not self.update:
+ return
+ vc_type, url = self.link.url.split('+', 1)
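+        # e.g. a hypothetical 'git+https://example.com/repo.git' link splits
+        # into vc_type 'git'; the backend is then handed the full link URL.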
+ vcs_backend = vcs.get_backend(vc_type)
+ if vcs_backend:
+ url = self.link.url
+ if obtain:
+ vcs_backend.obtain(self.source_dir, url=url)
+ else:
+ vcs_backend.export(self.source_dir, url=url)
+ else:
+ assert 0, (
+ 'Unexpected version control type (in %s): %s'
+ % (self.link, vc_type))
+
+ # Top-level Actions
+ def uninstall(self, auto_confirm=False, verbose=False,
+ use_user_site=False):
+ # type: (bool, bool, bool) -> Optional[UninstallPathSet]
+ """
+ Uninstall the distribution currently satisfying this requirement.
+
+ Prompts before removing or modifying files unless
+ ``auto_confirm`` is True.
+
+ Refuses to delete or modify files outside of ``sys.prefix`` -
+ thus uninstallation within a virtual environment can only
+ modify that virtual environment, even if the virtualenv is
+ linked to global site-packages.
+
+ """
+ if not self.check_if_exists(use_user_site):
+ logger.warning("Skipping %s as it is not installed.", self.name)
+ return None
+ dist = self.satisfied_by or self.conflicts_with
+
+ uninstalled_pathset = UninstallPathSet.from_dist(dist)
+ uninstalled_pathset.remove(auto_confirm, verbose)
+ return uninstalled_pathset
+
+ def _clean_zip_name(self, name, prefix): # only used by archive.
+ # type: (str, str) -> str
+ assert name.startswith(prefix + os.path.sep), (
+ "name %r doesn't start with prefix %r" % (name, prefix)
+ )
+ name = name[len(prefix) + 1:]
+ name = name.replace(os.path.sep, '/')
+ return name
+
+ def _get_archive_name(self, path, parentdir, rootdir):
+ # type: (str, str, str) -> str
+ path = os.path.join(parentdir, path)
+ name = self._clean_zip_name(path, rootdir)
+ return self.name + '/' + name
+
+    # TODO: Investigate if this should be kept in InstallRequirement.
+    # It seems to be used only for VCS checkouts and downloads.
+ def archive(self, build_dir):
+ # type: (str) -> None
+ assert self.source_dir
+ create_archive = True
+ archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
+ archive_path = os.path.join(build_dir, archive_name)
+ if os.path.exists(archive_path):
+ response = ask_path_exists(
+ 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
+ display_path(archive_path), ('i', 'w', 'b', 'a'))
+ if response == 'i':
+ create_archive = False
+ elif response == 'w':
+ logger.warning('Deleting %s', display_path(archive_path))
+ os.remove(archive_path)
+ elif response == 'b':
+ dest_file = backup_dir(archive_path)
+ logger.warning(
+ 'Backing up %s to %s',
+ display_path(archive_path),
+ display_path(dest_file),
+ )
+ shutil.move(archive_path, dest_file)
+ elif response == 'a':
+ sys.exit(-1)
+ if create_archive:
+ zip = zipfile.ZipFile(
+ archive_path, 'w', zipfile.ZIP_DEFLATED,
+ allowZip64=True
+ )
+ dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
+ for dirpath, dirnames, filenames in os.walk(dir):
+ if 'pip-egg-info' in dirnames:
+ dirnames.remove('pip-egg-info')
+ for dirname in dirnames:
+ dir_arcname = self._get_archive_name(dirname,
+ parentdir=dirpath,
+ rootdir=dir)
+ zipdir = zipfile.ZipInfo(dir_arcname + '/')
+ zipdir.external_attr = 0x1ED << 16 # 0o755
+ zip.writestr(zipdir, '')
+ for filename in filenames:
+ if filename == PIP_DELETE_MARKER_FILENAME:
+ continue
+ file_arcname = self._get_archive_name(filename,
+ parentdir=dirpath,
+ rootdir=dir)
+ filename = os.path.join(dirpath, filename)
+ zip.write(filename, file_arcname)
+ zip.close()
+ logger.info('Saved %s', display_path(archive_path))
+
+ def install(
+ self,
+ install_options, # type: List[str]
+ global_options=None, # type: Optional[Sequence[str]]
+ root=None, # type: Optional[str]
+ home=None, # type: Optional[str]
+ prefix=None, # type: Optional[str]
+ warn_script_location=True, # type: bool
+ use_user_site=False, # type: bool
+ pycompile=True # type: bool
+ ):
+ # type: (...) -> None
+ global_options = global_options if global_options is not None else []
+ if self.editable:
+ self.install_editable(
+ install_options, global_options, prefix=prefix,
+ )
+ return
+ if self.is_wheel:
+ version = wheel.wheel_version(self.source_dir)
+ wheel.check_compatibility(version, self.name)
+
+ self.move_wheel_files(
+ self.source_dir, root=root, prefix=prefix, home=home,
+ warn_script_location=warn_script_location,
+ use_user_site=use_user_site, pycompile=pycompile,
+ )
+ self.install_succeeded = True
+ return
+
+ # Extend the list of global and install options passed on to
+ # the setup.py call with the ones from the requirements file.
+        # Options specified in the requirements file override those
+ # specified on the command line, since the last option given
+ # to setup.py is the one that is used.
+ global_options = list(global_options) + \
+ self.options.get('global_options', [])
+ install_options = list(install_options) + \
+ self.options.get('install_options', [])
+
+ if self.isolated:
+ # https://github.com/python/mypy/issues/1174
+ global_options = global_options + ["--no-user-cfg"] # type: ignore
+
+ with TempDirectory(kind="record") as temp_dir:
+ record_filename = os.path.join(temp_dir.path, 'install-record.txt')
+ install_args = self.get_install_args(
+ global_options, record_filename, root, prefix, pycompile,
+ )
+ msg = 'Running setup.py install for %s' % (self.name,)
+ with open_spinner(msg) as spinner:
+ with indent_log():
+ with self.build_env:
+ call_subprocess(
+ install_args + install_options,
+ cwd=self.setup_py_dir,
+ spinner=spinner,
+ )
+
+ if not os.path.exists(record_filename):
+ logger.debug('Record file %s not found', record_filename)
+ return
+ self.install_succeeded = True
+
+ def prepend_root(path):
+ # type: (str) -> str
+ if root is None or not os.path.isabs(path):
+ return path
+ else:
+ return change_root(root, path)
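+        # e.g. with root='/tmp/stage' (hypothetical), an absolute entry
+        # '/usr/lib/python3.7/site-packages/x.py' becomes
+        # '/tmp/stage/usr/lib/python3.7/site-packages/x.py'.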
+
+ with open(record_filename) as f:
+ for line in f:
+ directory = os.path.dirname(line)
+ if directory.endswith('.egg-info'):
+ egg_info_dir = prepend_root(directory)
+ break
+ else:
+ logger.warning(
+ 'Could not find .egg-info directory in install record'
+ ' for %s',
+ self,
+ )
+ # FIXME: put the record somewhere
+ # FIXME: should this be an error?
+ return
+ new_lines = []
+ with open(record_filename) as f:
+ for line in f:
+ filename = line.strip()
+ if os.path.isdir(filename):
+ filename += os.path.sep
+ new_lines.append(
+ os.path.relpath(prepend_root(filename), egg_info_dir)
+ )
+ new_lines.sort()
+ ensure_dir(egg_info_dir)
+ inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
+ with open(inst_files_path, 'w') as f:
+ f.write('\n'.join(new_lines) + '\n')
+
+ def get_install_args(
+ self,
+ global_options, # type: Sequence[str]
+ record_filename, # type: str
+ root, # type: Optional[str]
+ prefix, # type: Optional[str]
+ pycompile # type: bool
+ ):
+ # type: (...) -> List[str]
+ install_args = make_setuptools_shim_args(self.setup_py_path,
+ unbuffered_output=True)
+ install_args += list(global_options) + \
+ ['install', '--record', record_filename]
+ install_args += ['--single-version-externally-managed']
+
+ if root is not None:
+ install_args += ['--root', root]
+ if prefix is not None:
+ install_args += ['--prefix', prefix]
+
+ if pycompile:
+ install_args += ["--compile"]
+ else:
+ install_args += ["--no-compile"]
+
+ if running_under_virtualenv():
+ py_ver_str = 'python' + sysconfig.get_python_version()
+ install_args += ['--install-headers',
+ os.path.join(sys.prefix, 'include', 'site',
+ py_ver_str, self.name)]
+
+ return install_args
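+        # The result is roughly (hypothetical record path):
+        #   [<python>, '-u', '-c', <setuptools shim>, 'install', '--record',
+        #    '/tmp/.../install-record.txt',
+        #    '--single-version-externally-managed', '--compile', ...]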
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py
new file mode 100644
index 00000000..d1966a4a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py
@@ -0,0 +1,193 @@
+from __future__ import absolute_import
+
+import logging
+from collections import OrderedDict
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import Wheel
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, Iterable, List, Optional, Tuple
+ from pip._internal.req.req_install import InstallRequirement
+
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementSet(object):
+
+ def __init__(self, require_hashes=False, check_supported_wheels=True):
+ # type: (bool, bool) -> None
+ """Create a RequirementSet.
+ """
+
+ self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501
+ self.require_hashes = require_hashes
+ self.check_supported_wheels = check_supported_wheels
+
+ # Mapping of alias: real_name
+ self.requirement_aliases = {} # type: Dict[str, str]
+ self.unnamed_requirements = [] # type: List[InstallRequirement]
+ self.successfully_downloaded = [] # type: List[InstallRequirement]
+ self.reqs_to_cleanup = [] # type: List[InstallRequirement]
+
+ def __str__(self):
+ # type: () -> str
+ reqs = [req for req in self.requirements.values()
+ if not req.comes_from]
+ reqs.sort(key=lambda req: req.name.lower())
+ return ' '.join([str(req.req) for req in reqs])
+
+ def __repr__(self):
+ # type: () -> str
+ reqs = [req for req in self.requirements.values()]
+ reqs.sort(key=lambda req: req.name.lower())
+ reqs_str = ', '.join([str(req.req) for req in reqs])
+ return ('<%s object; %d requirement(s): %s>'
+ % (self.__class__.__name__, len(reqs), reqs_str))
+
+ def add_requirement(
+ self,
+ install_req, # type: InstallRequirement
+ parent_req_name=None, # type: Optional[str]
+ extras_requested=None # type: Optional[Iterable[str]]
+ ):
+ # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]] # noqa: E501
+ """Add install_req as a requirement to install.
+
+ :param parent_req_name: The name of the requirement that needed this
+ added. The name is used because when multiple unnamed requirements
+ resolve to the same name, we could otherwise end up with dependency
+ links that point outside the Requirements set. parent_req must
+ already be added. Note that None implies that this is a user
+ supplied requirement, vs an inferred one.
+ :param extras_requested: an iterable of extras used to evaluate the
+ environment markers.
+ :return: Additional requirements to scan. That is either [] if
+ the requirement is not applicable, or [install_req] if the
+ requirement is applicable and has just been added.
+ """
+ name = install_req.name
+
+ # If the markers do not match, ignore this requirement.
+ if not install_req.match_markers(extras_requested):
+ logger.info(
+ "Ignoring %s: markers '%s' don't match your environment",
+ name, install_req.markers,
+ )
+ return [], None
+
+ # If the wheel is not supported, raise an error.
+        # This check should happen after filtering on environment markers,
+        # so that a single requirements file can specify different wheels
+        # for different environments/OSes.
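+        # (e.g. a hypothetical 'pkg-1.0-cp27-cp27mu-linux_x86_64.whl' link
+        # would be rejected when running under CPython 3.7)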
+ if install_req.link and install_req.link.is_wheel:
+ wheel = Wheel(install_req.link.filename)
+ if self.check_supported_wheels and not wheel.supported():
+ raise InstallationError(
+ "%s is not a supported wheel on this platform." %
+ wheel.filename
+ )
+
+ # This next bit is really a sanity check.
+ assert install_req.is_direct == (parent_req_name is None), (
+ "a direct req shouldn't have a parent and also, "
+ "a non direct req should have a parent"
+ )
+
+ # Unnamed requirements are scanned again and the requirement won't be
+ # added as a dependency until after scanning.
+ if not name:
+ # url or path requirement w/o an egg fragment
+ self.unnamed_requirements.append(install_req)
+ return [install_req], None
+
+ try:
+ existing_req = self.get_requirement(name)
+ except KeyError:
+ existing_req = None
+
+ has_conflicting_requirement = (
+ parent_req_name is None and
+ existing_req and
+ not existing_req.constraint and
+ existing_req.extras == install_req.extras and
+ existing_req.req.specifier != install_req.req.specifier
+ )
+ if has_conflicting_requirement:
+ raise InstallationError(
+ "Double requirement given: %s (already in %s, name=%r)"
+ % (install_req, existing_req, name)
+ )
+
+        # When there is no existing requirement, add this one as a
+        # dependency; it will be scanned again afterwards.
+ if not existing_req:
+ self.requirements[name] = install_req
+ # FIXME: what about other normalizations? E.g., _ vs. -?
+ if name.lower() != name:
+ self.requirement_aliases[name.lower()] = name
+            # We'd want to rescan this requirement later.
+ return [install_req], install_req
+
+        # Assume there's no need to scan this requirement again; we've
+        # already encountered it.
+ if install_req.constraint or not existing_req.constraint:
+ return [], existing_req
+
+ does_not_satisfy_constraint = (
+ install_req.link and
+ not (
+ existing_req.link and
+ install_req.link.path == existing_req.link.path
+ )
+ )
+ if does_not_satisfy_constraint:
+ self.reqs_to_cleanup.append(install_req)
+ raise InstallationError(
+ "Could not satisfy constraints for '%s': "
+ "installation from path or url cannot be "
+ "constrained to a version" % name,
+ )
+ # If we're now installing a constraint, mark the existing
+ # object for real installation.
+ existing_req.constraint = False
+ existing_req.extras = tuple(sorted(
+ set(existing_req.extras) | set(install_req.extras)
+ ))
+ logger.debug(
+ "Setting %s extras to: %s",
+ existing_req, existing_req.extras,
+ )
+ # Return the existing requirement for addition to the parent and
+ # scanning again.
+ return [existing_req], existing_req
+
+ def has_requirement(self, project_name):
+ # type: (str) -> bool
+ name = project_name.lower()
+        if ((name in self.requirements and
+             not self.requirements[name].constraint) or
+            (name in self.requirement_aliases and
+             not self.requirements[
+                 self.requirement_aliases[name]].constraint)):
+ return True
+ return False
+
+ def get_requirement(self, project_name):
+ # type: (str) -> InstallRequirement
+ for name in project_name, project_name.lower():
+ if name in self.requirements:
+ return self.requirements[name]
+ if name in self.requirement_aliases:
+ return self.requirements[self.requirement_aliases[name]]
+ raise KeyError("No project with the name %r" % project_name)
+
+ def cleanup_files(self):
+ # type: () -> None
+ """Clean up files, remove builds."""
+ logger.debug('Cleaning up...')
+ with indent_log():
+ for req in self.reqs_to_cleanup:
+ req.remove_temporary_source()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py
new file mode 100644
index 00000000..e36a3f6b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py
@@ -0,0 +1,96 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import hashlib
+import logging
+import os
+
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from types import TracebackType
+ from typing import Iterator, Optional, Set, Type
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.models.link import Link
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementTracker(object):
+
+ def __init__(self):
+ # type: () -> None
+ self._root = os.environ.get('PIP_REQ_TRACKER')
+ if self._root is None:
+ self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
+ self._temp_dir.create()
+ self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
+ logger.debug('Created requirements tracker %r', self._root)
+ else:
+ self._temp_dir = None
+ logger.debug('Re-using requirements tracker %r', self._root)
+ self._entries = set() # type: Set[InstallRequirement]
+
+ def __enter__(self):
+ # type: () -> RequirementTracker
+ return self
+
+ def __exit__(
+ self,
+ exc_type, # type: Optional[Type[BaseException]]
+ exc_val, # type: Optional[BaseException]
+ exc_tb # type: Optional[TracebackType]
+ ):
+ # type: (...) -> None
+ self.cleanup()
+
+ def _entry_path(self, link):
+ # type: (Link) -> str
+ hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
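+        # sha224 gives a 56-char hex digest, so each link (e.g. a
+        # hypothetical 'https://example.com/pkg-1.0.tar.gz') maps to a
+        # stable, filesystem-safe entry name under the tracker root.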
+ return os.path.join(self._root, hashed)
+
+ def add(self, req):
+ # type: (InstallRequirement) -> None
+ link = req.link
+ info = str(req)
+ entry_path = self._entry_path(link)
+ try:
+ with open(entry_path) as fp:
+                # Error: there's already a build in progress.
+ raise LookupError('%s is already being built: %s'
+ % (link, fp.read()))
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ assert req not in self._entries
+ with open(entry_path, 'w') as fp:
+ fp.write(info)
+ self._entries.add(req)
+ logger.debug('Added %s to build tracker %r', req, self._root)
+
+ def remove(self, req):
+ # type: (InstallRequirement) -> None
+ link = req.link
+ self._entries.remove(req)
+ os.unlink(self._entry_path(link))
+ logger.debug('Removed %s from build tracker %r', req, self._root)
+
+ def cleanup(self):
+ # type: () -> None
+ for req in set(self._entries):
+ self.remove(req)
+ remove = self._temp_dir is not None
+ if remove:
+ self._temp_dir.cleanup()
+ logger.debug('%s build tracker %r',
+ 'Removed' if remove else 'Cleaned',
+ self._root)
+
+ @contextlib.contextmanager
+ def track(self, req):
+ # type: (InstallRequirement) -> Iterator[None]
+ self.add(req)
+ yield
+ self.remove(req)
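+
+    # Typical use (hypothetical):
+    #
+    #     with RequirementTracker() as tracker:
+    #         with tracker.track(req):
+    #             ...  # build req's wheel or metadata
+    #
+    # Note that remove() only runs if the tracked block doesn't raise;
+    # cleanup() (called from __exit__) clears any leftover entries.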
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py
new file mode 100644
index 00000000..733301ce
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py
@@ -0,0 +1,633 @@
+from __future__ import absolute_import
+
+import csv
+import functools
+import logging
+import os
+import sys
+import sysconfig
+
+from pip._vendor import pkg_resources
+
+from pip._internal.exceptions import UninstallationError
+from pip._internal.locations import bin_py, bin_user
+from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
+ normalize_path, renames, rmtree,
+)
+from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
+ )
+ from pip._vendor.pkg_resources import Distribution
+
+logger = logging.getLogger(__name__)
+
+
+def _script_names(dist, script_name, is_gui):
+ # type: (Distribution, str, bool) -> List[str]
+ """Create the fully qualified name of the files created by
+ {console,gui}_scripts for the given ``dist``.
+ Returns the list of file names
+ """
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ exe_name = os.path.join(bin_dir, script_name)
+ paths_to_remove = [exe_name]
+ if WINDOWS:
+ paths_to_remove.append(exe_name + '.exe')
+ paths_to_remove.append(exe_name + '.exe.manifest')
+ if is_gui:
+ paths_to_remove.append(exe_name + '-script.pyw')
+ else:
+ paths_to_remove.append(exe_name + '-script.py')
+ return paths_to_remove
+
+
+def _unique(fn):
+ # type: (Callable) -> Callable[..., Iterator[Any]]
+ @functools.wraps(fn)
+ def unique(*args, **kw):
+ # type: (Any, Any) -> Iterator[Any]
+ seen = set() # type: Set[Any]
+ for item in fn(*args, **kw):
+ if item not in seen:
+ seen.add(item)
+ yield item
+ return unique
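+    # For example (hypothetical): a decorated generator yielding 1, 2, 1, 3
+    # produces 1, 2, 3; repeats are dropped as the wrapped iterator streams.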
+
+
+@_unique
+def uninstallation_paths(dist):
+ # type: (Distribution) -> Iterator[str]
+ """
+ Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
+
+ Yield paths to all the files in RECORD. For each .py file in RECORD, add
+ the .pyc and .pyo in the same directory.
+
+ UninstallPathSet.add() takes care of the __pycache__ .py[co].
+ """
+ r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
+ for row in r:
+ path = os.path.join(dist.location, row[0])
+ yield path
+ if path.endswith('.py'):
+ dn, fn = os.path.split(path)
+ base = fn[:-3]
+ path = os.path.join(dn, base + '.pyc')
+ yield path
+ path = os.path.join(dn, base + '.pyo')
+ yield path
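+    # e.g. a hypothetical RECORD row 'pkg/mod.py' yields pkg/mod.py,
+    # pkg/mod.pyc and pkg/mod.pyo, each joined onto dist.location.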
+
+
+def compact(paths):
+ # type: (Iterable[str]) -> Set[str]
+ """Compact a path set to contain the minimal number of paths
+ necessary to contain all paths in the set. If /a/path/ and
+ /a/path/to/a/file.txt are both in the set, leave only the
+ shorter path."""
+
+ sep = os.path.sep
+ short_paths = set() # type: Set[str]
+ for path in sorted(paths, key=len):
+ should_skip = any(
+ path.startswith(shortpath.rstrip("*")) and
+ path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+ for shortpath in short_paths
+ )
+ if not should_skip:
+ short_paths.add(path)
+ return short_paths
+
+
+def compress_for_rename(paths):
+ # type: (Iterable[str]) -> Set[str]
+ """Returns a set containing the paths that need to be renamed.
+
+ This set may include directories when the original sequence of paths
+ included every file on disk.
+ """
+ case_map = dict((os.path.normcase(p), p) for p in paths)
+ remaining = set(case_map)
+ unchecked = sorted(set(os.path.split(p)[0]
+ for p in case_map.values()), key=len)
+ wildcards = set() # type: Set[str]
+
+ def norm_join(*a):
+        # type: (*str) -> str
+ return os.path.normcase(os.path.join(*a))
+
+ for root in unchecked:
+ if any(os.path.normcase(root).startswith(w)
+ for w in wildcards):
+ # This directory has already been handled.
+ continue
+
+ all_files = set() # type: Set[str]
+ all_subdirs = set() # type: Set[str]
+ for dirname, subdirs, files in os.walk(root):
+ all_subdirs.update(norm_join(root, dirname, d)
+ for d in subdirs)
+ all_files.update(norm_join(root, dirname, f)
+ for f in files)
+ # If all the files we found are in our remaining set of files to
+ # remove, then remove them from the latter set and add a wildcard
+ # for the directory.
+ if not (all_files - remaining):
+ remaining.difference_update(all_files)
+ wildcards.add(root + os.sep)
+
+ return set(map(case_map.__getitem__, remaining)) | wildcards
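+    # e.g. if every remaining file lives under a hypothetical
+    # 'site-packages/pkg/', those entries collapse into the single wildcard
+    # 'site-packages/pkg' + os.sep, so the directory is renamed as one unit.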
+
+
+def compress_for_output_listing(paths):
+ # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
+ """Returns a tuple of 2 sets of which paths to display to user
+
+ The first set contains paths that would be deleted. Files of a package
+ are not added and the top-level directory of the package has a '*' added
+ at the end - to signify that all it's contents are removed.
+
+ The second set contains files that would have been skipped in the above
+ folders.
+ """
+
+ will_remove = set(paths)
+ will_skip = set()
+
+ # Determine folders and files
+ folders = set()
+ files = set()
+ for path in will_remove:
+ if path.endswith(".pyc"):
+ continue
+ if path.endswith("__init__.py") or ".dist-info" in path:
+ folders.add(os.path.dirname(path))
+ files.add(path)
+
+    # The ignore is probably due to https://github.com/python/mypy/issues/390
+ _normcased_files = set(map(os.path.normcase, files)) # type: ignore
+
+ folders = compact(folders)
+
+ # This walks the tree using os.walk to not miss extra folders
+ # that might get added.
+ for folder in folders:
+ for dirpath, _, dirfiles in os.walk(folder):
+ for fname in dirfiles:
+ if fname.endswith(".pyc"):
+ continue
+
+ file_ = os.path.join(dirpath, fname)
+ if (os.path.isfile(file_) and
+ os.path.normcase(file_) not in _normcased_files):
+ # We are skipping this file. Add it to the set.
+ will_skip.add(file_)
+
+ will_remove = files | {
+ os.path.join(folder, "*") for folder in folders
+ }
+
+ return will_remove, will_skip
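+    # e.g. for a hypothetical package owning 'site-packages/pkg/', will_remove
+    # shows 'site-packages/pkg/*', while a stray 'pkg/notes.txt' that pip did
+    # not install lands in will_skip.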
+
+
+class StashedUninstallPathSet(object):
+ """A set of file rename operations to stash files while
+ tentatively uninstalling them."""
+ def __init__(self):
+ # type: () -> None
+ # Mapping from source file root to [Adjacent]TempDirectory
+ # for files under that directory.
+ self._save_dirs = {} # type: Dict[str, TempDirectory]
+ # (old path, new path) tuples for each move that may need
+ # to be undone.
+ self._moves = [] # type: List[Tuple[str, str]]
+
+ def _get_directory_stash(self, path):
+ # type: (str) -> str
+ """Stashes a directory.
+
+ Directories are stashed adjacent to their original location if
+ possible, or else moved/copied into the user's temp dir."""
+
+ try:
+ save_dir = AdjacentTempDirectory(path) # type: TempDirectory
+ save_dir.create()
+ except OSError:
+ save_dir = TempDirectory(kind="uninstall")
+ save_dir.create()
+ self._save_dirs[os.path.normcase(path)] = save_dir
+
+ return save_dir.path
+
+ def _get_file_stash(self, path):
+ # type: (str) -> str
+ """Stashes a file.
+
+ If no root has been provided, one will be created for the directory
+ in the user's temp directory."""
+ path = os.path.normcase(path)
+ head, old_head = os.path.dirname(path), None
+ save_dir = None
+
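+        # Walk up the ancestors of the path looking for one that already has
+        # a stash directory; the while/else falls through to create a fresh
+        # temp dir keyed by the file's parent when none is found.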
+ while head != old_head:
+ try:
+ save_dir = self._save_dirs[head]
+ break
+ except KeyError:
+ pass
+ head, old_head = os.path.dirname(head), head
+ else:
+ # Did not find any suitable root
+ head = os.path.dirname(path)
+ save_dir = TempDirectory(kind='uninstall')
+ save_dir.create()
+ self._save_dirs[head] = save_dir
+
+ relpath = os.path.relpath(path, head)
+ if relpath and relpath != os.path.curdir:
+ return os.path.join(save_dir.path, relpath)
+ return save_dir.path
+
+ def stash(self, path):
+ # type: (str) -> str
+ """Stashes the directory or file and returns its new location.
+ """
+ if os.path.isdir(path):
+ new_path = self._get_directory_stash(path)
+ else:
+ new_path = self._get_file_stash(path)
+
+ self._moves.append((path, new_path))
+ if os.path.isdir(path) and os.path.isdir(new_path):
+ # If we're moving a directory, we need to
+ # remove the destination first or else it will be
+ # moved to inside the existing directory.
+ # We just created new_path ourselves, so it will
+ # be removable.
+ os.rmdir(new_path)
+ renames(path, new_path)
+ return new_path
+
+ def commit(self):
+ # type: () -> None
+ """Commits the uninstall by removing stashed files."""
+ for _, save_dir in self._save_dirs.items():
+ save_dir.cleanup()
+ self._moves = []
+ self._save_dirs = {}
+
+ def rollback(self):
+ # type: () -> None
+ """Undoes the uninstall by moving stashed files back."""
+ for p in self._moves:
+            logger.info("Moving to %s\n from %s", *p)
+
+ for new_path, path in self._moves:
+ try:
+ logger.debug('Replacing %s from %s', new_path, path)
+ if os.path.isfile(new_path):
+ os.unlink(new_path)
+ elif os.path.isdir(new_path):
+ rmtree(new_path)
+ renames(path, new_path)
+ except OSError as ex:
+ logger.error("Failed to restore %s", new_path)
+ logger.debug("Exception: %s", ex)
+
+ self.commit()
+
+ @property
+ def can_rollback(self):
+ # type: () -> bool
+ return bool(self._moves)
+
+
+class UninstallPathSet(object):
+ """A set of file paths to be removed in the uninstallation of a
+ requirement."""
+ def __init__(self, dist):
+ # type: (Distribution) -> None
+ self.paths = set() # type: Set[str]
+ self._refuse = set() # type: Set[str]
+ self.pth = {} # type: Dict[str, UninstallPthEntries]
+ self.dist = dist
+ self._moved_paths = StashedUninstallPathSet()
+
+ def _permitted(self, path):
+ # type: (str) -> bool
+ """
+ Return True if the given path is one we are permitted to
+ remove/modify, False otherwise.
+
+ """
+ return is_local(path)
+
+ def add(self, path):
+ # type: (str) -> None
+ head, tail = os.path.split(path)
+
+ # we normalize the head to resolve parent directory symlinks, but not
+ # the tail, since we only want to uninstall symlinks, not their targets
+ path = os.path.join(normalize_path(head), os.path.normcase(tail))
+
+ if not os.path.exists(path):
+ return
+ if self._permitted(path):
+ self.paths.add(path)
+ else:
+ self._refuse.add(path)
+
+ # __pycache__ files can show up after 'installed-files.txt' is created,
+ # due to imports
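+        # (e.g. a hypothetical 'pkg/mod.py' maps to
+        # 'pkg/__pycache__/mod.cpython-37.pyc' via cache_from_source)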
+ if os.path.splitext(path)[1] == '.py' and uses_pycache:
+ self.add(cache_from_source(path))
+
+ def add_pth(self, pth_file, entry):
+ # type: (str, str) -> None
+ pth_file = normalize_path(pth_file)
+ if self._permitted(pth_file):
+ if pth_file not in self.pth:
+ self.pth[pth_file] = UninstallPthEntries(pth_file)
+ self.pth[pth_file].add(entry)
+ else:
+ self._refuse.add(pth_file)
+
+ def remove(self, auto_confirm=False, verbose=False):
+ # type: (bool, bool) -> None
+ """Remove paths in ``self.paths`` with confirmation (unless
+ ``auto_confirm`` is True)."""
+
+ if not self.paths:
+ logger.info(
+ "Can't uninstall '%s'. No files were found to uninstall.",
+ self.dist.project_name,
+ )
+ return
+
+ dist_name_version = (
+ self.dist.project_name + "-" + self.dist.version
+ )
+ logger.info('Uninstalling %s:', dist_name_version)
+
+ with indent_log():
+ if auto_confirm or self._allowed_to_proceed(verbose):
+ moved = self._moved_paths
+
+ for_rename = compress_for_rename(self.paths)
+
+ for path in sorted(compact(for_rename)):
+ moved.stash(path)
+ logger.debug('Removing file or directory %s', path)
+
+ for pth in self.pth.values():
+ pth.remove()
+
+ logger.info('Successfully uninstalled %s', dist_name_version)
+
+ def _allowed_to_proceed(self, verbose):
+ # type: (bool) -> bool
+ """Display which files would be deleted and prompt for confirmation
+ """
+
+ def _display(msg, paths):
+ # type: (str, Iterable[str]) -> None
+ if not paths:
+ return
+
+ logger.info(msg)
+ with indent_log():
+ for path in sorted(compact(paths)):
+ logger.info(path)
+
+ if not verbose:
+ will_remove, will_skip = compress_for_output_listing(self.paths)
+ else:
+ # In verbose mode, display all the files that are going to be
+ # deleted.
+ will_remove = set(self.paths)
+ will_skip = set()
+
+ _display('Would remove:', will_remove)
+ _display('Would not remove (might be manually added):', will_skip)
+ _display('Would not remove (outside of prefix):', self._refuse)
+ if verbose:
+ _display('Will actually move:', compress_for_rename(self.paths))
+
+ return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
+
+ def rollback(self):
+ # type: () -> None
+ """Rollback the changes previously made by remove()."""
+ if not self._moved_paths.can_rollback:
+ logger.error(
+ "Can't roll back %s; was not uninstalled",
+ self.dist.project_name,
+ )
+ return
+ logger.info('Rolling back uninstall of %s', self.dist.project_name)
+ self._moved_paths.rollback()
+ for pth in self.pth.values():
+ pth.rollback()
+
+ def commit(self):
+ # type: () -> None
+ """Remove temporary save dir: rollback will no longer be possible."""
+ self._moved_paths.commit()
+
+ @classmethod
+ def from_dist(cls, dist):
+ # type: (Distribution) -> UninstallPathSet
+ dist_path = normalize_path(dist.location)
+ if not dist_is_local(dist):
+ logger.info(
+ "Not uninstalling %s at %s, outside environment %s",
+ dist.key,
+ dist_path,
+ sys.prefix,
+ )
+ return cls(dist)
+
+ if dist_path in {p for p in {sysconfig.get_path("stdlib"),
+ sysconfig.get_path("platstdlib")}
+ if p}:
+ logger.info(
+ "Not uninstalling %s at %s, as it is in the standard library.",
+ dist.key,
+ dist_path,
+ )
+ return cls(dist)
+
+ paths_to_remove = cls(dist)
+ develop_egg_link = egg_link_path(dist)
+ develop_egg_link_egg_info = '{}.egg-info'.format(
+ pkg_resources.to_filename(dist.project_name))
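+        # to_filename() swaps '-' for '_', so a hypothetical project
+        # 'my-pkg' yields 'my_pkg.egg-info'.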
+ egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
+ # Special case for distutils installed package
+ distutils_egg_info = getattr(dist._provider, 'path', None)
+
+    # The order of the uninstall cases matters: when there are two installs
+    # of the same package, pip needs to uninstall the currently detected
+    # version.
+ if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
+ not dist.egg_info.endswith(develop_egg_link_egg_info)):
+ # if dist.egg_info.endswith(develop_egg_link_egg_info), we
+ # are in fact in the develop_egg_link case
+ paths_to_remove.add(dist.egg_info)
+ if dist.has_metadata('installed-files.txt'):
+ for installed_file in dist.get_metadata(
+ 'installed-files.txt').splitlines():
+ path = os.path.normpath(
+ os.path.join(dist.egg_info, installed_file)
+ )
+ paths_to_remove.add(path)
+ # FIXME: need a test for this elif block
+ # occurs with --single-version-externally-managed/--record outside
+ # of pip
+ elif dist.has_metadata('top_level.txt'):
+ if dist.has_metadata('namespace_packages.txt'):
+ namespaces = dist.get_metadata('namespace_packages.txt')
+ else:
+ namespaces = []
+ for top_level_pkg in [
+ p for p
+ in dist.get_metadata('top_level.txt').splitlines()
+ if p and p not in namespaces]:
+ path = os.path.join(dist.location, top_level_pkg)
+ paths_to_remove.add(path)
+ paths_to_remove.add(path + '.py')
+ paths_to_remove.add(path + '.pyc')
+ paths_to_remove.add(path + '.pyo')
+
+ elif distutils_egg_info:
+ raise UninstallationError(
+ "Cannot uninstall {!r}. It is a distutils installed project "
+ "and thus we cannot accurately determine which files belong "
+ "to it which would lead to only a partial uninstall.".format(
+ dist.project_name,
+ )
+ )
+
+ elif dist.location.endswith('.egg'):
+ # package installed by easy_install
+ # We cannot match on dist.egg_name because it can slightly vary
+ # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+ paths_to_remove.add(dist.location)
+ easy_install_egg = os.path.split(dist.location)[1]
+ easy_install_pth = os.path.join(os.path.dirname(dist.location),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
+
+ elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
+ for path in uninstallation_paths(dist):
+ paths_to_remove.add(path)
+
+ elif develop_egg_link:
+ # develop egg
+ with open(develop_egg_link, 'r') as fh:
+ link_pointer = os.path.normcase(fh.readline().strip())
+ assert (link_pointer == dist.location), (
+ 'Egg-link %s does not match installed location of %s '
+ '(at %s)' % (link_pointer, dist.project_name, dist.location)
+ )
+ paths_to_remove.add(develop_egg_link)
+ easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, dist.location)
+
+ else:
+ logger.debug(
+ 'Not sure how to uninstall: %s - Check: %s',
+ dist, dist.location,
+ )
+
+    # find scripts installed via distutils' scripts= argument
+ if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
+ for script in dist.metadata_listdir('scripts'):
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ paths_to_remove.add(os.path.join(bin_dir, script))
+ if WINDOWS:
+ paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
+
+ # find console_scripts
+ _scripts_to_remove = []
+ console_scripts = dist.get_entry_map(group='console_scripts')
+ for name in console_scripts.keys():
+ _scripts_to_remove.extend(_script_names(dist, name, False))
+ # find gui_scripts
+ gui_scripts = dist.get_entry_map(group='gui_scripts')
+ for name in gui_scripts.keys():
+ _scripts_to_remove.extend(_script_names(dist, name, True))
+
+ for s in _scripts_to_remove:
+ paths_to_remove.add(s)
+
+ return paths_to_remove
+
+
+class UninstallPthEntries(object):
+ def __init__(self, pth_file):
+ # type: (str) -> None
+ if not os.path.isfile(pth_file):
+ raise UninstallationError(
+ "Cannot remove entries from nonexistent file %s" % pth_file
+ )
+ self.file = pth_file
+ self.entries = set() # type: Set[str]
+ self._saved_lines = None # type: Optional[List[bytes]]
+
+ def add(self, entry):
+ # type: (str) -> None
+ entry = os.path.normcase(entry)
+ # On Windows, os.path.normcase converts the entry to use
+ # backslashes. This is correct for entries that describe absolute
+ # paths outside of site-packages, but all the others use forward
+ # slashes.
+ if WINDOWS and not os.path.splitdrive(entry)[0]:
+ entry = entry.replace('\\', '/')
+ self.entries.add(entry)
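+        # e.g. (hypothetical) a relative './my_pkg-1.0-py3.7.egg' entry ends
+        # up with forward slashes, while an absolute 'c:\\users\\u\\project'
+        # keeps its backslashes after normcase().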
+
+ def remove(self):
+ # type: () -> None
+ logger.debug('Removing pth entries from %s:', self.file)
+ with open(self.file, 'rb') as fh:
+        # Windows uses '\r\n' with py3k, but '\n' with py2.x
+ lines = fh.readlines()
+ self._saved_lines = lines
+ if any(b'\r\n' in line for line in lines):
+ endline = '\r\n'
+ else:
+ endline = '\n'
+ # handle missing trailing newline
+ if lines and not lines[-1].endswith(endline.encode("utf-8")):
+ lines[-1] = lines[-1] + endline.encode("utf-8")
+ for entry in self.entries:
+ try:
+ logger.debug('Removing entry: %s', entry)
+ lines.remove((entry + endline).encode("utf-8"))
+ except ValueError:
+ pass
+ with open(self.file, 'wb') as fh:
+ fh.writelines(lines)
+
+ def rollback(self):
+ # type: () -> bool
+ if self._saved_lines is None:
+ logger.error(
+ 'Cannot roll back changes to %s, none were made', self.file
+ )
+ return False
+ logger.debug('Rolling %s back to previous state', self.file)
+ with open(self.file, 'wb') as fh:
+ fh.writelines(self._saved_lines)
+ return True
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..da751af6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
new file mode 100644
index 00000000..6a487507
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..056eb515
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
new file mode 100644
index 00000000..5d1469b3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
new file mode 100644
index 00000000..b960d19a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
new file mode 100644
index 00000000..5ed92bc5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
new file mode 100644
index 00000000..0670c31d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
new file mode 100644
index 00000000..bd7daf9d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
new file mode 100644
index 00000000..2b827a00
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc
new file mode 100644
index 00000000..9b90b6d8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
new file mode 100644
index 00000000..4d3b0d92
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pyc
new file mode 100644
index 00000000..4f0a69f7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
new file mode 100644
index 00000000..aaa7ead3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
new file mode 100644
index 00000000..321ef73d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
new file mode 100644
index 00000000..4362f053
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
new file mode 100644
index 00000000..ef1701df
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
new file mode 100644
index 00000000..6f329e61
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
new file mode 100644
index 00000000..90684aa7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc
new file mode 100644
index 00000000..69b60a11
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 00000000..fb261110
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,268 @@
+"""
+This code was taken from https://github.com/ActiveState/appdirs and modified
+to suit our purposes.
+"""
+from __future__ import absolute_import
+
+import os
+import sys
+
+from pip._vendor.six import PY2, text_type
+
+from pip._internal.utils.compat import WINDOWS, expanduser
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+
+def user_cache_dir(appname):
+ # type: (str) -> str
+ r"""
+ Return full path to the user-specific cache dir for this application.
+
+ "appname" is the name of application.
+
+ Typical user cache directories are:
+ macOS: ~/Library/Caches/<AppName>
+ Unix: ~/.cache/<AppName> (XDG default)
+ Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
+
+ On Windows the only suggestion in the MSDN docs is that local settings go
+ in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
+ non-roaming app data dir (the default returned by `user_data_dir`). Apps
+ typically put cache data somewhere *under* the given dir here. Some
+ examples:
+ ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+ ...\Acme\SuperApp\Cache\1.0
+
+ OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+ """
+ if WINDOWS:
+ # Get the base path
+ path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+
+ # When using Python 2, return paths as bytes on Windows like we do on
+ # other operating systems. See helper function docs for more details.
+ if PY2 and isinstance(path, text_type):
+ path = _win_path_to_bytes(path)
+
+ # Add our app name and Cache directory to it
+ path = os.path.join(path, appname, "Cache")
+ elif sys.platform == "darwin":
+ # Get the base path
+ path = expanduser("~/Library/Caches")
+
+ # Add our app name to it
+ path = os.path.join(path, appname)
+ else:
+ # Get the base path
+ path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
+
+ # Add our app name to it
+ path = os.path.join(path, appname)
+
+ return path
+
+
+def user_data_dir(appname, roaming=False):
+ # type: (str, bool) -> str
+ r"""
+ Return full path to the user-specific data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user data directories are:
+ macOS: ~/Library/Application Support/<AppName>
+ if it exists, else ~/.config/<AppName>
+ Unix: ~/.local/share/<AppName> # or in
+ $XDG_DATA_HOME, if defined
+ Win XP (not roaming): C:\Documents and Settings\<username>\ ...
+ ...Application Data\<AppName>
+ Win XP (roaming): C:\Documents and Settings\<username>\Local ...
+ ...Settings\Application Data\<AppName>
+ Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName>
+ Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName>
+
+ For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
+ That means, by default "~/.local/share/<AppName>".
+ """
+ if WINDOWS:
+ const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
+ path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
+ elif sys.platform == "darwin":
+ path = os.path.join(
+ expanduser('~/Library/Application Support/'),
+ appname,
+ ) if os.path.isdir(os.path.join(
+ expanduser('~/Library/Application Support/'),
+ appname,
+ )
+ ) else os.path.join(
+ expanduser('~/.config/'),
+ appname,
+ )
+ else:
+ path = os.path.join(
+ os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
+ appname,
+ )
+
+ return path
+
+
+def user_config_dir(appname, roaming=True):
+ # type: (str, bool) -> str
+ """Return full path to the user-specific config dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "roaming" (boolean, default True) can be set False to not use the
+ Windows roaming appdata directory. That means that for users on a
+ Windows network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user data directories are:
+ macOS: same as user_data_dir
+ Unix: ~/.config/<AppName>
+ Win *: same as user_data_dir
+
+ For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
+ That means, by default "~/.config/<AppName>".
+ """
+ if WINDOWS:
+ path = user_data_dir(appname, roaming=roaming)
+ elif sys.platform == "darwin":
+ path = user_data_dir(appname)
+ else:
+ path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
+ path = os.path.join(path, appname)
+
+ return path
+
+
+# for the discussion regarding site_config_dirs locations
+# see <https://github.com/pypa/pip/issues/1733>
+def site_config_dirs(appname):
+ # type: (str) -> List[str]
+ r"""Return a list of potential user-shared config dirs for this application.
+
+ "appname" is the name of application.
+
+ Typical user config directories are:
+ macOS: /Library/Application Support/<AppName>/
+ Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
+ $XDG_CONFIG_DIRS
+ Win XP: C:\Documents and Settings\All Users\Application ...
+ ...Data\<AppName>\
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory
+ on Vista.)
+ Win 7: Hidden, but writeable on Win 7:
+ C:\ProgramData\<AppName>\
+ """
+ if WINDOWS:
+ path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+ pathlist = [os.path.join(path, appname)]
+ elif sys.platform == 'darwin':
+ pathlist = [os.path.join('/Library/Application Support', appname)]
+ else:
+ # try looking in $XDG_CONFIG_DIRS
+ xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+ if xdg_config_dirs:
+ pathlist = [
+ os.path.join(expanduser(x), appname)
+ for x in xdg_config_dirs.split(os.pathsep)
+ ]
+ else:
+ pathlist = []
+
+ # always look in /etc directly as well
+ pathlist.append('/etc')
+
+ return pathlist
+
+
+# -- Windows support functions --
+
+def _get_win_folder_from_registry(csidl_name):
+ # type: (str) -> str
+ """
+ This is a fallback technique at best. I'm not sure if using the
+ registry for this guarantees us the correct answer for all CSIDL_*
+ names.
+ """
+ import _winreg
+
+ shell_folder_name = {
+ "CSIDL_APPDATA": "AppData",
+ "CSIDL_COMMON_APPDATA": "Common AppData",
+ "CSIDL_LOCAL_APPDATA": "Local AppData",
+ }[csidl_name]
+
+ key = _winreg.OpenKey(
+ _winreg.HKEY_CURRENT_USER,
+ r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+ )
+ directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
+ return directory
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+ # type: (str) -> str
+ csidl_const = {
+ "CSIDL_APPDATA": 26,
+ "CSIDL_COMMON_APPDATA": 35,
+ "CSIDL_LOCAL_APPDATA": 28,
+ }[csidl_name]
+
+ buf = ctypes.create_unicode_buffer(1024)
+ ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in buf:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf2 = ctypes.create_unicode_buffer(1024)
+ if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+ buf = buf2
+
+ return buf.value
+
+
+if WINDOWS:
+ try:
+ import ctypes
+ _get_win_folder = _get_win_folder_with_ctypes
+ except ImportError:
+ _get_win_folder = _get_win_folder_from_registry
+
+
+def _win_path_to_bytes(path):
+ """Encode Windows paths to bytes. Only used on Python 2.
+
+ Motivation is to be consistent with other operating systems where paths
+ are also returned as bytes. This avoids problems mixing bytes and Unicode
+ elsewhere in the codebase. For more details and discussion see
+ <https://github.com/pypa/pip/issues/3463>.
+
+ If encoding using ASCII and MBCS fails, return the original Unicode path.
+ """
+ for encoding in ('ASCII', 'MBCS'):
+ try:
+ return path.encode(encoding)
+ except (UnicodeEncodeError, LookupError):
+ pass
+ return path
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py
new file mode 100644
index 00000000..ec3995c2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py
@@ -0,0 +1,293 @@
+"""Stuff that differs in different Python versions and platform
+distributions."""
+from __future__ import absolute_import, division
+
+import codecs
+import locale
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor.six import text_type
+from pip._vendor.urllib3.util import IS_PYOPENSSL
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Text, Tuple, Union
+
+try:
+ import _ssl # noqa
+except ImportError:
+ ssl = None
+else:
+ # This additional assignment was needed to prevent a mypy error.
+ ssl = _ssl
+
+try:
+ import ipaddress
+except ImportError:
+ try:
+ from pip._vendor import ipaddress # type: ignore
+ except ImportError:
+ import ipaddr as ipaddress # type: ignore
+ ipaddress.ip_address = ipaddress.IPAddress # type: ignore
+ ipaddress.ip_network = ipaddress.IPNetwork # type: ignore
+
+
+__all__ = [
+ "ipaddress", "uses_pycache", "console_to_str", "native_str",
+ "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
+ "get_extension_suffixes",
+]
+
+
+logger = logging.getLogger(__name__)
+
+HAS_TLS = (ssl is not None) or IS_PYOPENSSL
+
+if sys.version_info >= (3, 4):
+ uses_pycache = True
+ from importlib.util import cache_from_source
+else:
+ import imp
+
+ try:
+ cache_from_source = imp.cache_from_source # type: ignore
+ except AttributeError:
+ # does not use __pycache__
+ cache_from_source = None
+
+ uses_pycache = cache_from_source is not None
+
+
+if sys.version_info >= (3, 5):
+ backslashreplace_decode = "backslashreplace"
+else:
+    # In Python 3.4 and earlier, the backslashreplace handler exists
+    # but cannot be used for decoding.
+    # We implement our own replace handler for this
+    # situation, so that we can consistently use
+    # backslash replacement across all versions.
+ def backslashreplace_decode_fn(err):
+ raw_bytes = (err.object[i] for i in range(err.start, err.end))
+ if sys.version_info[0] == 2:
+ # Python 2 gave us characters - convert to numeric bytes
+ raw_bytes = (ord(b) for b in raw_bytes)
+ return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
+ codecs.register_error(
+ "backslashreplace_decode",
+ backslashreplace_decode_fn,
+ )
+ backslashreplace_decode = "backslashreplace_decode"
+
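+# Illustrative: with the handler above, b"\xff".decode("ascii",
+# backslashreplace_decode) yields u"\\xff" on every supported version.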
+
+def str_to_display(data, desc=None):
+ # type: (Union[bytes, Text], Optional[str]) -> Text
+ """
+ For display or logging purposes, convert a bytes object (or text) to
+ text (e.g. unicode in Python 2) safe for output.
+
+ :param desc: An optional phrase describing the input data, for use in
+ the log message if a warning is logged. Defaults to "Bytes object".
+
+ This function should never error out and so can take a best effort
+ approach. It is okay to be lossy if needed since the return value is
+ just for display.
+
+ We assume the data is in the locale preferred encoding. If it won't
+ decode properly, we warn the user but decode as best we can.
+
+ We also ensure that the output can be safely written to standard output
+ without encoding errors.
+ """
+ if isinstance(data, text_type):
+ return data
+
+ # Otherwise, data is a bytes object (str in Python 2).
+ # First, get the encoding we assume. This is the preferred
+ # encoding for the locale, unless that is not found, or
+ # it is ASCII, in which case assume UTF-8
+ encoding = locale.getpreferredencoding()
+ if (not encoding) or codecs.lookup(encoding).name == "ascii":
+ encoding = "utf-8"
+
+ # Now try to decode the data - if we fail, warn the user and
+ # decode with replacement.
+ try:
+ decoded_data = data.decode(encoding)
+ except UnicodeDecodeError:
+ if desc is None:
+ desc = 'Bytes object'
+ msg_format = '{} does not appear to be encoded as %s'.format(desc)
+ logger.warning(msg_format, encoding)
+ decoded_data = data.decode(encoding, errors=backslashreplace_decode)
+
+ # Make sure we can print the output, by encoding it to the output
+ # encoding with replacement of unencodable characters, and then
+ # decoding again.
+ # We use stderr's encoding because it's less likely to be
+ # redirected and if we don't find an encoding we skip this
+ # step (on the assumption that output is wrapped by something
+ # that won't fail).
+ # The double getattr is to deal with the possibility that we're
+ # being called in a situation where sys.__stderr__ doesn't exist,
+ # or doesn't have an encoding attribute. Neither of these cases
+ # should occur in normal pip use, but there's no harm in checking
+ # in case people use pip in (unsupported) unusual situations.
+ output_encoding = getattr(getattr(sys, "__stderr__", None),
+ "encoding", None)
+
+ if output_encoding:
+ output_encoded = decoded_data.encode(
+ output_encoding,
+ errors="backslashreplace"
+ )
+ decoded_data = output_encoded.decode(output_encoding)
+
+ return decoded_data
+
+
+def console_to_str(data):
+ # type: (bytes) -> Text
+ """Return a string, safe for output, of subprocess output.
+ """
+ return str_to_display(data, desc='Subprocess output')
+
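+# Illustrative behaviour (assuming a UTF-8 locale):
+#
+#     str_to_display(b'caf\xc3\xa9')          # -> u'caf\xe9'
+#     str_to_display(b'\xff', desc='Header')  # warns, then -> u'\\xff'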
+
+if sys.version_info >= (3,):
+ def native_str(s, replace=False):
+ # type: (str, bool) -> str
+ if isinstance(s, bytes):
+ return s.decode('utf-8', 'replace' if replace else 'strict')
+ return s
+
+else:
+ def native_str(s, replace=False):
+ # type: (str, bool) -> str
+ # Replace is ignored -- unicode to UTF-8 can't fail
+ if isinstance(s, text_type):
+ return s.encode('utf-8')
+ return s
+
+
+def get_path_uid(path):
+ # type: (str) -> int
+ """
+ Return path's uid.
+
+ Does not follow symlinks:
+ https://github.com/pypa/pip/pull/935#discussion_r5307003
+
+    Placed this function in compat due to differences on AIX and
+    Jython that should eventually go away.
+
+ :raises OSError: When path is a symlink or can't be read.
+ """
+ if hasattr(os, 'O_NOFOLLOW'):
+ fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
+ file_uid = os.fstat(fd).st_uid
+ os.close(fd)
+ else: # AIX and Jython
+ # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
+ if not os.path.islink(path):
+ # older versions of Jython don't have `os.fstat`
+ file_uid = os.stat(path).st_uid
+ else:
+ # raise OSError for parity with os.O_NOFOLLOW above
+ raise OSError(
+ "%s is a symlink; Will not return uid for symlinks" % path
+ )
+ return file_uid
+
+
+if sys.version_info >= (3, 4):
+ from importlib.machinery import EXTENSION_SUFFIXES
+
+ def get_extension_suffixes():
+ return EXTENSION_SUFFIXES
+else:
+ from imp import get_suffixes
+
+ def get_extension_suffixes():
+ return [suffix[0] for suffix in get_suffixes()]
+
+
+def expanduser(path):
+ # type: (str) -> str
+ """
+ Expand ~ and ~user constructions.
+
+ Includes a workaround for https://bugs.python.org/issue14768
+ """
+ expanded = os.path.expanduser(path)
+ if path.startswith('~/') and expanded.startswith('//'):
+ expanded = expanded[1:]
+ return expanded
+
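+# Illustrative: with HOME set to "/", os.path.expanduser("~/x") returns
+# "//x"; this wrapper returns "/x" instead (see the linked CPython issue).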
+
+# packages in the stdlib that may have installation metadata, but should not be
+# considered 'installed'. this theoretically could be determined based on
+# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
+# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
+# make this ineffective, so hard-coding
+stdlib_pkgs = {"python", "wsgiref", "argparse"}
+
+
+# windows detection, covers cpython and ironpython
+WINDOWS = (sys.platform.startswith("win") or
+ (sys.platform == 'cli' and os.name == 'nt'))
+
+
+def samefile(file1, file2):
+ # type: (str, str) -> bool
+ """Provide an alternative for os.path.samefile on Windows/Python2"""
+ if hasattr(os.path, 'samefile'):
+ return os.path.samefile(file1, file2)
+ else:
+ path1 = os.path.normcase(os.path.abspath(file1))
+ path2 = os.path.normcase(os.path.abspath(file2))
+ return path1 == path2
+
+
+if hasattr(shutil, 'get_terminal_size'):
+ def get_terminal_size():
+ # type: () -> Tuple[int, int]
+ """
+        Returns a tuple (x, y) representing the width (x) and the height (y)
+        in characters of the terminal window.
+ """
+ return tuple(shutil.get_terminal_size()) # type: ignore
+else:
+ def get_terminal_size():
+ # type: () -> Tuple[int, int]
+ """
+        Returns a tuple (x, y) representing the width (x) and the height (y)
+        in characters of the terminal window.
+ """
+ def ioctl_GWINSZ(fd):
+ try:
+ import fcntl
+ import termios
+ import struct
+ cr = struct.unpack_from(
+ 'hh',
+ fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
+ )
+ except Exception:
+ return None
+ if cr == (0, 0):
+ return None
+ return cr
+ cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
+ if not cr:
+ try:
+ fd = os.open(os.ctermid(), os.O_RDONLY)
+ cr = ioctl_GWINSZ(fd)
+ os.close(fd)
+ except Exception:
+ pass
+ if not cr:
+ cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+ return int(cr[1]), int(cr[0])
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 00000000..b9359bdd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,100 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+from __future__ import absolute_import
+
+import logging
+import warnings
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Optional
+
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+class PipDeprecationWarning(Warning):
+ pass
+
+
+_original_showwarning = None # type: Any
+
+
+# Warnings <-> Logging Integration
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+ if file is not None:
+ if _original_showwarning is not None:
+ _original_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+ elif issubclass(category, PipDeprecationWarning):
+ # We use a specially named logger which will handle all of the
+ # deprecation messages for pip.
+ logger = logging.getLogger("pip._internal.deprecations")
+ logger.warning(message)
+ else:
+ _original_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+
+
+def install_warning_logger():
+ # type: () -> None
+ # Enable our Deprecation Warnings
+ warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+ global _original_showwarning
+
+ if _original_showwarning is None:
+ _original_showwarning = warnings.showwarning
+ warnings.showwarning = _showwarning
+
+
+def deprecated(reason, replacement, gone_in, issue=None):
+ # type: (str, Optional[str], Optional[str], Optional[int]) -> None
+ """Helper to deprecate existing functionality.
+
+ reason:
+ Textual reason shown to the user about why this functionality has
+ been deprecated.
+ replacement:
+ Textual suggestion shown to the user about what alternative
+ functionality they can use.
+    gone_in:
+        The version of pip in which this functionality should be removed.
+        Raises an error if pip's current version is greater than or equal
+        to this.
+ issue:
+ Issue number on the tracker that would serve as a useful place for
+ users to find related discussion and provide feedback.
+
+ Always pass replacement, gone_in and issue as keyword arguments for clarity
+ at the call site.
+ """
+
+ # Construct a nice message.
+ # This is eagerly formatted as we want it to get logged as if someone
+ # typed this entire message out.
+ sentences = [
+ (reason, DEPRECATION_MSG_PREFIX + "{}"),
+ (gone_in, "pip {} will remove support for this functionality."),
+ (replacement, "A possible replacement is {}."),
+ (issue, (
+ "You can find discussion regarding this at "
+ "https://github.com/pypa/pip/issues/{}."
+ )),
+ ]
+ message = " ".join(
+ template.format(val) for val, template in sentences if val is not None
+ )
+
+ # Raise as an error if it has to be removed.
+ if gone_in is not None and parse(current_version) >= parse(gone_in):
+ raise PipDeprecationWarning(message)
+
+ warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
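+
+
+# A hedged usage sketch (argument values are illustrative, not a real
+# pip deprecation):
+#
+#     deprecated(
+#         reason="Foo support is deprecated.",
+#         replacement="use bar instead",
+#         gone_in="20.0",
+#         issue=1234,
+#     )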
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py
new file mode 100644
index 00000000..30139f2e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py
@@ -0,0 +1,39 @@
+import codecs
+import locale
+import re
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Tuple, Text
+
+BOMS = [
+ (codecs.BOM_UTF8, 'utf-8'),
+ (codecs.BOM_UTF16, 'utf-16'),
+ (codecs.BOM_UTF16_BE, 'utf-16-be'),
+ (codecs.BOM_UTF16_LE, 'utf-16-le'),
+ (codecs.BOM_UTF32, 'utf-32'),
+ (codecs.BOM_UTF32_BE, 'utf-32-be'),
+ (codecs.BOM_UTF32_LE, 'utf-32-le'),
+] # type: List[Tuple[bytes, Text]]
+
+ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
+
+
+def auto_decode(data):
+ # type: (bytes) -> Text
+ """Check a bytes string for a BOM to correctly detect the encoding
+
+ Fallback to locale.getpreferredencoding(False) like open() on Python3"""
+ for bom, encoding in BOMS:
+ if data.startswith(bom):
+ return data[len(bom):].decode(encoding)
+    # Let's check the first two lines, as in PEP 263
+ for line in data.split(b'\n')[:2]:
+ if line[0:1] == b'#' and ENCODING_RE.search(line):
+ encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
+ return data.decode(encoding)
+ return data.decode(
+ locale.getpreferredencoding(False) or sys.getdefaultencoding(),
+ )
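+
+
+# Illustrative behaviour:
+#
+#     auto_decode(codecs.BOM_UTF8 + b'x = 1')       # -> u'x = 1'
+#     auto_decode(b'# -*- coding: latin-1 -*-\nx')  # decoded as latin-1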
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py
new file mode 100644
index 00000000..1e6b0338
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py
@@ -0,0 +1,30 @@
+import os
+import os.path
+
+from pip._internal.utils.compat import get_path_uid
+
+
+def check_path_owner(path):
+ # type: (str) -> bool
+ # If we don't have a way to check the effective uid of this process, then
+ # we'll just assume that we own the directory.
+ if not hasattr(os, "geteuid"):
+ return True
+
+ previous = None
+ while path != previous:
+ if os.path.lexists(path):
+ # Check if path is writable by current user.
+ if os.geteuid() == 0:
+ # Special handling for root user in order to handle properly
+ # cases where users use sudo without -H flag.
+ try:
+ path_uid = get_path_uid(path)
+ except OSError:
+ return False
+ return path_uid == 0
+ else:
+ return os.access(path, os.W_OK)
+ else:
+ previous, path = path, os.path.dirname(path)
+ return False # assume we don't own the path
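+
+
+# Illustrative: check_path_owner('/var/cache/pip/new/dir') walks up to the
+# first existing ancestor and reports whether the current user can write it
+# (or, for root, whether that ancestor is owned by uid 0).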
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py
new file mode 100644
index 00000000..aa77d9b6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py
@@ -0,0 +1,120 @@
+from __future__ import absolute_import
+
+import os
+import re
+import warnings
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+
+
+def glibc_version_string():
+ # type: () -> Optional[str]
+ "Returns glibc version string, or None if not using glibc."
+ return glibc_version_string_confstr() or glibc_version_string_ctypes()
+
+
+def glibc_version_string_confstr():
+ # type: () -> Optional[str]
+ "Primary implementation of glibc_version_string using os.confstr."
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module:
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+ try:
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
+ _, version = os.confstr("CS_GNU_LIBC_VERSION").split()
+ except (AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def glibc_version_string_ctypes():
+ # type: () -> Optional[str]
+ "Fallback implementation of glibc_version_string using ctypes."
+
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ process_namespace = ctypes.CDLL(None)
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+# Separated out from have_compatible_glibc for easier unit testing
+def check_glibc_version(version_str, required_major, minimum_minor):
+ # type: (str, int, int) -> bool
+ # Parse string and check against requested version.
+ #
+ # We use a regexp instead of str.split because we want to discard any
+ # random junk that might come after the minor version -- this might happen
+ # in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ # uses version strings like "2.20-2014.11"). See gh-3588.
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn("Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str, RuntimeWarning)
+ return False
+ return (int(m.group("major")) == required_major and
+ int(m.group("minor")) >= minimum_minor)
+
+
+def have_compatible_glibc(required_major, minimum_minor):
+ # type: (int, int) -> bool
+ version_str = glibc_version_string()
+ if version_str is None:
+ return False
+ return check_glibc_version(version_str, required_major, minimum_minor)
+
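+# Illustrative: on a glibc 2.17 system, have_compatible_glibc(2, 5) is True
+# ("2.17" parses as major 2, minor 17 >= 5); on musl it is False, since no
+# glibc version string can be obtained.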
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.7')
+# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.9')
+#
+# But the truth is:
+#
+# ~$ ldd --version
+# ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver():
+ # type: () -> Tuple[str, str]
+ """Try to determine the glibc version
+
+ Returns a tuple of strings (lib, version) which default to empty strings
+ in case the lookup fails.
+ """
+ glibc_version = glibc_version_string()
+ if glibc_version is None:
+ return ("", "")
+ else:
+ return ("glibc", glibc_version)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py
new file mode 100644
index 00000000..e8aabe1a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py
@@ -0,0 +1,128 @@
+from __future__ import absolute_import
+
+import hashlib
+
+from pip._vendor.six import iteritems, iterkeys, itervalues
+
+from pip._internal.exceptions import (
+ HashMismatch, HashMissing, InstallationError,
+)
+from pip._internal.utils.misc import read_chunks
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Dict, List, BinaryIO, NoReturn, Iterator
+ )
+ from pip._vendor.six import PY3
+ if PY3:
+ from hashlib import _Hash
+ else:
+ from hashlib import _hash as _Hash
+
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = 'sha256'
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ['sha256', 'sha384', 'sha512']
+
+
+class Hashes(object):
+ """A wrapper that builds multiple hashes at once and checks them against
+ known-good values
+
+ """
+ def __init__(self, hashes=None):
+ # type: (Dict[str, List[str]]) -> None
+ """
+ :param hashes: A dict of algorithm names pointing to lists of allowed
+ hex digests
+ """
+ self._allowed = {} if hashes is None else hashes
+
+ @property
+ def digest_count(self):
+ # type: () -> int
+ return sum(len(digests) for digests in self._allowed.values())
+
+ def is_hash_allowed(
+ self,
+ hash_name, # type: str
+ hex_digest, # type: str
+ ):
+ """Return whether the given hex digest is allowed."""
+ return hex_digest in self._allowed.get(hash_name, [])
+
+ def check_against_chunks(self, chunks):
+ # type: (Iterator[bytes]) -> None
+ """Check good hashes against ones built from iterable of chunks of
+ data.
+
+ Raise HashMismatch if none match.
+
+ """
+ gots = {}
+ for hash_name in iterkeys(self._allowed):
+ try:
+ gots[hash_name] = hashlib.new(hash_name)
+ except (ValueError, TypeError):
+ raise InstallationError('Unknown hash name: %s' % hash_name)
+
+ for chunk in chunks:
+ for hash in itervalues(gots):
+ hash.update(chunk)
+
+ for hash_name, got in iteritems(gots):
+ if got.hexdigest() in self._allowed[hash_name]:
+ return
+ self._raise(gots)
+
+ def _raise(self, gots):
+ # type: (Dict[str, _Hash]) -> NoReturn
+ raise HashMismatch(self._allowed, gots)
+
+ def check_against_file(self, file):
+ # type: (BinaryIO) -> None
+ """Check good hashes against a file-like object
+
+ Raise HashMismatch if none match.
+
+ """
+ return self.check_against_chunks(read_chunks(file))
+
+ def check_against_path(self, path):
+ # type: (str) -> None
+ with open(path, 'rb') as file:
+ return self.check_against_file(file)
+
+ def __nonzero__(self):
+ # type: () -> bool
+ """Return whether I know any known-good hashes."""
+ return bool(self._allowed)
+
+ def __bool__(self):
+ # type: () -> bool
+ return self.__nonzero__()
+
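+# A hedged usage sketch (the digest shown is the sha256 of b"test"):
+#
+#     hashes = Hashes({'sha256': [
+#         '9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08',
+#     ]})
+#     hashes.check_against_path('data.txt')  # raises HashMismatch on failure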
+
+class MissingHashes(Hashes):
+ """A workalike for Hashes used when we're missing a hash for a requirement
+
+ It computes the actual hash of the requirement and raises a HashMissing
+ exception showing it to the user.
+
+ """
+ def __init__(self):
+ # type: () -> None
+ """Don't offer the ``hashes`` kwarg."""
+ # Pass our favorite hash in to generate a "gotten hash". With the
+        # empty list, it will never match, so an error will always be raised.
+ super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
+
+ def _raise(self, gots):
+ # type: (Dict[str, _Hash]) -> NoReturn
+ raise HashMissing(gots[FAVORITE_HASH].hexdigest())
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py
new file mode 100644
index 00000000..3fbec712
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py
@@ -0,0 +1,394 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import logging
+import logging.handlers
+import os
+import sys
+from logging import Filter
+
+from pip._vendor.six import PY2
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
+from pip._internal.utils.misc import ensure_dir, subprocess_logger
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading # type: ignore
+
+
+try:
+ # Use "import as" and set colorama in the else clause to avoid mypy
+ # errors and get the following correct revealed type for colorama:
+ # `Union[_importlib_modulespec.ModuleType, None]`
+ # Otherwise, we get an error like the following in the except block:
+ # > Incompatible types in assignment (expression has type "None",
+ # variable has type Module)
+ # TODO: eliminate the need to use "import as" once mypy addresses some
+ # of its issues with conditional imports. Here is an umbrella issue:
+ # https://github.com/python/mypy/issues/1297
+ from pip._vendor import colorama as _colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+else:
+ # Import Fore explicitly rather than accessing below as colorama.Fore
+ # to avoid the following error running mypy:
+ # > Module has no attribute "Fore"
+ # TODO: eliminate the need to import Fore once mypy addresses some of its
+ # issues with conditional imports. This particular case could be an
+ # instance of the following issue (but also see the umbrella issue above):
+ # https://github.com/python/mypy/issues/3500
+ from pip._vendor.colorama import Fore
+
+ colorama = _colorama
+
+
+_log_state = threading.local()
+_log_state.indentation = 0
+
+
+class BrokenStdoutLoggingError(Exception):
+ """
+ Raised if BrokenPipeError occurs for the stdout stream while logging.
+ """
+ pass
+
+
+# BrokenPipeError does not exist in Python 2 and, in addition, manifests
+# differently in Windows and non-Windows.
+if WINDOWS:
+ # In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
+ # https://bugs.python.org/issue19612
+ # https://bugs.python.org/issue30418
+ if PY2:
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return (exc_class is IOError and
+ exc.errno in (errno.EINVAL, errno.EPIPE))
+ else:
+ # In Windows, a broken pipe IOError became OSError in Python 3.
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return ((exc_class is BrokenPipeError) or # noqa: F821
+ (exc_class is OSError and
+ exc.errno in (errno.EINVAL, errno.EPIPE)))
+elif PY2:
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return (exc_class is IOError and exc.errno == errno.EPIPE)
+else:
+ # Then we are in the non-Windows Python 3 case.
+ def _is_broken_pipe_error(exc_class, exc):
+ """
+ Return whether an exception is a broken pipe error.
+
+ Args:
+ exc_class: an exception class.
+ exc: an exception instance.
+ """
+ return (exc_class is BrokenPipeError) # noqa: F821
+
+
+@contextlib.contextmanager
+def indent_log(num=2):
+ """
+ A context manager which will cause the log output to be indented for any
+ log messages emitted inside it.
+ """
+ _log_state.indentation += num
+ try:
+ yield
+ finally:
+ _log_state.indentation -= num
+
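+# A hedged usage sketch:
+#
+#     logger.info('Collecting foo')
+#     with indent_log():
+#         logger.info('Downloading foo-1.0.tar.gz')  # rendered indented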
+
+def get_indentation():
+ return getattr(_log_state, 'indentation', 0)
+
+
+class IndentingFormatter(logging.Formatter):
+
+ def __init__(self, *args, **kwargs):
+ """
+ A logging.Formatter that obeys the indent_log() context manager.
+
+ :param add_timestamp: A bool indicating output lines should be prefixed
+ with their record's timestamp.
+ """
+ self.add_timestamp = kwargs.pop("add_timestamp", False)
+ super(IndentingFormatter, self).__init__(*args, **kwargs)
+
+ def get_message_start(self, formatted, levelno):
+ """
+ Return the start of the formatted log message (not counting the
+ prefix to add to each line).
+ """
+ if levelno < logging.WARNING:
+ return ''
+ if formatted.startswith(DEPRECATION_MSG_PREFIX):
+ # Then the message already has a prefix. We don't want it to
+ # look like "WARNING: DEPRECATION: ...."
+ return ''
+ if levelno < logging.ERROR:
+ return 'WARNING: '
+
+ return 'ERROR: '
+
+ def format(self, record):
+ """
+ Calls the standard formatter, but will indent all of the log message
+ lines by our current indentation level.
+ """
+ formatted = super(IndentingFormatter, self).format(record)
+ message_start = self.get_message_start(formatted, record.levelno)
+ formatted = message_start + formatted
+
+ prefix = ''
+ if self.add_timestamp:
+ # TODO: Use Formatter.default_time_format after dropping PY2.
+ t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S")
+ prefix = '%s,%03d ' % (t, record.msecs)
+ prefix += " " * get_indentation()
+ formatted = "".join([
+ prefix + line
+ for line in formatted.splitlines(True)
+ ])
+ return formatted
+
+
+def _color_wrap(*colors):
+ def wrapped(inp):
+ return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
+ return wrapped
+
+
+class ColorizedStreamHandler(logging.StreamHandler):
+
+ # Don't build up a list of colors if we don't have colorama
+ if colorama:
+ COLORS = [
+ # This needs to be in order from highest logging level to lowest.
+ (logging.ERROR, _color_wrap(Fore.RED)),
+ (logging.WARNING, _color_wrap(Fore.YELLOW)),
+ ]
+ else:
+ COLORS = []
+
+ def __init__(self, stream=None, no_color=None):
+ logging.StreamHandler.__init__(self, stream)
+ self._no_color = no_color
+
+ if WINDOWS and colorama:
+ self.stream = colorama.AnsiToWin32(self.stream)
+
+ def _using_stdout(self):
+ """
+ Return whether the handler is using sys.stdout.
+ """
+ if WINDOWS and colorama:
+ # Then self.stream is an AnsiToWin32 object.
+ return self.stream.wrapped is sys.stdout
+
+ return self.stream is sys.stdout
+
+ def should_color(self):
+ # Don't colorize things if we do not have colorama or if told not to
+ if not colorama or self._no_color:
+ return False
+
+ real_stream = (
+ self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
+ else self.stream.wrapped
+ )
+
+ # If the stream is a tty we should color it
+ if hasattr(real_stream, "isatty") and real_stream.isatty():
+ return True
+
+ # If we have an ANSI term we should color it
+ if os.environ.get("TERM") == "ANSI":
+ return True
+
+ # If anything else we should not color it
+ return False
+
+ def format(self, record):
+ msg = logging.StreamHandler.format(self, record)
+
+ if self.should_color():
+ for level, color in self.COLORS:
+ if record.levelno >= level:
+ msg = color(msg)
+ break
+
+ return msg
+
+ # The logging module says handleError() can be customized.
+ def handleError(self, record):
+ exc_class, exc = sys.exc_info()[:2]
+ # If a broken pipe occurred while calling write() or flush() on the
+ # stdout stream in logging's Handler.emit(), then raise our special
+ # exception so we can handle it in main() instead of logging the
+ # broken pipe error and continuing.
+ if (exc_class and self._using_stdout() and
+ _is_broken_pipe_error(exc_class, exc)):
+ raise BrokenStdoutLoggingError()
+
+ return super(ColorizedStreamHandler, self).handleError(record)
+
+
+class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
+
+ def _open(self):
+ ensure_dir(os.path.dirname(self.baseFilename))
+ return logging.handlers.RotatingFileHandler._open(self)
+
+
+class MaxLevelFilter(Filter):
+
+ def __init__(self, level):
+ self.level = level
+
+ def filter(self, record):
+ return record.levelno < self.level
+
+
+class ExcludeLoggerFilter(Filter):
+
+ """
+ A logging Filter that excludes records from a logger (or its children).
+ """
+
+ def filter(self, record):
+ # The base Filter class allows only records from a logger (or its
+ # children).
+ return not super(ExcludeLoggerFilter, self).filter(record)
+
+
+def setup_logging(verbosity, no_color, user_log_file):
+ """Configures and sets up all of the logging
+
+ Returns the requested logging level, as its integer value.
+ """
+
+ # Determine the level to be logging at.
+ if verbosity >= 1:
+ level = "DEBUG"
+ elif verbosity == -1:
+ level = "WARNING"
+ elif verbosity == -2:
+ level = "ERROR"
+ elif verbosity <= -3:
+ level = "CRITICAL"
+ else:
+ level = "INFO"
+
+ level_number = getattr(logging, level)
+
+ # The "root" logger should match the "console" level *unless* we also need
+ # to log to a user log file.
+ include_user_log = user_log_file is not None
+ if include_user_log:
+ additional_log_file = user_log_file
+ root_level = "DEBUG"
+ else:
+ additional_log_file = "/dev/null"
+ root_level = level
+
+ # Disable any logging besides WARNING unless we have DEBUG level logging
+ # enabled for vendored libraries.
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
+
+ # Shorthands for clarity
+ log_streams = {
+ "stdout": "ext://sys.stdout",
+ "stderr": "ext://sys.stderr",
+ }
+ handler_classes = {
+ "stream": "pip._internal.utils.logging.ColorizedStreamHandler",
+ "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
+ }
+ handlers = ["console", "console_errors", "console_subprocess"] + (
+ ["user_log"] if include_user_log else []
+ )
+
+ logging.config.dictConfig({
+ "version": 1,
+ "disable_existing_loggers": False,
+ "filters": {
+ "exclude_warnings": {
+ "()": "pip._internal.utils.logging.MaxLevelFilter",
+ "level": logging.WARNING,
+ },
+ "restrict_to_subprocess": {
+ "()": "logging.Filter",
+ "name": subprocess_logger.name,
+ },
+ "exclude_subprocess": {
+ "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
+ "name": subprocess_logger.name,
+ },
+ },
+ "formatters": {
+ "indent": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ },
+ "indent_with_timestamp": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ "add_timestamp": True,
+ },
+ },
+ "handlers": {
+ "console": {
+ "level": level,
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stdout"],
+ "filters": ["exclude_subprocess", "exclude_warnings"],
+ "formatter": "indent",
+ },
+ "console_errors": {
+ "level": "WARNING",
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stderr"],
+ "filters": ["exclude_subprocess"],
+ "formatter": "indent",
+ },
+ # A handler responsible for logging to the console messages
+ # from the "subprocessor" logger.
+ "console_subprocess": {
+ "level": level,
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stderr"],
+ "filters": ["restrict_to_subprocess"],
+ "formatter": "indent",
+ },
+ "user_log": {
+ "level": "DEBUG",
+ "class": handler_classes["file"],
+ "filename": additional_log_file,
+ "delay": True,
+ "formatter": "indent_with_timestamp",
+ },
+ },
+ "root": {
+ "level": root_level,
+ "handlers": handlers,
+ },
+ "loggers": {
+ "pip._vendor": {
+ "level": vendored_log_level
+ }
+ },
+ })
+
+ return level_number
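+
+
+# Illustrative verbosity -> console level mapping implemented above:
+# >= 1 -> DEBUG, 0 -> INFO, -1 -> WARNING, -2 -> ERROR, <= -3 -> CRITICAL.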
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py
new file mode 100644
index 00000000..cb0c8ebc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py
@@ -0,0 +1,20 @@
+import os.path
+
+DELETE_MARKER_MESSAGE = '''\
+This file is placed here by pip to indicate the source was put
+here by pip.
+
+Once this package is successfully installed this source code will be
+deleted (unless you remove this file).
+'''
+PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
+
+
+def write_delete_marker_file(directory):
+ # type: (str) -> None
+ """
+ Write the pip delete marker file into this directory.
+ """
+ filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
+ with open(filepath, 'w') as marker_fp:
+ marker_fp.write(DELETE_MARKER_MESSAGE)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py
new file mode 100644
index 00000000..abb95979
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py
@@ -0,0 +1,1204 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import getpass
+import io
+# we have a submodule named 'logging' which would shadow this if we used the
+# regular name:
+import logging as std_logging
+import os
+import posixpath
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tarfile
+import zipfile
+from collections import deque
+
+from pip._vendor import pkg_resources
+# NOTE: retrying is not annotated in typeshed as of 2017-07-17, which is
+# why we ignore the type on this import.
+from pip._vendor.retrying import retry # type: ignore
+from pip._vendor.six import PY2, text_type
+from pip._vendor.six.moves import input, shlex_quote
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
+
+from pip import __version__
+from pip._internal.exceptions import CommandError, InstallationError
+from pip._internal.locations import site_packages, user_site
+from pip._internal.utils.compat import (
+ WINDOWS, console_to_str, expanduser, stdlib_pkgs, str_to_display,
+)
+from pip._internal.utils.marker_files import write_delete_marker_file
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import (
+ running_under_virtualenv, virtualenv_no_global,
+)
+
+if PY2:
+ from io import BytesIO as StringIO
+else:
+ from io import StringIO
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, AnyStr, Container, Iterable, List, Mapping, Match, Optional, Text,
+ Union,
+ )
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.models.link import Link
+ from pip._internal.utils.ui import SpinnerInterface
+
+try:
+ from typing import cast, Tuple
+ VersionInfo = Tuple[int, int, int]
+except ImportError:
+ # typing's cast() isn't supported in code comments, so we need to
+ # define a dummy, no-op version.
+ def cast(typ, val):
+ return val
+ VersionInfo = None
+
+
+__all__ = ['rmtree', 'display_path', 'backup_dir',
+ 'ask', 'splitext',
+ 'format_size', 'is_installable_dir',
+ 'is_svn_page', 'file_contents',
+ 'split_leading_dir', 'has_leading_dir',
+ 'normalize_path',
+ 'renames', 'get_prog',
+ 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
+ 'captured_stdout', 'ensure_dir',
+ 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'WHEEL_EXTENSION',
+ 'get_installed_version', 'remove_auth_from_url']
+
+
+logger = std_logging.getLogger(__name__)
+subprocess_logger = std_logging.getLogger('pip.subprocessor')
+
+LOG_DIVIDER = '----------------------------------------'
+
+WHEEL_EXTENSION = '.whl'
+BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
+XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
+ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION)
+TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
+ARCHIVE_EXTENSIONS = (
+ ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
+SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
+
+try:
+ import bz2 # noqa
+ SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+ logger.debug('bz2 module is not available')
+
+try:
+ # Only for Python 3.3+
+ import lzma # noqa
+ SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+ logger.debug('lzma module is not available')
+
+
+def get_pip_version():
+ # type: () -> str
+ pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
+ pip_pkg_dir = os.path.abspath(pip_pkg_dir)
+
+ return (
+ 'pip {} from {} (python {})'.format(
+ __version__, pip_pkg_dir, sys.version[:3],
+ )
+ )
+
+
+def normalize_version_info(py_version_info):
+ # type: (Tuple[int, ...]) -> Tuple[int, int, int]
+ """
+ Convert a tuple of ints representing a Python version to one of length
+ three.
+
+ :param py_version_info: a tuple of ints representing a Python version,
+ or None to specify no version. The tuple can have any length.
+
+ :return: a tuple of length three if `py_version_info` is non-None.
+ Otherwise, return `py_version_info` unchanged (i.e. None).
+ """
+ if len(py_version_info) < 3:
+ py_version_info += (3 - len(py_version_info)) * (0,)
+ elif len(py_version_info) > 3:
+ py_version_info = py_version_info[:3]
+
+ return cast(VersionInfo, py_version_info)
+
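+# Illustrative:
+#
+#     normalize_version_info((3,))                # -> (3, 0, 0)
+#     normalize_version_info((3, 7, 4, 'extra'))  # -> (3, 7, 4)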
+
+def ensure_dir(path):
+ # type: (AnyStr) -> None
+ """os.path.makedirs without EEXIST."""
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+
+def get_prog():
+ # type: () -> str
+ try:
+ prog = os.path.basename(sys.argv[0])
+ if prog in ('__main__.py', '-c'):
+ return "%s -m pip" % sys.executable
+ else:
+ return prog
+ except (AttributeError, TypeError, IndexError):
+ pass
+ return 'pip'
+
+
+# Retry every half second for up to 3 seconds
+@retry(stop_max_delay=3000, wait_fixed=500)
+def rmtree(dir, ignore_errors=False):
+ # type: (str, bool) -> None
+ shutil.rmtree(dir, ignore_errors=ignore_errors,
+ onerror=rmtree_errorhandler)
+
+
+def rmtree_errorhandler(func, path, exc_info):
+ """On Windows, the files in .svn are read-only, so when rmtree() tries to
+ remove them, an exception is thrown. We catch that here, remove the
+ read-only attribute, and hopefully continue without problems."""
+    # if the file is currently read-only
+ if os.stat(path).st_mode & stat.S_IREAD:
+ # convert to read/write
+ os.chmod(path, stat.S_IWRITE)
+ # use the original function to repeat the operation
+ func(path)
+ return
+ else:
+ raise
+
+
+def path_to_display(path):
+ # type: (Optional[Union[str, Text]]) -> Optional[Text]
+ """
+ Convert a bytes (or text) path to text (unicode in Python 2) for display
+ and logging purposes.
+
+ This function should never error out. Also, this function is mainly needed
+ for Python 2 since in Python 3 str paths are already text.
+ """
+ if path is None:
+ return None
+ if isinstance(path, text_type):
+ return path
+ # Otherwise, path is a bytes object (str in Python 2).
+ try:
+ display_path = path.decode(sys.getfilesystemencoding(), 'strict')
+ except UnicodeDecodeError:
+ # Include the full bytes to make troubleshooting easier, even though
+ # it may not be very human readable.
+ if PY2:
+ # Convert the bytes to a readable str representation using
+ # repr(), and then convert the str to unicode.
+ # Also, we add the prefix "b" to the repr() return value both
+ # to make the Python 2 output look like the Python 3 output, and
+ # to signal to the user that this is a bytes representation.
+ display_path = str_to_display('b{!r}'.format(path))
+ else:
+ # Silence the "F821 undefined name 'ascii'" flake8 error since
+ # in Python 3 ascii() is a built-in.
+ display_path = ascii(path) # noqa: F821
+
+ return display_path
+
+
+def display_path(path):
+ # type: (Union[str, Text]) -> str
+ """Gives the display value for a given path, making it relative to cwd
+ if possible."""
+ path = os.path.normcase(os.path.abspath(path))
+ if sys.version_info[0] == 2:
+ path = path.decode(sys.getfilesystemencoding(), 'replace')
+ path = path.encode(sys.getdefaultencoding(), 'replace')
+ if path.startswith(os.getcwd() + os.path.sep):
+ path = '.' + path[len(os.getcwd()):]
+ return path
+
+
+def backup_dir(dir, ext='.bak'):
+ # type: (str, str) -> str
+ """Figure out the name of a directory to back up the given dir to
+ (adding .bak, .bak2, etc)"""
+ n = 1
+ extension = ext
+ while os.path.exists(dir + extension):
+ n += 1
+ extension = ext + str(n)
+ return dir + extension
+
+
+def ask_path_exists(message, options):
+ # type: (str, Iterable[str]) -> str
+ for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
+ if action in options:
+ return action
+ return ask(message, options)
+
+
+def _check_no_input(message):
+ # type: (str) -> None
+ """Raise an error if no input is allowed."""
+ if os.environ.get('PIP_NO_INPUT'):
+ raise Exception(
+ 'No input was expected ($PIP_NO_INPUT set); question: %s' %
+ message
+ )
+
+
+def ask(message, options):
+ # type: (str, Iterable[str]) -> str
+ """Ask the message interactively, with the given possible responses"""
+ while 1:
+ _check_no_input(message)
+ response = input(message)
+ response = response.strip().lower()
+ if response not in options:
+ print(
+ 'Your response (%r) was not one of the expected responses: '
+ '%s' % (response, ', '.join(options))
+ )
+ else:
+ return response
+
+
+def ask_input(message):
+ # type: (str) -> str
+ """Ask for input interactively."""
+ _check_no_input(message)
+ return input(message)
+
+
+def ask_password(message):
+ # type: (str) -> str
+ """Ask for a password interactively."""
+ _check_no_input(message)
+ return getpass.getpass(message)
+
+
+def format_size(bytes):
+ # type: (float) -> str
+ if bytes > 1000 * 1000:
+ return '%.1fMB' % (bytes / 1000.0 / 1000)
+ elif bytes > 10 * 1000:
+ return '%ikB' % (bytes / 1000)
+ elif bytes > 1000:
+ return '%.1fkB' % (bytes / 1000.0)
+ else:
+ return '%ibytes' % bytes
+
+
+def is_installable_dir(path):
+ # type: (str) -> bool
+ """Is path is a directory containing setup.py or pyproject.toml?
+ """
+ if not os.path.isdir(path):
+ return False
+ setup_py = os.path.join(path, 'setup.py')
+ if os.path.isfile(setup_py):
+ return True
+ pyproject_toml = os.path.join(path, 'pyproject.toml')
+ if os.path.isfile(pyproject_toml):
+ return True
+ return False
+
+
+def is_svn_page(html):
+ # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]]
+ """
+ Returns true if the page appears to be the index page of an svn repository
+ """
+ return (re.search(r'<title>[^<]*Revision \d+:', html) and
+ re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
+
+
+def file_contents(filename):
+ # type: (str) -> Text
+ with open(filename, 'rb') as fp:
+ return fp.read().decode('utf-8')
+
+
+def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
+ """Yield pieces of data from a file-like object until EOF."""
+ while True:
+ chunk = file.read(size)
+ if not chunk:
+ break
+ yield chunk
+
+
+def split_leading_dir(path):
+ # type: (Union[str, Text]) -> List[Union[str, Text]]
+ path = path.lstrip('/').lstrip('\\')
+ if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
+ '\\' not in path):
+ return path.split('/', 1)
+ elif '\\' in path:
+ return path.split('\\', 1)
+ else:
+ return [path, '']
+
+
+def has_leading_dir(paths):
+ # type: (Iterable[Union[str, Text]]) -> bool
+ """Returns true if all the paths have the same leading path name
+ (i.e., everything is in one subdirectory in an archive)"""
+ common_prefix = None
+ for path in paths:
+ prefix, rest = split_leading_dir(path)
+ if not prefix:
+ return False
+ elif common_prefix is None:
+ common_prefix = prefix
+ elif prefix != common_prefix:
+ return False
+ return True
+
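+# Illustrative: has_leading_dir(['pkg/a.py', 'pkg/b/c.py']) is True, so an
+# archive whose members all live under 'pkg/' can be flattened on unpack.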
+
+def normalize_path(path, resolve_symlinks=True):
+ # type: (str, bool) -> str
+ """
+ Convert a path to its canonical, case-normalized, absolute version.
+
+ """
+ path = expanduser(path)
+ if resolve_symlinks:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
+ return os.path.normcase(path)
+
+
+def splitext(path):
+ # type: (str) -> Tuple[str, str]
+ """Like os.path.splitext, but take off .tar too"""
+ base, ext = posixpath.splitext(path)
+ if base.lower().endswith('.tar'):
+ ext = base[-4:] + ext
+ base = base[:-4]
+ return base, ext
+
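+# Illustrative: splitext('pip-19.0.tar.gz') -> ('pip-19.0', '.tar.gz'),
+# whereas os.path.splitext would give ('pip-19.0.tar', '.gz').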
+
+def renames(old, new):
+ # type: (str, str) -> None
+ """Like os.renames(), but handles renaming across devices."""
+ # Implementation borrowed from os.renames().
+ head, tail = os.path.split(new)
+ if head and tail and not os.path.exists(head):
+ os.makedirs(head)
+
+ shutil.move(old, new)
+
+ head, tail = os.path.split(old)
+ if head and tail:
+ try:
+ os.removedirs(head)
+ except OSError:
+ pass
+
+
+def is_local(path):
+ # type: (str) -> bool
+ """
+ Return True if path is within sys.prefix, if we're running in a virtualenv.
+
+ If we're not in a virtualenv, all paths are considered "local."
+
+ """
+ if not running_under_virtualenv():
+ return True
+ return normalize_path(path).startswith(normalize_path(sys.prefix))
+
+
+def dist_is_local(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution object is installed locally
+ (i.e. within current virtualenv).
+
+ Always True if we're not in a virtualenv.
+
+ """
+ return is_local(dist_location(dist))
+
+
+def dist_in_usersite(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution is installed in user site.
+ """
+ norm_path = normalize_path(dist_location(dist))
+ return norm_path.startswith(normalize_path(user_site))
+
+
+def dist_in_site_packages(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution is installed in
+ sysconfig.get_python_lib().
+ """
+ return normalize_path(
+ dist_location(dist)
+ ).startswith(normalize_path(site_packages))
+
+
+def dist_is_editable(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution is an editable install.
+ """
+ for path_item in sys.path:
+ egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
+ if os.path.isfile(egg_link):
+ return True
+ return False
+
+
+def get_installed_distributions(
+ local_only=True, # type: bool
+ skip=stdlib_pkgs, # type: Container[str]
+ include_editables=True, # type: bool
+ editables_only=False, # type: bool
+ user_only=False, # type: bool
+ paths=None # type: Optional[List[str]]
+):
+ # type: (...) -> List[Distribution]
+ """
+ Return a list of installed Distribution objects.
+
+ If ``local_only`` is True (default), only return installations
+ local to the current virtualenv, if in a virtualenv.
+
+ ``skip`` argument is an iterable of lower-case project names to
+ ignore; defaults to stdlib_pkgs
+
+ If ``include_editables`` is False, don't report editables.
+
+    If ``editables_only`` is True, only report editables.
+
+    If ``user_only`` is True, only report installations in the user
+    site directory.
+
+ If ``paths`` is set, only report the distributions present at the
+ specified list of locations.
+ """
+ if paths:
+ working_set = pkg_resources.WorkingSet(paths)
+ else:
+ working_set = pkg_resources.working_set
+
+ if local_only:
+ local_test = dist_is_local
+ else:
+ def local_test(d):
+ return True
+
+ if include_editables:
+ def editable_test(d):
+ return True
+ else:
+ def editable_test(d):
+ return not dist_is_editable(d)
+
+ if editables_only:
+ def editables_only_test(d):
+ return dist_is_editable(d)
+ else:
+ def editables_only_test(d):
+ return True
+
+ if user_only:
+ user_test = dist_in_usersite
+ else:
+ def user_test(d):
+ return True
+
+ # because of pkg_resources vendoring, mypy cannot find stub in typeshed
+ return [d for d in working_set # type: ignore
+ if local_test(d) and
+ d.key not in skip and
+ editable_test(d) and
+ editables_only_test(d) and
+ user_test(d)
+ ]
+
+
+def egg_link_path(dist):
+ # type: (Distribution) -> Optional[str]
+ """
+ Return the path for the .egg-link file if it exists, otherwise, None.
+
+    There are 3 scenarios:
+ 1) not in a virtualenv
+ try to find in site.USER_SITE, then site_packages
+ 2) in a no-global virtualenv
+ try to find in site_packages
+ 3) in a yes-global virtualenv
+ try to find in site_packages, then site.USER_SITE
+ (don't look in global location)
+
+    For #1 and #3, there could be odd cases where there's an egg-link in
+    two locations.
+
+    This method will just return the first one found.
+ """
+ sites = []
+ if running_under_virtualenv():
+ if virtualenv_no_global():
+ sites.append(site_packages)
+ else:
+ sites.append(site_packages)
+ if user_site:
+ sites.append(user_site)
+ else:
+ if user_site:
+ sites.append(user_site)
+ sites.append(site_packages)
+
+ for site in sites:
+ egglink = os.path.join(site, dist.project_name) + '.egg-link'
+ if os.path.isfile(egglink):
+ return egglink
+ return None
+
+
+def dist_location(dist):
+ # type: (Distribution) -> str
+ """
+ Get the site-packages location of this distribution. Generally
+ this is dist.location, except in the case of develop-installed
+ packages, where dist.location is the source code location, and we
+ want to know where the egg-link file is.
+
+ """
+ egg_link = egg_link_path(dist)
+ if egg_link:
+ return egg_link
+ return dist.location
+
+
+def current_umask():
+ """Get the current umask which involves having to set it temporarily."""
+ mask = os.umask(0)
+ os.umask(mask)
+ return mask
+
+
+def unzip_file(filename, location, flatten=True):
+ # type: (str, str, bool) -> None
+ """
+ Unzip the file (with path `filename`) to the destination `location`. All
+ files are written based on system defaults and umask (i.e. permissions are
+ not preserved), except that regular file members with any execute
+ permissions (user, group, or world) have "chmod +x" applied after being
+    written. Note that on Windows, any execute changes using os.chmod are
+    no-ops per the Python docs.
+ """
+ ensure_dir(location)
+ zipfp = open(filename, 'rb')
+ try:
+ zip = zipfile.ZipFile(zipfp, allowZip64=True)
+ leading = has_leading_dir(zip.namelist()) and flatten
+ for info in zip.infolist():
+ name = info.filename
+ fn = name
+ if leading:
+ fn = split_leading_dir(name)[1]
+ fn = os.path.join(location, fn)
+ dir = os.path.dirname(fn)
+ if fn.endswith('/') or fn.endswith('\\'):
+ # A directory
+ ensure_dir(fn)
+ else:
+ ensure_dir(dir)
+ # Don't use read() to avoid allocating an arbitrarily large
+ # chunk of memory for the file's content
+ fp = zip.open(name)
+ try:
+ with open(fn, 'wb') as destfp:
+ shutil.copyfileobj(fp, destfp)
+ finally:
+ fp.close()
+ mode = info.external_attr >> 16
+ # if mode and regular file and any execute permissions for
+ # user/group/world?
+ if mode and stat.S_ISREG(mode) and mode & 0o111:
+ # make dest file have execute for user/group/world
+ # (chmod +x) no-op on windows per python docs
+ os.chmod(fn, (0o777 - current_umask() | 0o111))
+ finally:
+ zipfp.close()
+
+
+def untar_file(filename, location):
+ # type: (str, str) -> None
+ """
+ Untar the file (with path `filename`) to the destination `location`.
+ All files are written based on system defaults and umask (i.e. permissions
+ are not preserved), except that regular file members with any execute
+ permissions (user, group, or world) have "chmod +x" applied after being
+    written. Note that on Windows, any execute changes using os.chmod are
+    no-ops per the Python docs.
+ """
+ ensure_dir(location)
+ if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
+ mode = 'r:gz'
+ elif filename.lower().endswith(BZ2_EXTENSIONS):
+ mode = 'r:bz2'
+ elif filename.lower().endswith(XZ_EXTENSIONS):
+ mode = 'r:xz'
+ elif filename.lower().endswith('.tar'):
+ mode = 'r'
+ else:
+ logger.warning(
+ 'Cannot determine compression type for file %s', filename,
+ )
+ mode = 'r:*'
+ tar = tarfile.open(filename, mode)
+ try:
+ leading = has_leading_dir([
+ member.name for member in tar.getmembers()
+ ])
+ for member in tar.getmembers():
+ fn = member.name
+ if leading:
+ # https://github.com/python/mypy/issues/1174
+ fn = split_leading_dir(fn)[1] # type: ignore
+ path = os.path.join(location, fn)
+ if member.isdir():
+ ensure_dir(path)
+ elif member.issym():
+ try:
+ # https://github.com/python/typeshed/issues/2673
+ tar._extract_member(member, path) # type: ignore
+ except Exception as exc:
+ # Some corrupt tar files seem to produce this
+ # (specifically bad symlinks)
+ logger.warning(
+ 'In the tar file %s the member %s is invalid: %s',
+ filename, member.name, exc,
+ )
+ continue
+ else:
+ try:
+ fp = tar.extractfile(member)
+ except (KeyError, AttributeError) as exc:
+ # Some corrupt tar files seem to produce this
+ # (specifically bad symlinks)
+ logger.warning(
+ 'In the tar file %s the member %s is invalid: %s',
+ filename, member.name, exc,
+ )
+ continue
+ ensure_dir(os.path.dirname(path))
+ with open(path, 'wb') as destfp:
+ shutil.copyfileobj(fp, destfp)
+ fp.close()
+ # Update the timestamp (useful for cython compiled files)
+ # https://github.com/python/typeshed/issues/2673
+ tar.utime(member, path) # type: ignore
+ # Does the member have any execute permissions (user/group/world)?
+ if member.mode & 0o111:
+ # make dest file have execute for user/group/world
+ # no-op on windows per python docs
+ os.chmod(path, (0o777 - current_umask() | 0o111))
+ finally:
+ tar.close()
+
+
+def unpack_file(
+ filename, # type: str
+ location, # type: str
+ content_type, # type: Optional[str]
+ link # type: Optional[Link]
+):
+ # type: (...) -> None
+ filename = os.path.realpath(filename)
+ if (content_type == 'application/zip' or
+ filename.lower().endswith(ZIP_EXTENSIONS) or
+ zipfile.is_zipfile(filename)):
+ unzip_file(
+ filename,
+ location,
+ flatten=not filename.endswith('.whl')
+ )
+ elif (content_type == 'application/x-gzip' or
+ tarfile.is_tarfile(filename) or
+ filename.lower().endswith(
+ TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
+ untar_file(filename, location)
+ elif (content_type and content_type.startswith('text/html') and
+ is_svn_page(file_contents(filename))):
+ # We don't really care about this
+ from pip._internal.vcs.subversion import Subversion
+ url = 'svn+' + link.url
+ Subversion().unpack(location, url=url)
+ else:
+ # FIXME: handle?
+ # FIXME: magic signatures?
+ logger.critical(
+ 'Cannot unpack file %s (downloaded from %s, content-type: %s); '
+ 'cannot detect archive format',
+ filename, location, content_type,
+ )
+ raise InstallationError(
+ 'Cannot determine archive format of %s' % location
+ )
+
+
+def format_command_args(args):
+ # type: (List[str]) -> str
+ """
+ Format command arguments for display.
+ """
+ return ' '.join(shlex_quote(arg) for arg in args)
+
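+# Example (shlex_quote only quotes arguments that need it):
+#
+#     >>> format_command_args(['git', 'clone', 'https://example.com/a repo'])
+#     "git clone 'https://example.com/a repo'"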
+
+def make_subprocess_output_error(
+ cmd_args, # type: List[str]
+ cwd, # type: Optional[str]
+ lines, # type: List[Text]
+ exit_status, # type: int
+):
+ # type: (...) -> Text
+ """
+ Create and return the error message to use to log a subprocess error
+ with command output.
+
+ :param lines: A list of lines, each ending with a newline.
+ """
+ command = format_command_args(cmd_args)
+ # Convert `command` and `cwd` to text (unicode in Python 2) so we can use
+ # them as arguments in the unicode format string below. This avoids
+ # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2
+ # if either contains a non-ascii character.
+ command_display = str_to_display(command, desc='command bytes')
+ cwd_display = path_to_display(cwd)
+
+ # We know the joined output value ends in a newline.
+ output = ''.join(lines)
+ msg = (
+ # Use a unicode string to avoid "UnicodeEncodeError: 'ascii'
+ # codec can't encode character ..." in Python 2 when a format
+ # argument (e.g. `output`) has a non-ascii character.
+ u'Command errored out with exit status {exit_status}:\n'
+ ' command: {command_display}\n'
+ ' cwd: {cwd_display}\n'
+ 'Complete output ({line_count} lines):\n{output}{divider}'
+ ).format(
+ exit_status=exit_status,
+ command_display=command_display,
+ cwd_display=cwd_display,
+ line_count=len(lines),
+ output=output,
+ divider=LOG_DIVIDER,
+ )
+ return msg
+
+
+def call_subprocess(
+ cmd, # type: List[str]
+ show_stdout=False, # type: bool
+ cwd=None, # type: Optional[str]
+ on_returncode='raise', # type: str
+ extra_ok_returncodes=None, # type: Optional[Iterable[int]]
+ command_desc=None, # type: Optional[str]
+ extra_environ=None, # type: Optional[Mapping[str, Any]]
+ unset_environ=None, # type: Optional[Iterable[str]]
+ spinner=None # type: Optional[SpinnerInterface]
+):
+ # type: (...) -> Text
+ """
+ Args:
+ show_stdout: if true, use INFO to log the subprocess's stderr and
+ stdout streams. Otherwise, use DEBUG. Defaults to False.
+ extra_ok_returncodes: an iterable of integer return codes that are
+ acceptable, in addition to 0. Defaults to None, which means [].
+ unset_environ: an iterable of environment variable names to unset
+ prior to calling subprocess.Popen().
+ """
+ if extra_ok_returncodes is None:
+ extra_ok_returncodes = []
+ if unset_environ is None:
+ unset_environ = []
+ # Most places in pip use show_stdout=False. What this means is--
+ #
+ # - We connect the child's output (combined stderr and stdout) to a
+ # single pipe, which we read.
+ # - We log this output to stderr at DEBUG level as it is received.
+ # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
+ # requested), then we show a spinner so the user can still see the
+ # subprocess is in progress.
+ # - If the subprocess exits with an error, we log the output to stderr
+ # at ERROR level if it hasn't already been displayed to the console
+ # (e.g. if --verbose logging wasn't enabled). This way we don't log
+ # the output to the console twice.
+ #
+ # If show_stdout=True, then the above is still done, but with DEBUG
+ # replaced by INFO.
+ if show_stdout:
+ # Then log the subprocess output at INFO level.
+ log_subprocess = subprocess_logger.info
+ used_level = std_logging.INFO
+ else:
+ # Then log the subprocess output using DEBUG. This also ensures
+ # it will be logged to the log file (aka user_log), if enabled.
+ log_subprocess = subprocess_logger.debug
+ used_level = std_logging.DEBUG
+
+ # Whether the subprocess will be visible in the console.
+ showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level
+
+ # Only use the spinner if we're not showing the subprocess output
+ # and we have a spinner.
+ use_spinner = not showing_subprocess and spinner is not None
+
+ if command_desc is None:
+ command_desc = format_command_args(cmd)
+
+ log_subprocess("Running command %s", command_desc)
+ env = os.environ.copy()
+ if extra_environ:
+ env.update(extra_environ)
+ for name in unset_environ:
+ env.pop(name, None)
+ try:
+ proc = subprocess.Popen(
+ cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, cwd=cwd, env=env,
+ )
+ proc.stdin.close()
+ except Exception as exc:
+ subprocess_logger.critical(
+ "Error %s while executing command %s", exc, command_desc,
+ )
+ raise
+ all_output = []
+ while True:
+ # The "line" value is a unicode string in Python 2.
+ line = console_to_str(proc.stdout.readline())
+ if not line:
+ break
+ line = line.rstrip()
+ all_output.append(line + '\n')
+
+ # Show the line immediately.
+ log_subprocess(line)
+ # Update the spinner.
+ if use_spinner:
+ spinner.spin()
+ try:
+ proc.wait()
+ finally:
+ if proc.stdout:
+ proc.stdout.close()
+ proc_had_error = (
+ proc.returncode and proc.returncode not in extra_ok_returncodes
+ )
+ if use_spinner:
+ if proc_had_error:
+ spinner.finish("error")
+ else:
+ spinner.finish("done")
+ if proc_had_error:
+ if on_returncode == 'raise':
+ if not showing_subprocess:
+ # Then the subprocess streams haven't been logged to the
+ # console yet.
+ msg = make_subprocess_output_error(
+ cmd_args=cmd,
+ cwd=cwd,
+ lines=all_output,
+ exit_status=proc.returncode,
+ )
+ subprocess_logger.error(msg)
+ exc_msg = (
+ 'Command errored out with exit status {}: {} '
+ 'Check the logs for full command output.'
+ ).format(proc.returncode, command_desc)
+ raise InstallationError(exc_msg)
+ elif on_returncode == 'warn':
+ subprocess_logger.warning(
+ 'Command "%s" had error code %s in %s',
+ command_desc, proc.returncode, cwd,
+ )
+ elif on_returncode == 'ignore':
+ pass
+ else:
+ raise ValueError('Invalid value: on_returncode=%s' %
+ repr(on_returncode))
+ return ''.join(all_output)
+
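+# A minimal usage sketch (hypothetical invocation; output varies by system):
+#
+#     >>> out = call_subprocess(['git', '--version'], show_stdout=False,
+#     ...                       on_returncode='raise')     # doctest: +SKIP
+#     >>> out.startswith('git version')                    # doctest: +SKIP
+#     True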
+
+def _make_build_dir(build_dir):
+ os.makedirs(build_dir)
+ write_delete_marker_file(build_dir)
+
+
+class FakeFile(object):
+ """Wrap a list of lines in an object with readline() to make
+ ConfigParser happy."""
+ def __init__(self, lines):
+ self._gen = iter(lines)
+
+ def readline(self):
+ try:
+ try:
+ return next(self._gen)
+ except NameError:
+ return self._gen.next()
+ except StopIteration:
+ return ''
+
+ def __iter__(self):
+ return self._gen
+
+
+class StreamWrapper(StringIO):
+
+ @classmethod
+ def from_stream(cls, orig_stream):
+ cls.orig_stream = orig_stream
+ return cls()
+
+ # compileall.compile_dir() needs stdout.encoding to print to stdout
+ @property
+ def encoding(self):
+ return self.orig_stream.encoding
+
+
+@contextlib.contextmanager
+def captured_output(stream_name):
+ """Return a context manager used by captured_stdout/stdin/stderr
+ that temporarily replaces the sys stream *stream_name* with a StringIO.
+
+ Taken from Lib/support/__init__.py in the CPython repo.
+ """
+ orig_stdout = getattr(sys, stream_name)
+ setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
+ try:
+ yield getattr(sys, stream_name)
+ finally:
+ setattr(sys, stream_name, orig_stdout)
+
+
+def captured_stdout():
+ """Capture the output of sys.stdout:
+
+ with captured_stdout() as stdout:
+ print('hello')
+ self.assertEqual(stdout.getvalue(), 'hello\n')
+
+ Taken from Lib/support/__init__.py in the CPython repo.
+ """
+ return captured_output('stdout')
+
+
+def captured_stderr():
+ """
+ See captured_stdout().
+ """
+ return captured_output('stderr')
+
+
+class cached_property(object):
+ """A property that is only computed once per instance and then replaces
+ itself with an ordinary attribute. Deleting the attribute resets the
+ property.
+
+ Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
+ """
+
+ def __init__(self, func):
+ self.__doc__ = getattr(func, '__doc__')
+ self.func = func
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ # We're being accessed from the class itself, not from an object
+ return self
+ value = obj.__dict__[self.func.__name__] = self.func(obj)
+ return value
+
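+# Usage sketch (hypothetical class): the first access computes and caches,
+# later accesses hit the instance attribute.
+#
+#     >>> class Example(object):
+#     ...     @cached_property
+#     ...     def value(self):
+#     ...         print("computing")
+#     ...         return 42
+#     >>> e = Example()
+#     >>> e.value
+#     computing
+#     42
+#     >>> e.value
+#     42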
+
+def get_installed_version(dist_name, working_set=None):
+ """Get the installed version of dist_name avoiding pkg_resources cache"""
+ # Create a requirement that we'll look for inside of setuptools.
+ req = pkg_resources.Requirement.parse(dist_name)
+
+ if working_set is None:
+ # We want to avoid having this cached, so we need to construct a new
+ # working set each time.
+ working_set = pkg_resources.WorkingSet()
+
+ # Get the installed distribution from our working set
+ dist = working_set.find(req)
+
+ # Check to see if we got an installed distribution or not; if we did,
+ # return its version.
+ return dist.version if dist else None
+
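+# Example (results depend on the environment, hence the skips):
+#
+#     >>> get_installed_version('pip')              # doctest: +SKIP
+#     '19.1.1'
+#     >>> get_installed_version('no-such-dist')     # returns None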
+
+def consume(iterator):
+ """Consume an iterable at C speed."""
+ deque(iterator, maxlen=0)
+
+
+# Simulates an enum
+def enum(*sequential, **named):
+ enums = dict(zip(sequential, range(len(sequential))), **named)
+ reverse = {value: key for key, value in enums.items()}
+ enums['reverse_mapping'] = reverse
+ return type('Enum', (), enums)
+
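+# Usage sketch:
+#
+#     >>> Colors = enum('RED', 'GREEN', BLUE=10)
+#     >>> (Colors.RED, Colors.GREEN, Colors.BLUE)
+#     (0, 1, 10)
+#     >>> Colors.reverse_mapping[10]
+#     'BLUE'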
+
+def path_to_url(path):
+ # type: (Union[str, Text]) -> str
+ """
+ Convert a path to a file: URL. The path will be made absolute and have
+ quoted path parts.
+ """
+ path = os.path.normpath(os.path.abspath(path))
+ url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
+ return url
+
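+# Example (POSIX paths; the exact result is platform dependent):
+#
+#     >>> path_to_url('/tmp/some dir/file.txt')     # doctest: +SKIP
+#     'file:///tmp/some%20dir/file.txt'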
+
+def split_auth_from_netloc(netloc):
+ """
+ Parse out and remove the auth information from a netloc.
+
+ Returns: (netloc, (username, password)).
+ """
+ if '@' not in netloc:
+ return netloc, (None, None)
+
+ # Split from the right because that's how urllib.parse.urlsplit()
+ # behaves if more than one @ is present (which can be checked using
+ # the password attribute of urlsplit()'s return value).
+ auth, netloc = netloc.rsplit('@', 1)
+ if ':' in auth:
+ # Split from the left because that's how urllib.parse.urlsplit()
+ # behaves if more than one : is present (which again can be checked
+ # using the password attribute of the return value)
+ user_pass = auth.split(':', 1)
+ else:
+ user_pass = auth, None
+
+ user_pass = tuple(
+ None if x is None else urllib_unquote(x) for x in user_pass
+ )
+
+ return netloc, user_pass
+
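+# Examples:
+#
+#     >>> split_auth_from_netloc('user:pass@example.com')
+#     ('example.com', ('user', 'pass'))
+#     >>> split_auth_from_netloc('example.com')
+#     ('example.com', (None, None))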
+
+def redact_netloc(netloc):
+ # type: (str) -> str
+ """
+ Replace the password in a netloc with "****", if it exists.
+
+ For example, "user:pass@example.com" returns "user:****@example.com".
+ """
+ netloc, (user, password) = split_auth_from_netloc(netloc)
+ if user is None:
+ return netloc
+ password = '' if password is None else ':****'
+ return '{user}{password}@{netloc}'.format(user=urllib_parse.quote(user),
+ password=password,
+ netloc=netloc)
+
+
+def _transform_url(url, transform_netloc):
+ """Transform and replace netloc in a url.
+
+ transform_netloc is a function taking the netloc and returning a
+ tuple. The first element of this tuple is the new netloc. The
+ entire tuple is returned.
+
+ Returns a tuple containing the transformed url as item 0 and the
+ original tuple returned by transform_netloc as item 1.
+ """
+ purl = urllib_parse.urlsplit(url)
+ netloc_tuple = transform_netloc(purl.netloc)
+ # stripped url
+ url_pieces = (
+ purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment
+ )
+ surl = urllib_parse.urlunsplit(url_pieces)
+ return surl, netloc_tuple
+
+
+def _get_netloc(netloc):
+ return split_auth_from_netloc(netloc)
+
+
+def _redact_netloc(netloc):
+ return (redact_netloc(netloc),)
+
+
+def split_auth_netloc_from_url(url):
+ # type: (str) -> Tuple[str, str, Tuple[str, str]]
+ """
+ Parse a url into separate netloc, auth, and url with no auth.
+
+ Returns: (url_without_auth, netloc, (username, password))
+ """
+ url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
+ return url_without_auth, netloc, auth
+
+
+def remove_auth_from_url(url):
+ # type: (str) -> str
+ """Return a copy of url with 'username:password@' removed."""
+ # username/pass params are passed to subversion through flags
+ # and are not recognized in the url.
+ return _transform_url(url, _get_netloc)[0]
+
+
+def redact_password_from_url(url):
+ # type: (str) -> str
+ """Replace the password in a given url with ****."""
+ return _transform_url(url, _redact_netloc)[0]
+
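+# Examples for the auth helpers above:
+#
+#     >>> redact_password_from_url('https://user:secret@example.com/simple')
+#     'https://user:****@example.com/simple'
+#     >>> remove_auth_from_url('https://user:secret@example.com/simple')
+#     'https://example.com/simple'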
+
+def protect_pip_from_modification_on_windows(modifying_pip):
+ """Protection of pip.exe from modification on Windows
+
+ On Windows, any operation modifying pip should be run as:
+ python -m pip ...
+ """
+ pip_names = [
+ "pip.exe",
+ "pip{}.exe".format(sys.version_info[0]),
+ "pip{}.{}.exe".format(*sys.version_info[:2])
+ ]
+
+ # See https://github.com/pypa/pip/issues/1299 for more discussion
+ should_show_use_python_msg = (
+ modifying_pip and
+ WINDOWS and
+ os.path.basename(sys.argv[0]) in pip_names
+ )
+
+ if should_show_use_python_msg:
+ new_command = [
+ sys.executable, "-m", "pip"
+ ] + sys.argv[1:]
+ raise CommandError(
+ 'To modify pip, please run the following command:\n{}'
+ .format(" ".join(new_command))
+ )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py
new file mode 100644
index 00000000..fccaf5dd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py
@@ -0,0 +1,40 @@
+"""Utilities for defining models
+"""
+
+import operator
+
+
+class KeyBasedCompareMixin(object):
+ """Provides comparison capabilities that is based on a key
+ """
+
+ def __init__(self, key, defining_class):
+ self._compare_key = key
+ self._defining_class = defining_class
+
+ def __hash__(self):
+ return hash(self._compare_key)
+
+ def __lt__(self, other):
+ return self._compare(other, operator.__lt__)
+
+ def __le__(self, other):
+ return self._compare(other, operator.__le__)
+
+ def __gt__(self, other):
+ return self._compare(other, operator.__gt__)
+
+ def __ge__(self, other):
+ return self._compare(other, operator.__ge__)
+
+ def __eq__(self, other):
+ return self._compare(other, operator.__eq__)
+
+ def __ne__(self, other):
+ return self._compare(other, operator.__ne__)
+
+ def _compare(self, other, method):
+ if not isinstance(other, self._defining_class):
+ return NotImplemented
+
+ return method(self._compare_key, other._compare_key)
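+
+
+# Usage sketch (hypothetical subclass): subclasses pass their comparison key
+# and their own class, so comparisons across unrelated types return
+# NotImplemented.
+#
+#     >>> class IntBox(KeyBasedCompareMixin):
+#     ...     def __init__(self, n):
+#     ...         super(IntBox, self).__init__(key=n, defining_class=IntBox)
+#     >>> IntBox(1) < IntBox(2)
+#     True
+#     >>> IntBox(2) == IntBox(2)
+#     True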
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py
new file mode 100644
index 00000000..2b10aeff
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py
@@ -0,0 +1,178 @@
+from __future__ import absolute_import
+
+import datetime
+import json
+import logging
+import os.path
+import sys
+
+from pip._vendor import lockfile, pkg_resources
+from pip._vendor.packaging import version as packaging_version
+
+from pip._internal.cli.cmdoptions import make_search_scope
+from pip._internal.index import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import ensure_dir, get_installed_version
+from pip._internal.utils.packaging import get_installer
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ import optparse
+ from typing import Any, Dict
+ from pip._internal.download import PipSession
+
+
+SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
+class SelfCheckState(object):
+ def __init__(self, cache_dir):
+ # type: (str) -> None
+ self.state = {} # type: Dict[str, Any]
+ self.statefile_path = None
+
+ # Try to load the existing state
+ if cache_dir:
+ self.statefile_path = os.path.join(cache_dir, "selfcheck.json")
+ try:
+ with open(self.statefile_path) as statefile:
+ self.state = json.load(statefile)[sys.prefix]
+ except (IOError, ValueError, KeyError):
+ # Explicitly suppressing exceptions, since we don't want to
+ # error out if the cache file is invalid.
+ pass
+
+ def save(self, pypi_version, current_time):
+ # type: (str, datetime.datetime) -> None
+ # If we do not have a path to cache in, don't bother saving.
+ if not self.statefile_path:
+ return
+
+ # Check to make sure that we own the directory
+ if not check_path_owner(os.path.dirname(self.statefile_path)):
+ return
+
+ # Now that we've ensured the directory is owned by this user, we'll go
+ # ahead and make sure that all our directories are created.
+ ensure_dir(os.path.dirname(self.statefile_path))
+
+ # Attempt to write out our version check file
+ with lockfile.LockFile(self.statefile_path):
+ if os.path.exists(self.statefile_path):
+ with open(self.statefile_path) as statefile:
+ state = json.load(statefile)
+ else:
+ state = {}
+
+ state[sys.prefix] = {
+ "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
+ "pypi_version": pypi_version,
+ }
+
+ with open(self.statefile_path, "w") as statefile:
+ json.dump(state, statefile, sort_keys=True,
+ separators=(",", ":"))
+
+
+def was_installed_by_pip(pkg):
+ # type: (str) -> bool
+ """Checks whether pkg was installed by pip
+
+ This is used to avoid displaying the upgrade message when pip was in
+ fact installed by a system package manager, such as dnf on Fedora.
+ """
+ try:
+ dist = pkg_resources.get_distribution(pkg)
+ return "pip" == get_installer(dist)
+ except pkg_resources.DistributionNotFound:
+ return False
+
+
+def pip_version_check(session, options):
+ # type: (PipSession, optparse.Values) -> None
+ """Check for an update for pip.
+
+ Limit the frequency of checks to once per week. State is stored either in
+ the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
+ of the pip script path.
+ """
+ installed_version = get_installed_version("pip")
+ if not installed_version:
+ return
+
+ pip_version = packaging_version.parse(installed_version)
+ pypi_version = None
+
+ try:
+ state = SelfCheckState(cache_dir=options.cache_dir)
+
+ current_time = datetime.datetime.utcnow()
+ # Determine if we need to refresh the state
+ if "last_check" in state.state and "pypi_version" in state.state:
+ last_check = datetime.datetime.strptime(
+ state.state["last_check"],
+ SELFCHECK_DATE_FMT
+ )
+ if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
+ pypi_version = state.state["pypi_version"]
+
+ # Refresh the version if we need to or just see if we need to warn
+ if pypi_version is None:
+ # Let's use PackageFinder to see what the latest pip version is.
+ search_scope = make_search_scope(options, suppress_no_index=True)
+
+ # Pass allow_yanked=False so we don't suggest upgrading to a
+ # yanked version.
+ selection_prefs = SelectionPreferences(
+ allow_yanked=False,
+ allow_all_prereleases=False, # Explicitly set to False
+ )
+
+ finder = PackageFinder.create(
+ search_scope=search_scope,
+ selection_prefs=selection_prefs,
+ trusted_hosts=options.trusted_hosts,
+ session=session,
+ )
+ candidate = finder.find_candidates("pip").get_best()
+ if candidate is None:
+ return
+ pypi_version = str(candidate.version)
+
+ # save that we've performed a check
+ state.save(pypi_version, current_time)
+
+ remote_version = packaging_version.parse(pypi_version)
+
+ local_version_is_older = (
+ pip_version < remote_version and
+ pip_version.base_version != remote_version.base_version and
+ was_installed_by_pip('pip')
+ )
+
+ # If the installed version is not older, there is nothing to warn about.
+ if not local_version_is_older:
+ return
+
+ # Advise "python -m pip" on Windows to avoid issues
+ # with overwriting pip.exe.
+ if WINDOWS:
+ pip_cmd = "python -m pip"
+ else:
+ pip_cmd = "pip"
+ logger.warning(
+ "You are using pip version %s, however version %s is "
+ "available.\nYou should consider upgrading via the "
+ "'%s install --upgrade pip' command.",
+ pip_version, pypi_version, pip_cmd
+ )
+ except Exception:
+ logger.debug(
+ "There was an error checking the latest version of pip",
+ exc_info=True,
+ )
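+
+
+# For reference, the selfcheck.json payload written by SelfCheckState.save()
+# is keyed by sys.prefix (illustrative values):
+#
+#     {"/usr": {"last_check": "2019-06-01T00:00:00Z", "pypi_version": "19.1.1"}}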
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py
new file mode 100644
index 00000000..68aa86ed
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+
+import logging
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging import specifiers, version
+
+from pip._internal.exceptions import NoneMetadataError
+from pip._internal.utils.misc import display_path
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+ from email.message import Message
+ from pip._vendor.pkg_resources import Distribution
+
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(requires_python, version_info):
+ # type: (Optional[str], Tuple[int, ...]) -> bool
+ """
+ Check if the given Python version matches a "Requires-Python" specifier.
+
+ :param version_info: A 3-tuple of ints representing a Python
+ major-minor-micro version to check (e.g. `sys.version_info[:3]`).
+
+ :return: `True` if the given Python version satisfies the requirement.
+ Otherwise, return `False`.
+
+ :raises InvalidSpecifier: If `requires_python` has an invalid format.
+ """
+ if requires_python is None:
+ # The package provides no information
+ return True
+ requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+ python_version = version.parse('.'.join(map(str, version_info)))
+ return python_version in requires_python_specifier
+
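+# Examples:
+#
+#     >>> check_requires_python('>=3.6', (3, 7, 3))
+#     True
+#     >>> check_requires_python('>=3.6', (2, 7, 16))
+#     False
+#     >>> check_requires_python(None, (3, 7, 3))
+#     True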
+
+def get_metadata(dist):
+ # type: (Distribution) -> Message
+ """
+ :raises NoneMetadataError: if the distribution reports `has_metadata()`
+ True but `get_metadata()` returns None.
+ """
+ metadata_name = 'METADATA'
+ if (isinstance(dist, pkg_resources.DistInfoDistribution) and
+ dist.has_metadata(metadata_name)):
+ metadata = dist.get_metadata(metadata_name)
+ elif dist.has_metadata('PKG-INFO'):
+ metadata_name = 'PKG-INFO'
+ metadata = dist.get_metadata(metadata_name)
+ else:
+ logger.warning("No metadata found in %s", display_path(dist.location))
+ metadata = ''
+
+ if metadata is None:
+ raise NoneMetadataError(dist, metadata_name)
+
+ feed_parser = FeedParser()
+ # The following line errors out with a "NoneType" TypeError if
+ # passed metadata=None.
+ feed_parser.feed(metadata)
+ return feed_parser.close()
+
+
+def get_requires_python(dist):
+ # type: (pkg_resources.Distribution) -> Optional[str]
+ """
+ Return the "Requires-Python" metadata for a distribution, or None
+ if not present.
+ """
+ pkg_info_dict = get_metadata(dist)
+ requires_python = pkg_info_dict.get('Requires-Python')
+
+ if requires_python is not None:
+ # Convert to a str to satisfy the type checker, since requires_python
+ # can be a Header object.
+ requires_python = str(requires_python)
+
+ return requires_python
+
+
+def get_installer(dist):
+ # type: (Distribution) -> str
+ if dist.has_metadata('INSTALLER'):
+ for line in dist.get_metadata_lines('INSTALLER'):
+ if line.strip():
+ return line.strip()
+ return ''
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py
new file mode 100644
index 00000000..58956072
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py
@@ -0,0 +1,36 @@
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+# Shim to wrap setup.py invocation with setuptools
+#
+# We set sys.argv[0] to the path to the underlying setup.py file so
+# setuptools / distutils don't take the path to the setup.py to be "-c" when
+# invoking via the shim. This avoids e.g. the following manifest_maker
+# warning: "warning: manifest_maker: standard file '-c' not found".
+_SETUPTOOLS_SHIM = (
+ "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};"
+ "f=getattr(tokenize, 'open', open)(__file__);"
+ "code=f.read().replace('\\r\\n', '\\n');"
+ "f.close();"
+ "exec(compile(code, __file__, 'exec'))"
+)
+
+
+def make_setuptools_shim_args(setup_py_path, unbuffered_output=False):
+ # type: (str, bool) -> List[str]
+ """
+ Get setuptools command arguments with a shim-wrapped setup.py invocation.
+
+ :param setup_py_path: The path to setup.py to be wrapped.
+ :param unbuffered_output: If True, adds the unbuffered switch to the
+ argument list.
+ """
+ args = [sys.executable]
+ if unbuffered_output:
+ args.append('-u')
+ args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)])
+ return args
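+
+
+# A minimal sketch of the resulting argv (the setup.py path is hypothetical;
+# the '-c' payload is the _SETUPTOOLS_SHIM above, abbreviated here):
+#
+#     >>> make_setuptools_shim_args('/src/pkg/setup.py',
+#     ...                           unbuffered_output=True)  # doctest: +SKIP
+#     ['/usr/bin/python3', '-u', '-c', "import sys, setuptools, tokenize; ..."]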
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py
new file mode 100644
index 00000000..2c81ad55
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py
@@ -0,0 +1,155 @@
+from __future__ import absolute_import
+
+import errno
+import itertools
+import logging
+import os.path
+import tempfile
+
+from pip._internal.utils.misc import rmtree
+
+logger = logging.getLogger(__name__)
+
+
+class TempDirectory(object):
+ """Helper class that owns and cleans up a temporary directory.
+
+ This class can be used as a context manager or as an OO representation of a
+ temporary directory.
+
+ Attributes:
+ path
+ Location of the created temporary directory, or None
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ Methods:
+ create()
+ Creates a temporary directory and stores its path in the path
+ attribute.
+ cleanup()
+ Deletes the temporary directory and sets path attribute to None
+
+ When used as a context manager, a temporary directory is created on
+ entering the context and, if the delete attribute is True, on exiting the
+ context the created directory is deleted.
+ """
+
+ def __init__(self, path=None, delete=None, kind="temp"):
+ super(TempDirectory, self).__init__()
+
+ if path is None and delete is None:
+ # If we were not given an explicit directory, and we were not given
+ # an explicit delete option, then we'll default to deleting.
+ delete = True
+
+ self.path = path
+ self.delete = delete
+ self.kind = kind
+
+ def __repr__(self):
+ return "<{} {!r}>".format(self.__class__.__name__, self.path)
+
+ def __enter__(self):
+ self.create()
+ return self
+
+ def __exit__(self, exc, value, tb):
+ if self.delete:
+ self.cleanup()
+
+ def create(self):
+ """Create a temporary directory and store its path in self.path
+ """
+ if self.path is not None:
+ logger.debug(
+ "Skipped creation of temporary directory: {}".format(self.path)
+ )
+ return
+ # We realpath here because some systems have their default tmpdir
+ # symlinked to another directory. This tends to confuse build
+ # scripts, so we canonicalize the path by traversing potential
+ # symlinks here.
+ self.path = os.path.realpath(
+ tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
+ )
+ logger.debug("Created temporary directory: {}".format(self.path))
+
+ def cleanup(self):
+ """Remove the temporary directory created and reset state
+ """
+ if self.path is not None and os.path.exists(self.path):
+ rmtree(self.path)
+ self.path = None
+
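+# A minimal usage sketch: as a context manager, the directory is created on
+# enter and removed on exit when delete is True (the default when neither
+# path nor delete is given).
+#
+#     >>> with TempDirectory(kind="unpack") as tmp:   # doctest: +SKIP
+#     ...     work_dir = tmp.path                     # use the directory here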
+
+class AdjacentTempDirectory(TempDirectory):
+ """Helper class that creates a temporary directory adjacent to a real one.
+
+ Attributes:
+ original
+ The original directory to create a temp directory for.
+ path
+ After calling create() or entering, contains the full
+ path to the temporary directory.
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ """
+ # The characters that may be used to name the temp directory
+ # We always prepend a ~ and then rotate through these until
+ # a usable name is found.
+ # pkg_resources raises a different error for .dist-info folder
+ # with leading '-' and invalid metadata
+ LEADING_CHARS = "-~.=%0123456789"
+
+ def __init__(self, original, delete=None):
+ super(AdjacentTempDirectory, self).__init__(delete=delete)
+ self.original = original.rstrip('/\\')
+
+ @classmethod
+ def _generate_names(cls, name):
+ """Generates a series of temporary names.
+
+ The algorithm replaces the leading characters in the name
+ with ones that are valid filesystem characters, but are not
+ valid package names (for both Python and pip definitions of
+ package).
+ """
+ for i in range(1, len(name)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i - 1):
+ new_name = '~' + ''.join(candidate) + name[i:]
+ if new_name != name:
+ yield new_name
+
+ # If we make it this far, we will have to make a longer name
+ for i in range(len(cls.LEADING_CHARS)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i):
+ new_name = '~' + ''.join(candidate) + name
+ if new_name != name:
+ yield new_name
+
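+ # Illustration: for a directory named "pkg" the generator yields
+ # '~kg' first, then '~-g', '~~g', '~.g', ... and finally longer
+ # fallbacks such as '~pkg' and '~-pkg'.
+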
+ def create(self):
+ root, name = os.path.split(self.original)
+ for candidate in self._generate_names(name):
+ path = os.path.join(root, candidate)
+ try:
+ os.mkdir(path)
+ except OSError as ex:
+ # Continue if the name exists already
+ if ex.errno != errno.EEXIST:
+ raise
+ else:
+ self.path = os.path.realpath(path)
+ break
+
+ if not self.path:
+ # Final fallback on the default behavior.
+ self.path = os.path.realpath(
+ tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
+ )
+ logger.debug("Created temporary directory: {}".format(self.path))
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py
new file mode 100644
index 00000000..10170ce2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py
@@ -0,0 +1,29 @@
+"""For neatly implementing static typing in pip.
+
+`mypy` - the static type analysis tool we use - uses the `typing` module, which
+provides core functionality fundamental to mypy's functioning.
+
+Generally, `typing` would be imported at runtime and used in that fashion -
+it acts as a no-op at runtime and does not have any run-time overhead by
+design.
+
+As it turns out, `typing` is not vendorable - it uses separate sources for
+Python 2/Python 3. Thus, this codebase cannot expect it to be present.
+To work around this, mypy allows the typing import to be behind a False-y
+optional to prevent it from running at runtime and type-comments can be used
+to remove the need for the types to be accessible directly during runtime.
+
+This module provides the False-y guard in a nicely named fashion so that a
+curious maintainer can reach here to read this.
+
+In pip, all static-typing related imports should be guarded as follows:
+
+ from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+ if MYPY_CHECK_RUNNING:
+ from typing import ...
+
+Ref: https://github.com/python/mypy/issues/3216
+"""
+
+MYPY_CHECK_RUNNING = False
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py
new file mode 100644
index 00000000..46390f4a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py
@@ -0,0 +1,424 @@
+from __future__ import absolute_import, division
+
+import contextlib
+import itertools
+import logging
+import sys
+import time
+from signal import SIGINT, default_int_handler, signal
+
+from pip._vendor import six
+from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
+from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
+from pip._vendor.progress.spinner import Spinner
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import get_indentation
+from pip._internal.utils.misc import format_size
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Iterator, IO
+
+try:
+ from pip._vendor import colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+
+logger = logging.getLogger(__name__)
+
+
+def _select_progress_class(preferred, fallback):
+ encoding = getattr(preferred.file, "encoding", None)
+
+ # If we don't know what encoding this file is in, then we'll just assume
+ # that it doesn't support unicode and use the ASCII bar.
+ if not encoding:
+ return fallback
+
+ # Collect all of the possible characters we want to use with the preferred
+ # bar.
+ characters = [
+ getattr(preferred, "empty_fill", six.text_type()),
+ getattr(preferred, "fill", six.text_type()),
+ ]
+ characters += list(getattr(preferred, "phases", []))
+
+ # Try to decode the characters we're using for the bar using the encoding
+ # of the given file, if this works then we'll assume that we can use the
+ # fancier bar and if not we'll fall back to the plaintext bar.
+ try:
+ six.text_type().join(characters).encode(encoding)
+ except UnicodeEncodeError:
+ return fallback
+ else:
+ return preferred
+
+
+_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
+
+
+class InterruptibleMixin(object):
+ """
+ Helper to ensure that self.finish() gets called on keyboard interrupt.
+
+ This allows downloads to be interrupted without leaving temporary state
+ (like hidden cursors) behind.
+
+ This class is similar to the progress library's existing SigIntMixin
+ helper, but as of version 1.2, that helper has the following problems:
+
+ 1. It calls sys.exit().
+ 2. It discards the existing SIGINT handler completely.
+ 3. It leaves its own handler in place even after an uninterrupted finish,
+ which will have unexpected delayed effects if the user triggers an
+ unrelated keyboard interrupt some time after a progress-displaying
+ download has already completed, for example.
+ """
+
+ def __init__(self, *args, **kwargs):
+ """
+ Save the original SIGINT handler for later.
+ """
+ super(InterruptibleMixin, self).__init__(*args, **kwargs)
+
+ self.original_handler = signal(SIGINT, self.handle_sigint)
+
+ # If signal() returns None, the previous handler was not installed from
+ # Python, and we cannot restore it. This probably should not happen,
+ # but if it does, we must restore something sensible instead, at least.
+ # The least bad option should be Python's default SIGINT handler, which
+ # just raises KeyboardInterrupt.
+ if self.original_handler is None:
+ self.original_handler = default_int_handler
+
+ def finish(self):
+ """
+ Restore the original SIGINT handler after finishing.
+
+ This should happen regardless of whether the progress display finishes
+ normally, or gets interrupted.
+ """
+ super(InterruptibleMixin, self).finish()
+ signal(SIGINT, self.original_handler)
+
+ def handle_sigint(self, signum, frame):
+ """
+ Call self.finish() before delegating to the original SIGINT handler.
+
+ This handler should only be in place while the progress display is
+ active.
+ """
+ self.finish()
+ self.original_handler(signum, frame)
+
+
+class SilentBar(Bar):
+
+ def update(self):
+ pass
+
+
+class BlueEmojiBar(IncrementalBar):
+
+ suffix = "%(percent)d%%"
+ bar_prefix = " "
+ bar_suffix = " "
+ phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
+
+
+class DownloadProgressMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ super(DownloadProgressMixin, self).__init__(*args, **kwargs)
+ self.message = (" " * (get_indentation() + 2)) + self.message
+
+ @property
+ def downloaded(self):
+ return format_size(self.index)
+
+ @property
+ def download_speed(self):
+ # Avoid zero division errors...
+ if self.avg == 0.0:
+ return "..."
+ return format_size(1 / self.avg) + "/s"
+
+ @property
+ def pretty_eta(self):
+ if self.eta:
+ return "eta %s" % self.eta_td
+ return ""
+
+ def iter(self, it, n=1):
+ for x in it:
+ yield x
+ self.next(n)
+ self.finish()
+
+
+class WindowsMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ # The Windows terminal does not support the hide/show cursor ANSI codes
+ # even with colorama. So we'll ensure that hide_cursor is False on
+ # Windows.
+ # This call needs to go before the super() call, so that hide_cursor
+ # is set in time. The base progress bar class writes the "hide cursor"
+ # code to the terminal in its init, so if we don't set this soon
+ # enough, we get a "hide" with no corresponding "show"...
+ if WINDOWS and self.hide_cursor:
+ self.hide_cursor = False
+
+ super(WindowsMixin, self).__init__(*args, **kwargs)
+
+ # Check if we are running on Windows and we have the colorama module,
+ # if we do then wrap our file with it.
+ if WINDOWS and colorama:
+ self.file = colorama.AnsiToWin32(self.file)
+ # The progress code expects to be able to call self.file.isatty()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.isatty = lambda: self.file.wrapped.isatty()
+ # The progress code expects to be able to call self.file.flush()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.flush = lambda: self.file.wrapped.flush()
+
+
+class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin):
+
+ file = sys.stdout
+ message = "%(percent)d%%"
+ suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
+
+# NOTE: The "type: ignore" comments on the following classes are there to
+# work around https://github.com/python/typing/issues/241
+
+
+class DefaultDownloadProgressBar(BaseDownloadProgressBar,
+ _BaseBar):
+ pass
+
+
+class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
+ pass
+
+
+class DownloadBar(BaseDownloadProgressBar, # type: ignore
+ Bar):
+ pass
+
+
+class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
+ FillingCirclesBar):
+ pass
+
+
+class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
+ BlueEmojiBar):
+ pass
+
+
+class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin, Spinner):
+
+ file = sys.stdout
+ suffix = "%(downloaded)s %(download_speed)s"
+
+ def next_phase(self):
+ if not hasattr(self, "_phaser"):
+ self._phaser = itertools.cycle(self.phases)
+ return next(self._phaser)
+
+ def update(self):
+ message = self.message % self
+ phase = self.next_phase()
+ suffix = self.suffix % self
+ line = ''.join([
+ message,
+ " " if message else "",
+ phase,
+ " " if suffix else "",
+ suffix,
+ ])
+
+ self.writeln(line)
+
+
+BAR_TYPES = {
+ "off": (DownloadSilentBar, DownloadSilentBar),
+ "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
+ "ascii": (DownloadBar, DownloadProgressSpinner),
+ "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
+ "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
+}
+
+
+def DownloadProgressProvider(progress_bar, max=None):
+ if max is None or max == 0:
+ return BAR_TYPES[progress_bar][1]().iter
+ else:
+ return BAR_TYPES[progress_bar][0](max=max).iter
+
+
+################################################################
+# Generic "something is happening" spinners
+#
+# We don't even try using progress.spinner.Spinner here because it's actually
+# simpler to reimplement from scratch than to coerce their code into doing
+# what we need.
+################################################################
+
+@contextlib.contextmanager
+def hidden_cursor(file):
+ # type: (IO) -> Iterator[None]
+ # The Windows terminal does not support the hide/show cursor ANSI codes,
+ # even via colorama. So don't even try.
+ if WINDOWS:
+ yield
+ # We don't want to clutter the output with control characters if we're
+ # writing to a file, or if the user is running with --quiet.
+ # See https://github.com/pypa/pip/issues/3418
+ elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
+ yield
+ else:
+ file.write(HIDE_CURSOR)
+ try:
+ yield
+ finally:
+ file.write(SHOW_CURSOR)
+
+
+class RateLimiter(object):
+ def __init__(self, min_update_interval_seconds):
+ # type: (float) -> None
+ self._min_update_interval_seconds = min_update_interval_seconds
+ self._last_update = 0 # type: float
+
+ def ready(self):
+ # type: () -> bool
+ now = time.time()
+ delta = now - self._last_update
+ return delta >= self._min_update_interval_seconds
+
+ def reset(self):
+ # type: () -> None
+ self._last_update = time.time()
+
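+# Usage sketch:
+#
+#     >>> limiter = RateLimiter(min_update_interval_seconds=0.5)
+#     >>> limiter.ready()     # True: no update has been recorded yet
+#     True
+#     >>> limiter.reset()     # record an update at the current time
+#     >>> limiter.ready()     # immediately afterwards: too soon
+#     False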
+
+class SpinnerInterface(object):
+ def spin(self):
+ # type: () -> None
+ raise NotImplementedError()
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ raise NotImplementedError()
+
+
+class InteractiveSpinner(SpinnerInterface):
+ def __init__(self, message, file=None, spin_chars="-\\|/",
+ # Empirically, 8 updates/second looks nice
+ min_update_interval_seconds=0.125):
+ self._message = message
+ if file is None:
+ file = sys.stdout
+ self._file = file
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._finished = False
+
+ self._spin_cycle = itertools.cycle(spin_chars)
+
+ self._file.write(" " * get_indentation() + self._message + " ... ")
+ self._width = 0
+
+ def _write(self, status):
+ assert not self._finished
+ # Erase what we wrote before by backspacing to the beginning, writing
+ # spaces to overwrite the old text, and then backspacing again
+ backup = "\b" * self._width
+ self._file.write(backup + " " * self._width + backup)
+ # Now we have a blank slate to add our status
+ self._file.write(status)
+ self._width = len(status)
+ self._file.flush()
+ self._rate_limiter.reset()
+
+ def spin(self):
+ # type: () -> None
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._write(next(self._spin_cycle))
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ if self._finished:
+ return
+ self._write(final_status)
+ self._file.write("\n")
+ self._file.flush()
+ self._finished = True
+
+
+# Used for dumb terminals, non-interactive installs (no tty), etc.
+# We still print updates occasionally (once every 60 seconds by default) to
+# act as a keep-alive for systems like Travis-CI that take lack-of-output as
+# an indication that a task has frozen.
+class NonInteractiveSpinner(SpinnerInterface):
+ def __init__(self, message, min_update_interval_seconds=60):
+ # type: (str, float) -> None
+ self._message = message
+ self._finished = False
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._update("started")
+
+ def _update(self, status):
+ assert not self._finished
+ self._rate_limiter.reset()
+ logger.info("%s: %s", self._message, status)
+
+ def spin(self):
+ # type: () -> None
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._update("still running...")
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ if self._finished:
+ return
+ self._update("finished with status '%s'" % (final_status,))
+ self._finished = True
+
+
+@contextlib.contextmanager
+def open_spinner(message):
+ # type: (str) -> Iterator[SpinnerInterface]
+ # Interactive spinner goes directly to sys.stdout rather than being routed
+ # through the logging system, but it acts like it has level INFO,
+ # i.e. it's only displayed if we're at level INFO or better.
+ # Non-interactive spinner goes through the logging system, so it is always
+ # in sync with logging configuration.
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
+ spinner = InteractiveSpinner(message) # type: SpinnerInterface
+ else:
+ spinner = NonInteractiveSpinner(message)
+ try:
+ with hidden_cursor(sys.stdout):
+ yield spinner
+ except KeyboardInterrupt:
+ spinner.finish("canceled")
+ raise
+ except Exception:
+ spinner.finish("error")
+ raise
+ else:
+ spinner.finish("done")
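+
+
+# A minimal usage sketch (the body of the with-block is hypothetical):
+#
+#     >>> with open_spinner("Running setup.py") as spinner:  # doctest: +SKIP
+#     ...     for chunk in do_work():
+#     ...         spinner.spin()
+#
+# On normal exit the spinner finishes with "done"; KeyboardInterrupt and
+# other exceptions finish it with "canceled" and "error" respectively.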
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py
new file mode 100644
index 00000000..380db1c3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py
@@ -0,0 +1,34 @@
+import os.path
+import site
+import sys
+
+
+def running_under_virtualenv():
+ # type: () -> bool
+ """
+ Return True if we're running inside a virtualenv, False otherwise.
+
+ """
+ if hasattr(sys, 'real_prefix'):
+ # pypa/virtualenv case
+ return True
+ elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
+ # PEP 405 venv
+ return True
+
+ return False
+
+
+def virtualenv_no_global():
+ # type: () -> bool
+ """
+ Return True if in a venv and no system site packages.
+ """
+ # this mirrors the logic in virtualenv.py for locating the
+ # no-global-site-packages.txt file
+ site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
+ no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
+ if running_under_virtualenv() and os.path.isfile(no_global_file):
+ return True
+ else:
+ return False
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py
new file mode 100644
index 00000000..cb573ab6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py
@@ -0,0 +1,12 @@
+# Expose a limited set of classes and functions so callers outside of
+# the vcs package don't need to import deeper than `pip._internal.vcs`.
+# (The test directory and imports protected by MYPY_CHECK_RUNNING may
+# still need to import from a vcs sub-package.)
+from pip._internal.vcs.versioncontrol import ( # noqa: F401
+ RemoteNotFoundError, make_vcs_requirement_url, vcs,
+)
+# Import all vcs modules to register each VCS in the VcsSupport object.
+import pip._internal.vcs.bazaar
+import pip._internal.vcs.git
+import pip._internal.vcs.mercurial
+import pip._internal.vcs.subversion # noqa: F401
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..a4eba3d7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc
new file mode 100644
index 00000000..126f552c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pyc
new file mode 100644
index 00000000..e331d430
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc
new file mode 100644
index 00000000..898fd9a2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc
new file mode 100644
index 00000000..abf6452d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc
new file mode 100644
index 00000000..d9a06ebf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py
new file mode 100644
index 00000000..4f1e114b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.utils.misc import display_path, path_to_url, rmtree
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+logger = logging.getLogger(__name__)
+
+
+class Bazaar(VersionControl):
+ name = 'bzr'
+ dirname = '.bzr'
+ repo_name = 'branch'
+ schemes = (
+ 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
+ 'bzr+lp',
+ )
+
+ def __init__(self, *args, **kwargs):
+ super(Bazaar, self).__init__(*args, **kwargs)
+ # This is only needed for Python < 2.7.5.
+ # Register lp (but do not expose it as a scheme) to support bzr+lp.
+ if getattr(urllib_parse, 'uses_fragment', None):
+ urllib_parse.uses_fragment.extend(['lp'])
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return ['-r', rev]
+
+ def export(self, location, url):
+ """
+ Export the Bazaar repository at the url to the destination location
+ """
+ # Remove the location to make sure Bazaar can export it correctly
+ if os.path.exists(location):
+ rmtree(location)
+
+ url, rev_options = self.get_url_rev_options(url)
+ self.run_command(
+ ['export', location, url] + rev_options.to_args(),
+ show_stdout=False,
+ )
+
+ def fetch_new(self, dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Checking out %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
+ self.run_command(cmd_args)
+
+ def switch(self, dest, url, rev_options):
+ self.run_command(['switch', url], cwd=dest)
+
+ def update(self, dest, url, rev_options):
+ cmd_args = ['pull', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ # Hotfix the URL scheme after removing bzr+ from bzr+ssh://: re-add it.
+ url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url)
+ if url.startswith('ssh://'):
+ url = 'bzr+' + url
+ return url, rev, user_pass
+
+ @classmethod
+ def get_remote_url(cls, location):
+ urls = cls.run_command(['info'], show_stdout=False, cwd=location)
+ for line in urls.splitlines():
+ line = line.strip()
+ for x in ('checkout of branch: ',
+ 'parent branch: '):
+ if line.startswith(x):
+ repo = line.split(x)[1]
+ if cls._is_local_repository(repo):
+ return path_to_url(repo)
+ return repo
+ return None
+
+ @classmethod
+ def get_revision(cls, location):
+ revision = cls.run_command(
+ ['revno'], show_stdout=False, cwd=location,
+ )
+ return revision.splitlines()[-1]
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+
+vcs.register(Bazaar)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py
new file mode 100644
index 00000000..3445c1b3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py
@@ -0,0 +1,358 @@
+from __future__ import absolute_import
+
+import logging
+import os.path
+import re
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.compat import samefile
+from pip._internal.utils.misc import display_path, redact_password_from_url
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.vcs.versioncontrol import (
+ RemoteNotFoundError, VersionControl, vcs,
+)
+
+urlsplit = urllib_parse.urlsplit
+urlunsplit = urllib_parse.urlunsplit
+
+
+logger = logging.getLogger(__name__)
+
+
+HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$')
+
+
+def looks_like_hash(sha):
+ return bool(HASH_REGEX.match(sha))
+
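+# Examples:
+#
+#     >>> looks_like_hash('1f' * 20)     # 40 hex characters
+#     True
+#     >>> looks_like_hash('v1.2.3')
+#     False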
+
+class Git(VersionControl):
+ name = 'git'
+ dirname = '.git'
+ repo_name = 'clone'
+ schemes = (
+ 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
+ )
+ # Prevent the user's environment variables from interfering with pip:
+ # https://github.com/pypa/pip/issues/1130
+ unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
+ default_arg_rev = 'HEAD'
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return [rev]
+
+ def get_git_version(self):
+ VERSION_PFX = 'git version '
+ version = self.run_command(['version'], show_stdout=False)
+ if version.startswith(VERSION_PFX):
+ version = version[len(VERSION_PFX):].split()[0]
+ else:
+ version = ''
+ # Get the first 3 positions of the git version because on Windows it
+ # is x.y.z.windows.t, and this parses as a LegacyVersion, which is
+ # always smaller than a Version.
+ version = '.'.join(version.split('.')[:3])
+ return parse_version(version)
+
+ @classmethod
+ def get_current_branch(cls, location):
+ """
+ Return the current branch, or None if HEAD isn't at a branch
+ (e.g. detached HEAD).
+ """
+ # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
+ # HEAD rather than a symbolic ref. In addition, the -q causes the
+ # command to exit with status code 1 instead of 128 in this case
+ # and to suppress the message to stderr.
+ args = ['symbolic-ref', '-q', 'HEAD']
+ output = cls.run_command(
+ args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
+ )
+ ref = output.strip()
+
+ if ref.startswith('refs/heads/'):
+ return ref[len('refs/heads/'):]
+
+ return None
+
+ def export(self, location, url):
+ """Export the Git repository at the url to the destination location"""
+ if not location.endswith('/'):
+ location = location + '/'
+
+ with TempDirectory(kind="export") as temp_dir:
+ self.unpack(temp_dir.path, url=url)
+ self.run_command(
+ ['checkout-index', '-a', '-f', '--prefix', location],
+ show_stdout=False, cwd=temp_dir.path
+ )
+
+ @classmethod
+ def get_revision_sha(cls, dest, rev):
+ """
+ Return (sha_or_none, is_branch), where sha_or_none is a commit hash
+ if the revision names a remote branch or tag, otherwise None.
+
+ Args:
+ dest: the repository directory.
+ rev: the revision name.
+ """
+ # Pass rev to pre-filter the list.
+ output = cls.run_command(['show-ref', rev], cwd=dest,
+ show_stdout=False, on_returncode='ignore')
+ refs = {}
+ for line in output.strip().splitlines():
+ try:
+ sha, ref = line.split()
+ except ValueError:
+ # Include the offending line to simplify troubleshooting if
+ # this error ever occurs.
+ raise ValueError('unexpected show-ref line: {!r}'.format(line))
+
+ refs[ref] = sha
+
+ branch_ref = 'refs/remotes/origin/{}'.format(rev)
+ tag_ref = 'refs/tags/{}'.format(rev)
+
+ sha = refs.get(branch_ref)
+ if sha is not None:
+ return (sha, True)
+
+ sha = refs.get(tag_ref)
+
+ return (sha, False)
+
+ @classmethod
+ def resolve_revision(cls, dest, url, rev_options):
+ """
+ Resolve a revision to a new RevOptions object with the SHA1 of the
+ branch, tag, or ref if found.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ rev = rev_options.arg_rev
+ sha, is_branch = cls.get_revision_sha(dest, rev)
+
+ if sha is not None:
+ rev_options = rev_options.make_new(sha)
+ rev_options.branch_name = rev if is_branch else None
+
+ return rev_options
+
+ # Do not show a warning for the common case of something that has
+ # the form of a Git commit hash.
+ if not looks_like_hash(rev):
+ logger.warning(
+ "Did not find branch or tag '%s', assuming revision or ref.",
+ rev,
+ )
+
+ if not rev.startswith('refs/'):
+ return rev_options
+
+ # If it looks like a ref, we have to fetch it explicitly.
+ cls.run_command(
+ ['fetch', '-q', url] + rev_options.to_args(),
+ cwd=dest,
+ )
+ # Change the revision to the SHA of the ref we fetched
+ sha = cls.get_revision(dest, rev='FETCH_HEAD')
+ rev_options = rev_options.make_new(sha)
+
+ return rev_options
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """
+ Return whether the current commit hash equals the given name.
+
+ Args:
+ dest: the repository directory.
+ name: a string name.
+ """
+ if not name:
+ # Then avoid an unnecessary subprocess call.
+ return False
+
+ return cls.get_revision(dest) == name
+
+ def fetch_new(self, dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Cloning %s%s to %s', redact_password_from_url(url),
+ rev_display, display_path(dest),
+ )
+ self.run_command(['clone', '-q', url, dest])
+
+ if rev_options.rev:
+ # Then a specific revision was requested.
+ rev_options = self.resolve_revision(dest, url, rev_options)
+ branch_name = getattr(rev_options, 'branch_name', None)
+ if branch_name is None:
+ # Only do a checkout if the current commit id doesn't match
+ # the requested revision.
+ if not self.is_commit_id_equal(dest, rev_options.rev):
+ cmd_args = ['checkout', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+ elif self.get_current_branch(dest) != branch_name:
+ # Then a specific branch was requested, and that branch
+ # is not yet checked out.
+ track_branch = 'origin/{}'.format(branch_name)
+ cmd_args = [
+ 'checkout', '-b', branch_name, '--track', track_branch,
+ ]
+ self.run_command(cmd_args, cwd=dest)
+
+        # The repo may contain submodules.
+ self.update_submodules(dest)
+
+ def switch(self, dest, url, rev_options):
+ self.run_command(['config', 'remote.origin.url', url], cwd=dest)
+ cmd_args = ['checkout', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ self.update_submodules(dest)
+
+ def update(self, dest, url, rev_options):
+ # First fetch changes from the default remote
+ if self.get_git_version() >= parse_version('1.9.0'):
+ # fetch tags in addition to everything else
+ self.run_command(['fetch', '-q', '--tags'], cwd=dest)
+ else:
+ self.run_command(['fetch', '-q'], cwd=dest)
+ # Then reset to wanted revision (maybe even origin/master)
+ rev_options = self.resolve_revision(dest, url, rev_options)
+ cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+        # Update submodules.
+ self.update_submodules(dest)
+
+ @classmethod
+ def get_remote_url(cls, location):
+ """
+ Return URL of the first remote encountered.
+
+ Raises RemoteNotFoundError if the repository does not have a remote
+ url configured.
+ """
+ # We need to pass 1 for extra_ok_returncodes since the command
+ # exits with return code 1 if there are no matching lines.
+ stdout = cls.run_command(
+ ['config', '--get-regexp', r'remote\..*\.url'],
+ extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
+ )
+ remotes = stdout.splitlines()
+ try:
+ found_remote = remotes[0]
+ except IndexError:
+ raise RemoteNotFoundError
+
+ for remote in remotes:
+ if remote.startswith('remote.origin.url '):
+ found_remote = remote
+ break
+ url = found_remote.split(' ')[1]
+ return url.strip()
+
+ @classmethod
+ def get_revision(cls, location, rev=None):
+ if rev is None:
+ rev = 'HEAD'
+ current_rev = cls.run_command(
+ ['rev-parse', rev], show_stdout=False, cwd=location,
+ )
+ return current_rev.strip()
+
+ @classmethod
+ def get_subdirectory(cls, location):
+ # find the repo root
+ git_dir = cls.run_command(['rev-parse', '--git-dir'],
+ show_stdout=False, cwd=location).strip()
+ if not os.path.isabs(git_dir):
+ git_dir = os.path.join(location, git_dir)
+ root_dir = os.path.join(git_dir, '..')
+ # find setup.py
+ orig_location = location
+ while not os.path.exists(os.path.join(location, 'setup.py')):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without
+ # finding setup.py
+ logger.warning(
+ "Could not find setup.py for directory %s (tried all "
+ "parent directories)",
+ orig_location,
+ )
+ return None
+ # relative path of setup.py to repo root
+ if samefile(root_dir, location):
+ return None
+ return os.path.relpath(location, root_dir)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ """
+ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+        That's required because although they use SSH they sometimes don't
+        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
+        parsing. Hence we remove it again afterwards and return the URL as
+        a stub.
+ """
+ # Works around an apparent Git bug
+ # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
+ scheme, netloc, path, query, fragment = urlsplit(url)
+ if scheme.endswith('file'):
+ initial_slashes = path[:-len(path.lstrip('/'))]
+ newpath = (
+ initial_slashes +
+ urllib_request.url2pathname(path)
+ .replace('\\', '/').lstrip('/')
+ )
+ url = urlunsplit((scheme, netloc, newpath, query, fragment))
+ after_plus = scheme.find('+') + 1
+ url = scheme[:after_plus] + urlunsplit(
+ (scheme[after_plus:], netloc, newpath, query, fragment),
+ )
+
+ if '://' not in url:
+ assert 'file:' not in url
+ url = url.replace('git+', 'git+ssh://')
+ url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
+ url = url.replace('ssh://', '')
+ else:
+ url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
+
+ return url, rev, user_pass
+
+ @classmethod
+ def update_submodules(cls, location):
+ if not os.path.exists(os.path.join(location, '.gitmodules')):
+ return
+ cls.run_command(
+ ['submodule', 'update', '--init', '--recursive', '-q'],
+ cwd=location,
+ )
+
+ @classmethod
+ def controls_location(cls, location):
+ if super(Git, cls).controls_location(location):
+ return True
+ try:
+ r = cls.run_command(['rev-parse'],
+ cwd=location,
+ show_stdout=False,
+ on_returncode='ignore')
+ return not r
+ except BadCommand:
+ logger.debug("could not determine if %s is under git control "
+ "because git is not available", location)
+ return False
+
+
+vcs.register(Git)
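+
+
+# Illustrative sketch (not part of pip's code): the stub-URL handling in
+# Git.get_url_rev_and_auth for a GitHub-style SSH shorthand. The URL below
+# is hypothetical; run the module directly to inspect.
+if __name__ == '__main__':
+    url, rev, user_pass = Git.get_url_rev_and_auth(
+        'git+git@github.com:user/repo.git@v1.0'
+    )
+    print(url, rev, user_pass)
+    # Expected: 'git@github.com:user/repo.git' 'v1.0' (None, None)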
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py
new file mode 100644
index 00000000..db42783d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py
@@ -0,0 +1,103 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.utils.misc import display_path, path_to_url
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+logger = logging.getLogger(__name__)
+
+
+class Mercurial(VersionControl):
+ name = 'hg'
+ dirname = '.hg'
+ repo_name = 'clone'
+ schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return [rev]
+
+ def export(self, location, url):
+ """Export the Hg repository at the url to the destination location"""
+ with TempDirectory(kind="export") as temp_dir:
+ self.unpack(temp_dir.path, url=url)
+
+ self.run_command(
+ ['archive', location], show_stdout=False, cwd=temp_dir.path
+ )
+
+ def fetch_new(self, dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Cloning hg %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ self.run_command(['clone', '--noupdate', '-q', url, dest])
+ cmd_args = ['update', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ def switch(self, dest, url, rev_options):
+ repo_config = os.path.join(dest, self.dirname, 'hgrc')
+ config = configparser.RawConfigParser()
+ try:
+ config.read(repo_config)
+ config.set('paths', 'default', url)
+ with open(repo_config, 'w') as config_file:
+ config.write(config_file)
+ except (OSError, configparser.NoSectionError) as exc:
+ logger.warning(
+ 'Could not switch Mercurial repository to %s: %s', url, exc,
+ )
+ else:
+ cmd_args = ['update', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ def update(self, dest, url, rev_options):
+ self.run_command(['pull', '-q'], cwd=dest)
+ cmd_args = ['update', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ @classmethod
+ def get_remote_url(cls, location):
+ url = cls.run_command(
+ ['showconfig', 'paths.default'],
+ show_stdout=False, cwd=location).strip()
+ if cls._is_local_repository(url):
+ url = path_to_url(url)
+ return url.strip()
+
+ @classmethod
+ def get_revision(cls, location):
+ """
+        Return the repository-local changeset revision number, as a string.
+ """
+ current_revision = cls.run_command(
+ ['parents', '--template={rev}'],
+ show_stdout=False, cwd=location).strip()
+ return current_revision
+
+ @classmethod
+ def get_requirement_revision(cls, location):
+ """
+ Return the changeset identification hash, as a 40-character
+ hexadecimal string
+ """
+ current_rev_hash = cls.run_command(
+ ['parents', '--template={node}'],
+ show_stdout=False, cwd=location).strip()
+ return current_rev_hash
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+
+vcs.register(Mercurial)
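+
+
+# Illustrative sketch (not part of pip's code): Mercurial inherits the
+# generic URL parsing from VersionControl. The URL below is hypothetical.
+if __name__ == '__main__':
+    url, rev, user_pass = Mercurial.get_url_rev_and_auth(
+        'hg+https://hg.example.com/repo@stable'
+    )
+    print(url, rev, user_pass)
+    # Expected: 'https://hg.example.com/repo' 'stable' (None, None)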
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py
new file mode 100644
index 00000000..6bb4c8c5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py
@@ -0,0 +1,314 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+import sys
+
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ display_path, rmtree, split_auth_from_netloc,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
+
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Tuple
+ from pip._internal.vcs.versioncontrol import RevOptions
+
+logger = logging.getLogger(__name__)
+
+
+class Subversion(VersionControl):
+ name = 'svn'
+ dirname = '.svn'
+ repo_name = 'checkout'
+ schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
+
+ @classmethod
+ def should_add_vcs_url_prefix(cls, remote_url):
+ return True
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return ['-r', rev]
+
+ @classmethod
+ def get_revision(cls, location):
+ """
+ Return the maximum revision for all files under a given location
+ """
+ # Note: taken from setuptools.command.egg_info
+ revision = 0
+
+ for base, dirs, files in os.walk(location):
+ if cls.dirname not in dirs:
+ dirs[:] = []
+ continue # no sense walking uncontrolled subdirs
+ dirs.remove(cls.dirname)
+ entries_fn = os.path.join(base, cls.dirname, 'entries')
+ if not os.path.exists(entries_fn):
+ # FIXME: should we warn?
+ continue
+
+ dirurl, localrev = cls._get_svn_url_rev(base)
+
+ if base == location:
+ base = dirurl + '/' # save the root url
+ elif not dirurl or not dirurl.startswith(base):
+ dirs[:] = []
+ continue # not part of the same svn tree, skip it
+ revision = max(revision, localrev)
+ return revision
+
+ @classmethod
+ def get_netloc_and_auth(cls, netloc, scheme):
+ """
+ This override allows the auth information to be passed to svn via the
+ --username and --password options instead of via the URL.
+ """
+ if scheme == 'ssh':
+ # The --username and --password options can't be used for
+ # svn+ssh URLs, so keep the auth information in the URL.
+ return super(Subversion, cls).get_netloc_and_auth(netloc, scheme)
+
+ return split_auth_from_netloc(netloc)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+        # Hotfix the URL scheme: 'svn+' was stripped from 'svn+ssh://',
+        # so re-add it.
+ url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url)
+ if url.startswith('ssh://'):
+ url = 'svn+' + url
+ return url, rev, user_pass
+
+ @staticmethod
+ def make_rev_args(username, password):
+ extra_args = []
+ if username:
+ extra_args += ['--username', username]
+ if password:
+ extra_args += ['--password', password]
+
+ return extra_args
+
+ @classmethod
+ def get_remote_url(cls, location):
+        # In cases where the source is in a subdirectory rather than alongside
+        # setup.py, walk up from the location until we find a real setup.py.
+ orig_location = location
+ while not os.path.exists(os.path.join(location, 'setup.py')):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without
+ # finding setup.py
+ logger.warning(
+ "Could not find setup.py for directory %s (tried all "
+ "parent directories)",
+ orig_location,
+ )
+ return None
+
+ return cls._get_svn_url_rev(location)[0]
+
+ @classmethod
+ def _get_svn_url_rev(cls, location):
+ from pip._internal.exceptions import InstallationError
+
+ entries_path = os.path.join(location, cls.dirname, 'entries')
+ if os.path.exists(entries_path):
+ with open(entries_path) as f:
+ data = f.read()
+ else: # subversion >= 1.7 does not have the 'entries' file
+ data = ''
+
+ if (data.startswith('8') or
+ data.startswith('9') or
+ data.startswith('10')):
+ data = list(map(str.splitlines, data.split('\n\x0c\n')))
+ del data[0][0] # get rid of the '8'
+ url = data[0][3]
+ revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
+ elif data.startswith('<?xml'):
+ match = _svn_xml_url_re.search(data)
+ if not match:
+ raise ValueError('Badly formatted data: %r' % data)
+ url = match.group(1) # get repository URL
+ revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+ else:
+ try:
+ # subversion >= 1.7
+ # Note that using get_remote_call_options is not necessary here
+ # because `svn info` is being run against a local directory.
+ # We don't need to worry about making sure interactive mode
+ # is being used to prompt for passwords, because passwords
+ # are only potentially needed for remote server requests.
+ xml = cls.run_command(
+ ['info', '--xml', location],
+ show_stdout=False,
+ )
+ url = _svn_info_xml_url_re.search(xml).group(1)
+ revs = [
+ int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
+ ]
+ except InstallationError:
+ url, revs = None, []
+
+ if revs:
+ rev = max(revs)
+ else:
+ rev = 0
+
+ return url, rev
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+ def __init__(self, use_interactive=None):
+ # type: (bool) -> None
+ if use_interactive is None:
+ use_interactive = sys.stdin.isatty()
+ self.use_interactive = use_interactive
+
+ # This member is used to cache the fetched version of the current
+ # ``svn`` client.
+ # Special value definitions:
+ # None: Not evaluated yet.
+ # Empty tuple: Could not parse version.
+ self._vcs_version = None # type: Optional[Tuple[int, ...]]
+
+ super(Subversion, self).__init__()
+
+ def call_vcs_version(self):
+ # type: () -> Tuple[int, ...]
+ """Query the version of the currently installed Subversion client.
+
+ :return: A tuple containing the parts of the version information or
+ ``()`` if the version returned from ``svn`` could not be parsed.
+ :raises: BadCommand: If ``svn`` is not installed.
+ """
+ # Example versions:
+ # svn, version 1.10.3 (r1842928)
+ # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
+ # svn, version 1.7.14 (r1542130)
+ # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
+ version_prefix = 'svn, version '
+ version = self.run_command(['--version'], show_stdout=False)
+ if not version.startswith(version_prefix):
+ return ()
+
+ version = version[len(version_prefix):].split()[0]
+ version_list = version.split('.')
+ try:
+ parsed_version = tuple(map(int, version_list))
+ except ValueError:
+ return ()
+
+ return parsed_version
+
+ def get_vcs_version(self):
+ # type: () -> Tuple[int, ...]
+ """Return the version of the currently installed Subversion client.
+
+ If the version of the Subversion client has already been queried,
+ a cached value will be used.
+
+ :return: A tuple containing the parts of the version information or
+ ``()`` if the version returned from ``svn`` could not be parsed.
+ :raises: BadCommand: If ``svn`` is not installed.
+ """
+ if self._vcs_version is not None:
+ # Use cached version, if available.
+ # If parsing the version failed previously (empty tuple),
+ # do not attempt to parse it again.
+ return self._vcs_version
+
+ vcs_version = self.call_vcs_version()
+ self._vcs_version = vcs_version
+ return vcs_version
+
+ def get_remote_call_options(self):
+ # type: () -> List[str]
+ """Return options to be used on calls to Subversion that contact the server.
+
+ These options are applicable for the following ``svn`` subcommands used
+ in this class.
+
+ - checkout
+ - export
+ - switch
+ - update
+
+ :return: A list of command line arguments to pass to ``svn``.
+ """
+ if not self.use_interactive:
+ # --non-interactive switch is available since Subversion 0.14.4.
+ # Subversion < 1.8 runs in interactive mode by default.
+ return ['--non-interactive']
+
+ svn_version = self.get_vcs_version()
+ # By default, Subversion >= 1.8 runs in non-interactive mode if
+ # stdin is not a TTY. Since that is how pip invokes SVN, in
+ # call_subprocess(), pip must pass --force-interactive to ensure
+ # the user can be prompted for a password, if required.
+ # SVN added the --force-interactive option in SVN 1.8. Since
+ # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
+ # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
+ # can't safely add the option if the SVN version is < 1.8 (or unknown).
+ if svn_version >= (1, 8):
+ return ['--force-interactive']
+
+ return []
+
+ def export(self, location, url):
+ """Export the svn repository at the url to the destination location"""
+ url, rev_options = self.get_url_rev_options(url)
+
+ logger.info('Exporting svn repository %s to %s', url, location)
+ with indent_log():
+ if os.path.exists(location):
+ # Subversion doesn't like to check out over an existing
+ # directory --force fixes this, but was only added in svn 1.5
+ rmtree(location)
+ cmd_args = (['export'] + self.get_remote_call_options() +
+ rev_options.to_args() + [url, location])
+ self.run_command(cmd_args, show_stdout=False)
+
+ def fetch_new(self, dest, url, rev_options):
+ # type: (str, str, RevOptions) -> None
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Checking out %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ cmd_args = (['checkout', '-q'] +
+ self.get_remote_call_options() +
+ rev_options.to_args() + [url, dest])
+ self.run_command(cmd_args)
+
+ def switch(self, dest, url, rev_options):
+ # type: (str, str, RevOptions) -> None
+ cmd_args = (['switch'] + self.get_remote_call_options() +
+ rev_options.to_args() + [url, dest])
+ self.run_command(cmd_args)
+
+ def update(self, dest, url, rev_options):
+ # type: (str, str, RevOptions) -> None
+ cmd_args = (['update'] + self.get_remote_call_options() +
+ rev_options.to_args() + [dest])
+ self.run_command(cmd_args)
+
+
+vcs.register(Subversion)
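+
+
+# Illustrative sketch (not part of pip's code): credentials are split out of
+# the netloc so they can be passed as --username/--password options instead
+# of staying in the URL. All values below are hypothetical.
+if __name__ == '__main__':
+    url, rev, user_pass = Subversion.get_url_rev_and_auth(
+        'svn+https://user:secret@svn.example.com/repo@123'
+    )
+    print(url, rev, user_pass)
+    # Expected: 'https://svn.example.com/repo' '123' ('user', 'secret')
+    print(Subversion.make_rev_args(*user_pass))
+    # Expected: ['--username', 'user', '--password', 'secret']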
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py
new file mode 100644
index 00000000..2d05fc13
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py
@@ -0,0 +1,600 @@
+"""Handles all VCS (version control) support"""
+from __future__ import absolute_import
+
+import errno
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor import pkg_resources
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.misc import (
+ ask_path_exists, backup_dir, call_subprocess, display_path, rmtree,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type
+ )
+ from pip._internal.utils.ui import SpinnerInterface
+
+ AuthInfo = Tuple[Optional[str], Optional[str]]
+
+__all__ = ['vcs']
+
+
+logger = logging.getLogger(__name__)
+
+
+def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
+ """
+ Return the URL for a VCS requirement.
+
+ Args:
+ repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
+ project_name: the (unescaped) project name.
+ """
+ egg_project_name = pkg_resources.to_filename(project_name)
+ req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name)
+ if subdir:
+ req += '&subdirectory={}'.format(subdir)
+
+ return req
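+
+
+# Illustrative example (not part of pip's code): the repo URL and project
+# name below are hypothetical.
+if __name__ == '__main__':
+    print(make_vcs_requirement_url(
+        'git+https://example.com/repo.git', 'abc123', 'my-project',
+        subdir='pkg',
+    ))
+    # -> git+https://example.com/repo.git@abc123#egg=my_project&subdirectory=pkg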
+
+
+class RemoteNotFoundError(Exception):
+ pass
+
+
+class RevOptions(object):
+
+ """
+ Encapsulates a VCS-specific revision to install, along with any VCS
+ install options.
+
+ Instances of this class should be treated as if immutable.
+ """
+
+ def __init__(
+ self,
+ vc_class, # type: Type[VersionControl]
+ rev=None, # type: Optional[str]
+ extra_args=None, # type: Optional[List[str]]
+ ):
+ # type: (...) -> None
+ """
+ Args:
+ vc_class: a VersionControl subclass.
+ rev: the name of the revision to install.
+ extra_args: a list of extra options.
+ """
+ if extra_args is None:
+ extra_args = []
+
+ self.extra_args = extra_args
+ self.rev = rev
+ self.vc_class = vc_class
+
+ def __repr__(self):
+ return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev)
+
+ @property
+ def arg_rev(self):
+ # type: () -> Optional[str]
+ if self.rev is None:
+ return self.vc_class.default_arg_rev
+
+ return self.rev
+
+ def to_args(self):
+ # type: () -> List[str]
+ """
+ Return the VCS-specific command arguments.
+ """
+ args = [] # type: List[str]
+ rev = self.arg_rev
+ if rev is not None:
+ args += self.vc_class.get_base_rev_args(rev)
+ args += self.extra_args
+
+ return args
+
+ def to_display(self):
+ # type: () -> str
+ if not self.rev:
+ return ''
+
+ return ' (to revision {})'.format(self.rev)
+
+ def make_new(self, rev):
+ # type: (str) -> RevOptions
+ """
+ Make a copy of the current instance, but with a new rev.
+
+ Args:
+ rev: the name of the revision for the new object.
+ """
+ return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
+
+
+class VcsSupport(object):
+ _registry = {} # type: Dict[str, VersionControl]
+ schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
+
+ def __init__(self):
+ # type: () -> None
+ # Register more schemes with urlparse for various version control
+ # systems
+ urllib_parse.uses_netloc.extend(self.schemes)
+        # Python >= 2.7.4 and >= 3.3 don't have uses_fragment, so guard it.
+ if getattr(urllib_parse, 'uses_fragment', None):
+ urllib_parse.uses_fragment.extend(self.schemes)
+ super(VcsSupport, self).__init__()
+
+ def __iter__(self):
+ return self._registry.__iter__()
+
+ @property
+ def backends(self):
+ # type: () -> List[VersionControl]
+ return list(self._registry.values())
+
+ @property
+ def dirnames(self):
+ # type: () -> List[str]
+ return [backend.dirname for backend in self.backends]
+
+ @property
+ def all_schemes(self):
+ # type: () -> List[str]
+ schemes = [] # type: List[str]
+ for backend in self.backends:
+ schemes.extend(backend.schemes)
+ return schemes
+
+ def register(self, cls):
+ # type: (Type[VersionControl]) -> None
+ if not hasattr(cls, 'name'):
+ logger.warning('Cannot register VCS %s', cls.__name__)
+ return
+ if cls.name not in self._registry:
+ self._registry[cls.name] = cls()
+ logger.debug('Registered VCS backend: %s', cls.name)
+
+ def unregister(self, name):
+ # type: (str) -> None
+ if name in self._registry:
+ del self._registry[name]
+
+ def get_backend_for_dir(self, location):
+ # type: (str) -> Optional[VersionControl]
+ """
+ Return a VersionControl object if a repository of that type is found
+ at the given directory.
+ """
+ for vcs_backend in self._registry.values():
+ if vcs_backend.controls_location(location):
+                logger.debug('Determined that %s uses VCS: %s',
+                             location, vcs_backend.name)
+ return vcs_backend
+ return None
+
+ def get_backend(self, name):
+ # type: (str) -> Optional[VersionControl]
+ """
+ Return a VersionControl object or None.
+ """
+ name = name.lower()
+ return self._registry.get(name)
+
+
+vcs = VcsSupport()
+
+
+class VersionControl(object):
+ name = ''
+ dirname = ''
+ repo_name = ''
+ # List of supported schemes for this Version Control
+ schemes = () # type: Tuple[str, ...]
+    # Iterable of environment variable names for call_subprocess() to unset.
+ unset_environ = () # type: Tuple[str, ...]
+ default_arg_rev = None # type: Optional[str]
+
+ @classmethod
+ def should_add_vcs_url_prefix(cls, remote_url):
+ """
+ Return whether the vcs prefix (e.g. "git+") should be added to a
+ repository's remote url when used in a requirement.
+ """
+ return not remote_url.lower().startswith('{}:'.format(cls.name))
+
+ @classmethod
+ def get_subdirectory(cls, repo_dir):
+ """
+ Return the path to setup.py, relative to the repo root.
+ """
+ return None
+
+ @classmethod
+ def get_requirement_revision(cls, repo_dir):
+ """
+ Return the revision string that should be used in a requirement.
+ """
+ return cls.get_revision(repo_dir)
+
+ @classmethod
+ def get_src_requirement(cls, repo_dir, project_name):
+ """
+ Return the requirement string to use to redownload the files
+ currently at the given repository directory.
+
+ Args:
+ project_name: the (unescaped) project name.
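+
+
+# Illustrative example (not part of pip's code): looks_like_hash() accepts
+# only full 40-character hexadecimal strings.
+if __name__ == '__main__':
+    print(looks_like_hash('a' * 40))  # True
+    print(looks_like_hash('v1.0'))    # False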
+
+ The return value has a form similar to the following:
+
+ {repository_url}@{revision}#egg={project_name}
+ """
+ repo_url = cls.get_remote_url(repo_dir)
+ if repo_url is None:
+ return None
+
+ if cls.should_add_vcs_url_prefix(repo_url):
+ repo_url = '{}+{}'.format(cls.name, repo_url)
+
+ revision = cls.get_requirement_revision(repo_dir)
+ subdir = cls.get_subdirectory(repo_dir)
+ req = make_vcs_requirement_url(repo_url, revision, project_name,
+ subdir=subdir)
+
+ return req
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ """
+ Return the base revision arguments for a vcs command.
+
+ Args:
+ rev: the name of a revision to install. Cannot be None.
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def make_rev_options(cls, rev=None, extra_args=None):
+ # type: (Optional[str], Optional[List[str]]) -> RevOptions
+ """
+ Return a RevOptions object.
+
+ Args:
+ rev: the name of a revision to install.
+ extra_args: a list of extra options.
+ """
+ return RevOptions(cls, rev, extra_args=extra_args)
+
+ @classmethod
+ def _is_local_repository(cls, repo):
+ # type: (str) -> bool
+ """
+        POSIX absolute paths start with os.path.sep;
+        win32 ones start with a drive letter (like c:\\folder).
+ """
+ drive, tail = os.path.splitdrive(repo)
+ return repo.startswith(os.path.sep) or bool(drive)
+
+ def export(self, location, url):
+ """
+ Export the repository at the url to the destination location
+        i.e. only download the files, without VCS information
+
+ :param url: the repository URL starting with a vcs prefix.
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def get_netloc_and_auth(cls, netloc, scheme):
+ """
+ Parse the repository URL's netloc, and return the new netloc to use
+ along with auth information.
+
+ Args:
+ netloc: the original repository URL netloc.
+ scheme: the repository URL's scheme without the vcs prefix.
+
+ This is mainly for the Subversion class to override, so that auth
+ information can be provided via the --username and --password options
+ instead of through the URL. For other subclasses like Git without
+ such an option, auth information must stay in the URL.
+
+ Returns: (netloc, (username, password)).
+ """
+ return netloc, (None, None)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ # type: (str) -> Tuple[str, Optional[str], AuthInfo]
+ """
+ Parse the repository URL to use, and return the URL, revision,
+ and auth info to use.
+
+ Returns: (url, rev, (username, password)).
+ """
+ scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
+ if '+' not in scheme:
+ raise ValueError(
+ "Sorry, {!r} is a malformed VCS url. "
+ "The format is <vcs>+<protocol>://<url>, "
+ "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
+ )
+ # Remove the vcs prefix.
+ scheme = scheme.split('+', 1)[1]
+ netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
+ rev = None
+ if '@' in path:
+ path, rev = path.rsplit('@', 1)
+ url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
+ return url, rev, user_pass
+
+ @staticmethod
+ def make_rev_args(username, password):
+ """
+ Return the RevOptions "extra arguments" to use in obtain().
+ """
+ return []
+
+ def get_url_rev_options(self, url):
+ # type: (str) -> Tuple[str, RevOptions]
+ """
+ Return the URL and RevOptions object to use in obtain() and in
+ some cases export(), as a tuple (url, rev_options).
+ """
+ url, rev, user_pass = self.get_url_rev_and_auth(url)
+ username, password = user_pass
+ extra_args = self.make_rev_args(username, password)
+ rev_options = self.make_rev_options(rev, extra_args=extra_args)
+
+ return url, rev_options
+
+ @staticmethod
+ def normalize_url(url):
+ # type: (str) -> str
+ """
+ Normalize a URL for comparison by unquoting it and removing any
+ trailing slash.
+ """
+ return urllib_parse.unquote(url).rstrip('/')
+
+ @classmethod
+ def compare_urls(cls, url1, url2):
+ # type: (str, str) -> bool
+ """
+ Compare two repo URLs for identity, ignoring incidental differences.
+ """
+ return (cls.normalize_url(url1) == cls.normalize_url(url2))
+
+ def fetch_new(self, dest, url, rev_options):
+ """
+ Fetch a revision from a repository, in the case that this is the
+ first fetch from the repository.
+
+ Args:
+ dest: the directory to fetch the repository to.
+ rev_options: a RevOptions object.
+ """
+ raise NotImplementedError
+
+ def switch(self, dest, url, rev_options):
+ """
+        Switch the repo at ``dest`` to point to ``url``.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ raise NotImplementedError
+
+ def update(self, dest, url, rev_options):
+ """
+ Update an already-existing repo to the given ``rev_options``.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """
+ Return whether the id of the current commit equals the given name.
+
+ Args:
+ dest: the repository directory.
+ name: a string name.
+ """
+ raise NotImplementedError
+
+ def obtain(self, dest, url):
+ # type: (str, str) -> None
+ """
+ Install or update in editable mode the package represented by this
+ VersionControl object.
+
+ :param dest: the repository directory in which to install or update.
+ :param url: the repository URL starting with a vcs prefix.
+ """
+ url, rev_options = self.get_url_rev_options(url)
+
+ if not os.path.exists(dest):
+ self.fetch_new(dest, url, rev_options)
+ return
+
+ rev_display = rev_options.to_display()
+ if self.is_repository_directory(dest):
+ existing_url = self.get_remote_url(dest)
+ if self.compare_urls(existing_url, url):
+ logger.debug(
+ '%s in %s exists, and has correct URL (%s)',
+ self.repo_name.title(),
+ display_path(dest),
+ url,
+ )
+ if not self.is_commit_id_equal(dest, rev_options.rev):
+ logger.info(
+ 'Updating %s %s%s',
+ display_path(dest),
+ self.repo_name,
+ rev_display,
+ )
+ self.update(dest, url, rev_options)
+ else:
+ logger.info('Skipping because already up-to-date.')
+ return
+
+ logger.warning(
+ '%s %s in %s exists with URL %s',
+ self.name,
+ self.repo_name,
+ display_path(dest),
+ existing_url,
+ )
+ prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
+ ('s', 'i', 'w', 'b'))
+ else:
+ logger.warning(
+ 'Directory %s already exists, and is not a %s %s.',
+ dest,
+ self.name,
+ self.repo_name,
+ )
+ # https://github.com/python/mypy/issues/1174
+ prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore
+ ('i', 'w', 'b'))
+
+ logger.warning(
+ 'The plan is to install the %s repository %s',
+ self.name,
+ url,
+ )
+ response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])
+
+ if response == 'a':
+ sys.exit(-1)
+
+ if response == 'w':
+ logger.warning('Deleting %s', display_path(dest))
+ rmtree(dest)
+ self.fetch_new(dest, url, rev_options)
+ return
+
+ if response == 'b':
+ dest_dir = backup_dir(dest)
+ logger.warning(
+ 'Backing up %s to %s', display_path(dest), dest_dir,
+ )
+ shutil.move(dest, dest_dir)
+ self.fetch_new(dest, url, rev_options)
+ return
+
+ # Do nothing if the response is "i".
+ if response == 's':
+ logger.info(
+ 'Switching %s %s to %s%s',
+ self.repo_name,
+ display_path(dest),
+ url,
+ rev_display,
+ )
+ self.switch(dest, url, rev_options)
+
+ def unpack(self, location, url):
+ # type: (str, str) -> None
+ """
+        Clean up the current location and download the repository at ``url``
+        (including VCS info) into ``location``.
+
+ :param url: the repository URL starting with a vcs prefix.
+ """
+ if os.path.exists(location):
+ rmtree(location)
+ self.obtain(location, url=url)
+
+ @classmethod
+ def get_remote_url(cls, location):
+ """
+ Return the url used at location
+
+ Raises RemoteNotFoundError if the repository does not have a remote
+ url configured.
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def get_revision(cls, location):
+ """
+ Return the current commit id of the files at the given location.
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def run_command(
+ cls,
+ cmd, # type: List[str]
+ show_stdout=True, # type: bool
+ cwd=None, # type: Optional[str]
+ on_returncode='raise', # type: str
+ extra_ok_returncodes=None, # type: Optional[Iterable[int]]
+ command_desc=None, # type: Optional[str]
+ extra_environ=None, # type: Optional[Mapping[str, Any]]
+ spinner=None # type: Optional[SpinnerInterface]
+ ):
+ # type: (...) -> Text
+ """
+        Run a VCS subcommand.
+
+        This is simply a wrapper around call_subprocess that adds the VCS
+        command name and checks that the VCS is available.
+ """
+ cmd = [cls.name] + cmd
+ try:
+ return call_subprocess(cmd, show_stdout, cwd,
+ on_returncode=on_returncode,
+ extra_ok_returncodes=extra_ok_returncodes,
+ command_desc=command_desc,
+ extra_environ=extra_environ,
+ unset_environ=cls.unset_environ,
+ spinner=spinner)
+ except OSError as e:
+ # errno.ENOENT = no such file or directory
+ # In other words, the VCS executable isn't available
+ if e.errno == errno.ENOENT:
+ raise BadCommand(
+ 'Cannot find command %r - do you have '
+ '%r installed and in your '
+ 'PATH?' % (cls.name, cls.name))
+ else:
+ raise # re-raise exception if a different error occurred
+
+ @classmethod
+ def is_repository_directory(cls, path):
+ # type: (str) -> bool
+ """
+ Return whether a directory path is a repository directory.
+ """
+ logger.debug('Checking in %s for %s (%s)...',
+ path, cls.dirname, cls.name)
+ return os.path.exists(os.path.join(path, cls.dirname))
+
+ @classmethod
+ def controls_location(cls, location):
+ # type: (str) -> bool
+ """
+ Check if a location is controlled by the vcs.
+ It is meant to be overridden to implement smarter detection
+ mechanisms for specific vcs.
+
+ This can do more than is_repository_directory() alone. For example,
+ the Git override checks that Git is actually available.
+ """
+ return cls.is_repository_directory(location)
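+
+
+# Illustrative sketch (not part of pip's code): a minimal, hypothetical
+# VersionControl subclass showing how make_rev_options()/RevOptions compose
+# command-line arguments.
+if __name__ == '__main__':
+    class Demo(VersionControl):
+        name = 'demo'
+
+        @staticmethod
+        def get_base_rev_args(rev):
+            return ['-r', rev]
+
+    opts = Demo.make_rev_options('abc123', extra_args=['--quiet'])
+    print(opts.to_args())     # ['-r', 'abc123', '--quiet']
+    print(opts.to_display())  # ' (to revision abc123)'
+    print(opts.make_new('def456').rev)  # 'def456'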
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py
new file mode 100644
index 00000000..6f034cd0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py
@@ -0,0 +1,1125 @@
+"""
+Support for installing and building the "wheel" binary package format.
+"""
+from __future__ import absolute_import
+
+import collections
+import compileall
+import csv
+import hashlib
+import logging
+import os.path
+import re
+import shutil
+import stat
+import sys
+import warnings
+from base64 import urlsafe_b64encode
+from email.parser import Parser
+
+from pip._vendor import pkg_resources
+from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.six import StringIO
+
+from pip._internal import pep425tags
+from pip._internal.download import unpack_url
+from pip._internal.exceptions import (
+ InstallationError, InvalidWheelFilename, UnsupportedWheel,
+)
+from pip._internal.locations import distutils_scheme
+from pip._internal.models.link import Link
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.marker_files import PIP_DELETE_MARKER_FILENAME
+from pip._internal.utils.misc import (
+ LOG_DIVIDER, call_subprocess, captured_stdout, ensure_dir,
+ format_command_args, path_to_url, read_chunks,
+)
+from pip._internal.utils.setuptools_build import make_setuptools_shim_args
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any, Iterable
+ )
+ from pip._vendor.packaging.requirements import Requirement
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.download import PipSession
+ from pip._internal.index import FormatControl, PackageFinder
+ from pip._internal.operations.prepare import (
+ RequirementPreparer
+ )
+ from pip._internal.cache import WheelCache
+ from pip._internal.pep425tags import Pep425Tag
+
+ InstalledCSVRow = Tuple[str, ...]
+
+
+VERSION_COMPATIBLE = (1, 0)
+
+
+logger = logging.getLogger(__name__)
+
+
+def normpath(src, p):
+ return os.path.relpath(src, p).replace(os.path.sep, '/')
+
+
+def hash_file(path, blocksize=1 << 20):
+ # type: (str, int) -> Tuple[Any, int]
+ """Return (hash, length) for path using hashlib.sha256()"""
+ h = hashlib.sha256()
+ length = 0
+ with open(path, 'rb') as f:
+ for block in read_chunks(f, size=blocksize):
+ length += len(block)
+ h.update(block)
+ return (h, length) # type: ignore
+
+
+def rehash(path, blocksize=1 << 20):
+ # type: (str, int) -> Tuple[str, str]
+ """Return (encoded_digest, length) for path using hashlib.sha256()"""
+ h, length = hash_file(path, blocksize)
+ digest = 'sha256=' + urlsafe_b64encode(
+ h.digest()
+ ).decode('latin1').rstrip('=')
+    # str() here avoids unicode/str issues on Python 2.
+ return (digest, str(length)) # type: ignore
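+
+
+# Illustrative example (not part of pip's code): hashing a small temporary
+# file. The digest value depends on the contents; run manually to inspect.
+if __name__ == '__main__':
+    import tempfile
+    with tempfile.NamedTemporaryFile(delete=False) as f:
+        f.write(b'print("hi")\n')
+    print(rehash(f.name))  # e.g. ('sha256=...', '12')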
+
+
+def open_for_csv(name, mode):
+ # type: (str, Text) -> IO
+ if sys.version_info[0] < 3:
+ nl = {} # type: Dict[str, Any]
+ bin = 'b'
+ else:
+ nl = {'newline': ''} # type: Dict[str, Any]
+ bin = ''
+ return open(name, mode + bin, **nl)
+
+
+def replace_python_tag(wheelname, new_tag):
+ # type: (str, str) -> str
+ """Replace the Python tag in a wheel file name with a new value.
+ """
+ parts = wheelname.split('-')
+ parts[-3] = new_tag
+ return '-'.join(parts)
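+
+
+# Illustrative example (not part of pip's code): the Python tag is the third
+# component from the end of a wheel file name.
+if __name__ == '__main__':
+    print(replace_python_tag('foo-1.0-py2-none-any.whl', 'py3'))
+    # -> foo-1.0-py3-none-any.whl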
+
+
+def fix_script(path):
+ # type: (str) -> Optional[bool]
+    """Replace '#!python' with '#!/path/to/python'.
+
+    Return True if the file was changed, False if no rewrite was needed,
+    and None if path is not a file."""
+ # XXX RECORD hashes will need to be updated
+ if os.path.isfile(path):
+ with open(path, 'rb') as script:
+ firstline = script.readline()
+ if not firstline.startswith(b'#!python'):
+ return False
+ exename = sys.executable.encode(sys.getfilesystemencoding())
+ firstline = b'#!' + exename + os.linesep.encode("ascii")
+ rest = script.read()
+ with open(path, 'wb') as script:
+ script.write(firstline)
+ script.write(rest)
+ return True
+ return None
+
+
+dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
+ \.dist-info$""", re.VERBOSE)
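+
+
+# Illustrative example (not part of pip's code): how dist_info_re splits a
+# .dist-info directory name ('my_pkg' is hypothetical).
+if __name__ == '__main__':
+    m = dist_info_re.match('my_pkg-1.0.dist-info')
+    print(m.group('name'), m.group('ver'))  # my_pkg 1.0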
+
+
+def root_is_purelib(name, wheeldir):
+ # type: (str, str) -> bool
+ """
+ Return True if the extracted wheel in wheeldir should go into purelib.
+ """
+ name_folded = name.replace("-", "_")
+ for item in os.listdir(wheeldir):
+ match = dist_info_re.match(item)
+ if match and match.group('name') == name_folded:
+ with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
+ for line in wheel:
+ line = line.lower().rstrip()
+ if line == "root-is-purelib: true":
+ return True
+ return False
+
+
+def get_entrypoints(filename):
+ # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
+ if not os.path.exists(filename):
+ return {}, {}
+
+ # This is done because you can pass a string to entry_points wrappers which
+ # means that they may or may not be valid INI files. The attempt here is to
+ # strip leading and trailing whitespace in order to make them valid INI
+ # files.
+ with open(filename) as fp:
+ data = StringIO()
+ for line in fp:
+ data.write(line.strip())
+ data.write("\n")
+ data.seek(0)
+
+ # get the entry points and then the script names
+ entry_points = pkg_resources.EntryPoint.parse_map(data)
+ console = entry_points.get('console_scripts', {})
+ gui = entry_points.get('gui_scripts', {})
+
+ def _split_ep(s):
+ """get the string representation of EntryPoint, remove space and split
+ on '='"""
+ return str(s).replace(" ", "").split("=")
+
+ # convert the EntryPoint objects into strings with module:function
+ console = dict(_split_ep(v) for v in console.values())
+ gui = dict(_split_ep(v) for v in gui.values())
+ return console, gui
+
+
+def message_about_scripts_not_on_PATH(scripts):
+ # type: (Sequence[str]) -> Optional[str]
+ """Determine if any scripts are not on PATH and format a warning.
+
+ Returns a warning message if one or more scripts are not on PATH,
+ otherwise None.
+ """
+ if not scripts:
+ return None
+
+ # Group scripts by the path they were installed in
+ grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set]
+ for destfile in scripts:
+ parent_dir = os.path.dirname(destfile)
+ script_name = os.path.basename(destfile)
+ grouped_by_dir[parent_dir].add(script_name)
+
+ # We don't want to warn for directories that are on PATH.
+ not_warn_dirs = [
+ os.path.normcase(i).rstrip(os.sep) for i in
+ os.environ.get("PATH", "").split(os.pathsep)
+ ]
+ # If an executable sits with sys.executable, we don't warn for it.
+ # This covers the case of venv invocations without activating the venv.
+ not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
+ warn_for = {
+ parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
+ if os.path.normcase(parent_dir) not in not_warn_dirs
+ }
+ if not warn_for:
+ return None
+
+ # Format a message
+ msg_lines = []
+ for parent_dir, scripts in warn_for.items():
+ sorted_scripts = sorted(scripts) # type: List[str]
+ if len(sorted_scripts) == 1:
+ start_text = "script {} is".format(sorted_scripts[0])
+ else:
+ start_text = "scripts {} are".format(
+ ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
+ )
+
+ msg_lines.append(
+ "The {} installed in '{}' which is not on PATH."
+ .format(start_text, parent_dir)
+ )
+
+ last_line_fmt = (
+ "Consider adding {} to PATH or, if you prefer "
+ "to suppress this warning, use --no-warn-script-location."
+ )
+ if len(msg_lines) == 1:
+ msg_lines.append(last_line_fmt.format("this directory"))
+ else:
+ msg_lines.append(last_line_fmt.format("these directories"))
+
+ # Returns the formatted multiline message
+ return "\n".join(msg_lines)
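+
+
+# Illustrative example (not part of pip's code): the path below is
+# hypothetical and assumed not to be on PATH, so a warning is returned.
+if __name__ == '__main__':
+    print(message_about_scripts_not_on_PATH(['/nonexistent/bin/mytool']))
+    # Prints a warning like: "The script mytool is installed in
+    # '/nonexistent/bin' which is not on PATH. ..."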
+
+
+def sorted_outrows(outrows):
+ # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
+ """
+ Return the given rows of a RECORD file in sorted order.
+
+ Each row is a 3-tuple (path, hash, size) and corresponds to a record of
+ a RECORD file (see PEP 376 and PEP 427 for details). For the rows
+ passed to this function, the size can be an integer as an int or string,
+ or the empty string.
+ """
+ # Normally, there should only be one row per path, in which case the
+ # second and third elements don't come into play when sorting.
+ # However, in cases in the wild where a path might happen to occur twice,
+ # we don't want the sort operation to trigger an error (but still want
+ # determinism). Since the third element can be an int or string, we
+ # coerce each element to a string to avoid a TypeError in this case.
+ # For additional background, see--
+ # https://github.com/pypa/pip/issues/5868
+ return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
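+
+
+# Illustrative example (not part of pip's code): sizes may be ints or
+# strings, so rows are compared as tuples of strings to avoid a TypeError.
+if __name__ == '__main__':
+    rows = [('b.py', 'sha256=xx', 10), ('a.py', 'sha256=yy', '2')]
+    print(sorted_outrows(rows))
+    # -> [('a.py', 'sha256=yy', '2'), ('b.py', 'sha256=xx', 10)]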
+
+
+def get_csv_rows_for_installed(
+ old_csv_rows, # type: Iterable[List[str]]
+ installed, # type: Dict[str, str]
+ changed, # type: set
+ generated, # type: List[str]
+ lib_dir, # type: str
+):
+ # type: (...) -> List[InstalledCSVRow]
+ """
+ :param installed: A map from archive RECORD path to installation RECORD
+ path.
+ """
+ installed_rows = [] # type: List[InstalledCSVRow]
+ for row in old_csv_rows:
+ if len(row) > 3:
+ logger.warning(
+ 'RECORD line has more than three elements: {}'.format(row)
+ )
+ # Make a copy because we are mutating the row.
+ row = list(row)
+ old_path = row[0]
+ new_path = installed.pop(old_path, old_path)
+ row[0] = new_path
+ if new_path in changed:
+ digest, length = rehash(new_path)
+ row[1] = digest
+ row[2] = length
+ installed_rows.append(tuple(row))
+ for f in generated:
+ digest, length = rehash(f)
+ installed_rows.append((normpath(f, lib_dir), digest, str(length)))
+ for f in installed:
+ installed_rows.append((installed[f], '', ''))
+ return installed_rows
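+
+
+# Illustrative example (not part of pip's code): an archive RECORD path is
+# mapped to its installed path; nothing is rehashed because nothing changed.
+# All names below are hypothetical.
+if __name__ == '__main__':
+    print(get_csv_rows_for_installed(
+        [['pkg/__init__.py', 'sha256=abc', '20']],
+        installed={'pkg/__init__.py': 'pkg/__init__.py'},
+        changed=set(),
+        generated=[],
+        lib_dir='/tmp',
+    ))
+    # -> [('pkg/__init__.py', 'sha256=abc', '20')]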
+
+
+def move_wheel_files(
+ name, # type: str
+ req, # type: Requirement
+ wheeldir, # type: str
+ user=False, # type: bool
+ home=None, # type: Optional[str]
+ root=None, # type: Optional[str]
+ pycompile=True, # type: bool
+ scheme=None, # type: Optional[Mapping[str, str]]
+ isolated=False, # type: bool
+ prefix=None, # type: Optional[str]
+ warn_script_location=True # type: bool
+):
+ # type: (...) -> None
+ """Install a wheel"""
+ # TODO: Investigate and break this up.
+ # TODO: Look into moving this into a dedicated class for representing an
+ # installation.
+
+ if not scheme:
+ scheme = distutils_scheme(
+ name, user=user, home=home, root=root, isolated=isolated,
+ prefix=prefix,
+ )
+
+ if root_is_purelib(name, wheeldir):
+ lib_dir = scheme['purelib']
+ else:
+ lib_dir = scheme['platlib']
+
+ info_dir = [] # type: List[str]
+ data_dirs = []
+ source = wheeldir.rstrip(os.path.sep) + os.path.sep
+
+ # Record details of the files moved
+ # installed = files copied from the wheel to the destination
+ # changed = files changed while installing (scripts #! line typically)
+ # generated = files newly generated during the install (script wrappers)
+ installed = {} # type: Dict[str, str]
+ changed = set()
+ generated = [] # type: List[str]
+
+ # Compile all of the pyc files that we're going to be installing
+ if pycompile:
+ with captured_stdout() as stdout:
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore')
+ compileall.compile_dir(source, force=True, quiet=True)
+ logger.debug(stdout.getvalue())
+
+ def record_installed(srcfile, destfile, modified=False):
+ """Map archive RECORD paths to installation RECORD paths."""
+ oldpath = normpath(srcfile, wheeldir)
+ newpath = normpath(destfile, lib_dir)
+ installed[oldpath] = newpath
+ if modified:
+ changed.add(destfile)
+
+ def clobber(source, dest, is_base, fixer=None, filter=None):
+ ensure_dir(dest) # common for the 'include' path
+
+ for dir, subdirs, files in os.walk(source):
+ basedir = dir[len(source):].lstrip(os.path.sep)
+ destdir = os.path.join(dest, basedir)
+ if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
+ continue
+ for s in subdirs:
+ destsubdir = os.path.join(dest, basedir, s)
+ if is_base and basedir == '' and destsubdir.endswith('.data'):
+ data_dirs.append(s)
+ continue
+ elif (is_base and
+ s.endswith('.dist-info') and
+ canonicalize_name(s).startswith(
+ canonicalize_name(req.name))):
+ assert not info_dir, ('Multiple .dist-info directories: ' +
+ destsubdir + ', ' +
+ ', '.join(info_dir))
+ info_dir.append(destsubdir)
+ for f in files:
+ # Skip unwanted files
+ if filter and filter(f):
+ continue
+ srcfile = os.path.join(dir, f)
+ destfile = os.path.join(dest, basedir, f)
+ # directory creation is lazy and after the file filtering above
+ # to ensure we don't install empty dirs; empty dirs can't be
+ # uninstalled.
+ ensure_dir(destdir)
+
+ # copyfile (called below) truncates the destination if it
+ # exists and then writes the new contents. This is fine in most
+ # cases, but can cause a segfault if pip has loaded a shared
+ # object (e.g. from pyopenssl through its vendored urllib3)
+ # Since the shared object is mmap'd an attempt to call a
+ # symbol in it will then cause a segfault. Unlinking the file
+ # allows writing of new contents while allowing the process to
+ # continue to use the old copy.
+ if os.path.exists(destfile):
+ os.unlink(destfile)
+
+ # We use copyfile (not move, copy, or copy2) to be extra sure
+ # that we are not moving directories over (copyfile fails for
+ # directories) as well as to ensure that we are not copying
+ # over any metadata because we want more control over what
+ # metadata we actually copy over.
+ shutil.copyfile(srcfile, destfile)
+
+ # Copy over the metadata for the file, currently this only
+ # includes the atime and mtime.
+ st = os.stat(srcfile)
+ if hasattr(os, "utime"):
+ os.utime(destfile, (st.st_atime, st.st_mtime))
+
+ # If our file is executable, then make our destination file
+ # executable.
+ if os.access(srcfile, os.X_OK):
+ st = os.stat(srcfile)
+ permissions = (
+ st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+ )
+ os.chmod(destfile, permissions)
+
+ changed = False
+ if fixer:
+ changed = fixer(destfile)
+ record_installed(srcfile, destfile, changed)
+
+ clobber(source, lib_dir, True)
+
+ assert info_dir, "%s .dist-info directory not found" % req
+
+ # Get the defined entry points
+ ep_file = os.path.join(info_dir[0], 'entry_points.txt')
+ console, gui = get_entrypoints(ep_file)
+
+ def is_entrypoint_wrapper(name):
+ # EP, EP.exe and EP-script.py are scripts generated for
+ # entry point EP by setuptools
+ if name.lower().endswith('.exe'):
+ matchname = name[:-4]
+ elif name.lower().endswith('-script.py'):
+ matchname = name[:-10]
+ elif name.lower().endswith(".pya"):
+ matchname = name[:-4]
+ else:
+ matchname = name
+ # Ignore setuptools-generated scripts
+ return (matchname in console or matchname in gui)
+
+ for datadir in data_dirs:
+ fixer = None
+ filter = None
+ for subdir in os.listdir(os.path.join(wheeldir, datadir)):
+ fixer = None
+ if subdir == 'scripts':
+ fixer = fix_script
+ filter = is_entrypoint_wrapper
+ source = os.path.join(wheeldir, datadir, subdir)
+ dest = scheme[subdir]
+ clobber(source, dest, False, fixer=fixer, filter=filter)
+
+ maker = ScriptMaker(None, scheme['scripts'])
+
+ # Ensure old scripts are overwritten.
+ # See https://github.com/pypa/pip/issues/1800
+ maker.clobber = True
+
+ # Ensure we don't generate any variants for scripts because this is almost
+ # never what somebody wants.
+ # See https://bitbucket.org/pypa/distlib/issue/35/
+ maker.variants = {''}
+
+ # This is required because otherwise distlib creates scripts that are not
+ # executable.
+ # See https://bitbucket.org/pypa/distlib/issue/32/
+ maker.set_mode = True
+
+ # Simplify the script and fix the fact that the default script swallows
+ # every single stack trace.
+ # See https://bitbucket.org/pypa/distlib/issue/34/
+ # See https://bitbucket.org/pypa/distlib/issue/33/
+ def _get_script_text(entry):
+ if entry.suffix is None:
+ raise InstallationError(
+ "Invalid script entry point: %s for req: %s - A callable "
+ "suffix is required. Cf https://packaging.python.org/en/"
+ "latest/distributing.html#console-scripts for more "
+ "information." % (entry, req)
+ )
+ return maker.script_template % {
+ "module": entry.prefix,
+ "import_name": entry.suffix.split(".")[0],
+ "func": entry.suffix,
+ }
+ # ignore type, because mypy disallows assigning to a method,
+ # see https://github.com/python/mypy/issues/2427
+ maker._get_script_text = _get_script_text # type: ignore
+ maker.script_template = r"""# -*- coding: utf-8 -*-
+import re
+import sys
+
+from %(module)s import %(import_name)s
+
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+ sys.exit(%(func)s())
+"""
+
+ # Special case pip and setuptools to generate versioned wrappers
+ #
+ # The issue is that some projects (specifically, pip and setuptools) use
+ # code in setup.py to create "versioned" entry points - pip2.7 on Python
+ # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
+ # the wheel metadata at build time, and so if the wheel is installed with
+ # a *different* version of Python the entry points will be wrong. The
+ # correct fix for this is to enhance the metadata to be able to describe
+ # such versioned entry points, but that won't happen till Metadata 2.0 is
+ # available.
+ # In the meantime, projects using versioned entry points will either have
+ # incorrect versioned entry points, or they will not be able to distribute
+ # "universal" wheels (i.e., they will need a wheel per Python version).
+ #
+ # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+ # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
+ # override the versioned entry points in the wheel and generate the
+ # correct ones. This code is purely a short-term measure until Metadata 2.0
+ # is available.
+ #
+    # To add to the hackiness of this section: in order to support
+    # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
+    # variable which controls which versioned scripts get installed.
+ #
+ # ENSUREPIP_OPTIONS=altinstall
+ # - Only pipX.Y and easy_install-X.Y will be generated and installed
+ # ENSUREPIP_OPTIONS=install
+    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
+    #     that this branch applies whenever ENSUREPIP_OPTIONS is set to
+    #     anything other than altinstall.
+ # DEFAULT
+ # - The default behavior is to install pip, pipX, pipX.Y, easy_install
+ # and easy_install-X.Y.
+ pip_script = console.pop('pip', None)
+ if pip_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ spec = 'pip = ' + pip_script
+ generated.extend(maker.make(spec))
+
+ if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
+ spec = 'pip%s = %s' % (sys.version[:1], pip_script)
+ generated.extend(maker.make(spec))
+
+ spec = 'pip%s = %s' % (sys.version[:3], pip_script)
+ generated.extend(maker.make(spec))
+ # Delete any other versioned pip entry points
+ pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
+ for k in pip_ep:
+ del console[k]
+ easy_install_script = console.pop('easy_install', None)
+ if easy_install_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ spec = 'easy_install = ' + easy_install_script
+ generated.extend(maker.make(spec))
+
+ spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
+ generated.extend(maker.make(spec))
+ # Delete any other versioned easy_install entry points
+ easy_install_ep = [
+ k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
+ ]
+ for k in easy_install_ep:
+ del console[k]
+
+ # Generate the console and GUI entry points specified in the wheel
+ if len(console) > 0:
+ generated_console_scripts = maker.make_multiple(
+ ['%s = %s' % kv for kv in console.items()]
+ )
+ generated.extend(generated_console_scripts)
+
+ if warn_script_location:
+ msg = message_about_scripts_not_on_PATH(generated_console_scripts)
+ if msg is not None:
+ logger.warning(msg)
+
+ if len(gui) > 0:
+ generated.extend(
+ maker.make_multiple(
+ ['%s = %s' % kv for kv in gui.items()],
+ {'gui': True}
+ )
+ )
+
+ # Record pip as the installer
+ installer = os.path.join(info_dir[0], 'INSTALLER')
+ temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
+ with open(temp_installer, 'wb') as installer_file:
+ installer_file.write(b'pip\n')
+ shutil.move(temp_installer, installer)
+ generated.append(installer)
+
+ # Record details of all files installed
+ record = os.path.join(info_dir[0], 'RECORD')
+ temp_record = os.path.join(info_dir[0], 'RECORD.pip')
+ with open_for_csv(record, 'r') as record_in:
+ with open_for_csv(temp_record, 'w+') as record_out:
+ reader = csv.reader(record_in)
+ outrows = get_csv_rows_for_installed(
+ reader, installed=installed, changed=changed,
+ generated=generated, lib_dir=lib_dir,
+ )
+ writer = csv.writer(record_out)
+ # Sort to simplify testing.
+ for row in sorted_outrows(outrows):
+ writer.writerow(row)
+ shutil.move(temp_record, record)
+
+
+def wheel_version(source_dir):
+ # type: (Optional[str]) -> Optional[Tuple[int, ...]]
+ """
+ Return the Wheel-Version of an extracted wheel, if possible.
+
+ Otherwise, return None if we couldn't parse / extract it.
+ """
+ try:
+ dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
+
+ wheel_data = dist.get_metadata('WHEEL')
+ wheel_data = Parser().parsestr(wheel_data)
+
+ version = wheel_data['Wheel-Version'].strip()
+ version = tuple(map(int, version.split('.')))
+ return version
+ except Exception:
+ return None
+
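+# Illustrative behaviour (example metadata): a wheel whose WHEEL file
+# contains "Wheel-Version: 1.0" yields (1, 0) from wheel_version(); missing
+# or unparseable metadata yields None.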
+
+def check_compatibility(version, name):
+ # type: (Optional[Tuple[int, ...]], str) -> None
+ """
+ Raises errors or warns if called with an incompatible Wheel-Version.
+
+ Pip should refuse to install a Wheel-Version that's a major series
+ ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
+ installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
+
+ version: a 2-tuple representing a Wheel-Version (Major, Minor)
+ name: name of wheel or package to raise exception about
+
+ :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+ """
+ if not version:
+ raise UnsupportedWheel(
+ "%s is in an unsupported or invalid wheel" % name
+ )
+ if version[0] > VERSION_COMPATIBLE[0]:
+ raise UnsupportedWheel(
+ "%s's Wheel-Version (%s) is not compatible with this version "
+ "of pip" % (name, '.'.join(map(str, version)))
+ )
+ elif version > VERSION_COMPATIBLE:
+ logger.warning(
+ 'Installing from a newer Wheel-Version (%s)',
+ '.'.join(map(str, version)),
+ )
+
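+# Illustrative behaviour, assuming VERSION_COMPATIBLE == (1, 0) as the
+# docstring's examples imply:
+# check_compatibility((1, 0), "foo") # returns quietly
+# check_compatibility((1, 2), "foo") # logs a warning (minor version ahead)
+# check_compatibility((2, 0), "foo") # raises UnsupportedWheel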
+
+def format_tag(file_tag):
+ # type: (Tuple[str, ...]) -> str
+ """
+ Format three tags in the form "<python_tag>-<abi_tag>-<platform_tag>".
+
+ :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag).
+ """
+ return '-'.join(file_tag)
+
+
+class Wheel(object):
+ """A wheel file"""
+
+ # TODO: Maybe move the class into the models sub-package
+ # TODO: Maybe move the install code into this class
+
+ wheel_file_re = re.compile(
+ r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
+ ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
+ \.whl|\.dist-info)$""",
+ re.VERBOSE
+ )
+
+ def __init__(self, filename):
+ # type: (str) -> None
+ """
+ :raises InvalidWheelFilename: when the filename is invalid for a wheel
+ """
+ wheel_info = self.wheel_file_re.match(filename)
+ if not wheel_info:
+ raise InvalidWheelFilename(
+ "%s is not a valid wheel filename." % filename
+ )
+ self.filename = filename
+ self.name = wheel_info.group('name').replace('_', '-')
+ # we'll assume "_" means "-" due to wheel naming scheme
+ # (https://github.com/pypa/pip/issues/1150)
+ self.version = wheel_info.group('ver').replace('_', '-')
+ self.build_tag = wheel_info.group('build')
+ self.pyversions = wheel_info.group('pyver').split('.')
+ self.abis = wheel_info.group('abi').split('.')
+ self.plats = wheel_info.group('plat').split('.')
+
+ # All the tag combinations from this file
+ self.file_tags = {
+ (x, y, z) for x in self.pyversions
+ for y in self.abis for z in self.plats
+ }
+
+ def get_formatted_file_tags(self):
+ # type: () -> List[str]
+ """
+ Return the wheel's tags as a sorted list of strings.
+ """
+ return sorted(format_tag(tag) for tag in self.file_tags)
+
+ def support_index_min(self, tags=None):
+ # type: (Optional[List[Pep425Tag]]) -> Optional[int]
+ """
+ Return the lowest index that one of the wheel's file_tag combinations
+ achieves in the supported_tags list, e.g. if there are 8 supported tags
+ and one of the file tags is first in the list, then return 0. Returns
+ None if the wheel is not supported.
+ """
+ if tags is None: # for mock
+ tags = pep425tags.get_supported()
+ indexes = [tags.index(c) for c in self.file_tags if c in tags]
+ return min(indexes) if indexes else None
+
+ def supported(self, tags=None):
+ # type: (Optional[List[Pep425Tag]]) -> bool
+ """Is this wheel supported on this system?"""
+ if tags is None: # for mock
+ tags = pep425tags.get_supported()
+ return bool(set(tags).intersection(self.file_tags))
+
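+# Illustrative parse (example filename): Wheel("pip-19.0.3-py2.py3-none-any.whl")
+# gives name == "pip", version == "19.0.3", and file_tags ==
+# {("py2", "none", "any"), ("py3", "none", "any")}.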
+
+def _contains_egg_info(
+ s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
+ """Determine whether the string looks like an egg_info.
+
+ :param s: The string to parse. E.g. foo-2.1
+ """
+ return bool(_egg_info_re.search(s))
+
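+# For example: _contains_egg_info("foo-2.1") is True, while
+# _contains_egg_info("foo") is False (no version part to match).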
+
+def should_use_ephemeral_cache(
+ req, # type: InstallRequirement
+ format_control, # type: FormatControl
+ autobuilding, # type: bool
+ cache_available # type: bool
+):
+ # type: (...) -> Optional[bool]
+ """
+ Return whether to build an InstallRequirement object using the
+ ephemeral cache.
+
+ :param cache_available: whether a cache directory is available for the
+ autobuilding=True case.
+
+ :return: True to build the requirement into the ephemeral cache, False
+ to build it into the persistent cache, or None not to build it at all.
+ """
+ if req.constraint:
+ return None
+ if req.is_wheel:
+ if not autobuilding:
+ logger.info(
+ 'Skipping %s, due to already being a wheel.', req.name,
+ )
+ return None
+ if not autobuilding:
+ return False
+
+ if req.editable or not req.source_dir:
+ return None
+
+ if "binary" not in format_control.get_allowed_formats(
+ canonicalize_name(req.name)):
+ logger.info(
+ "Skipping bdist_wheel for %s, due to binaries "
+ "being disabled for it.", req.name,
+ )
+ return None
+
+ if req.link and not req.link.is_artifact:
+ # VCS checkout. Build wheel just for this run.
+ return True
+
+ link = req.link
+ base, ext = link.splitext()
+ if cache_available and _contains_egg_info(base):
+ return False
+
+ # Otherwise, build the wheel just for this run using the ephemeral
+ # cache since we are either in the case of e.g. a local directory, or
+ # no cache directory is available to use.
+ return True
+
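+# Illustrative decisions (assuming autobuilding=True and a cache directory
+# being available): a VCS checkout returns True (ephemeral cache), a link
+# whose basename looks like "foo-2.1" returns False (persistent cache), and
+# a constraint-only requirement returns None (no build).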
+
+def format_command_result(
+ command_args, # type: List[str]
+ command_output, # type: str
+):
+ # type: (...) -> str
+ """
+ Format command information for logging.
+ """
+ command_desc = format_command_args(command_args)
+ text = 'Command arguments: {}\n'.format(command_desc)
+
+ if not command_output:
+ text += 'Command output: None'
+ elif logger.getEffectiveLevel() > logging.DEBUG:
+ text += 'Command output: [use --verbose to show]'
+ else:
+ if not command_output.endswith('\n'):
+ command_output += '\n'
+ text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER)
+
+ return text
+
+
+def get_legacy_build_wheel_path(
+ names, # type: List[str]
+ temp_dir, # type: str
+ req, # type: InstallRequirement
+ command_args, # type: List[str]
+ command_output, # type: str
+):
+ # type: (...) -> Optional[str]
+ """
+ Return the path to the wheel in the temporary build directory.
+ """
+ # Sort for determinism.
+ names = sorted(names)
+ if not names:
+ msg = (
+ 'Legacy build of wheel for {!r} created no files.\n'
+ ).format(req.name)
+ msg += format_command_result(command_args, command_output)
+ logger.warning(msg)
+ return None
+
+ if len(names) > 1:
+ msg = (
+ 'Legacy build of wheel for {!r} created more than one file.\n'
+ 'Filenames (choosing first): {}\n'
+ ).format(req.name, names)
+ msg += format_command_result(command_args, command_output)
+ logger.warning(msg)
+
+ return os.path.join(temp_dir, names[0])
+
+
+class WheelBuilder(object):
+ """Build wheels from a RequirementSet."""
+
+ def __init__(
+ self,
+ finder, # type: PackageFinder
+ preparer, # type: RequirementPreparer
+ wheel_cache, # type: WheelCache
+ build_options=None, # type: Optional[List[str]]
+ global_options=None, # type: Optional[List[str]]
+ no_clean=False # type: bool
+ ):
+ # type: (...) -> None
+ self.finder = finder
+ self.preparer = preparer
+ self.wheel_cache = wheel_cache
+
+ self._wheel_dir = preparer.wheel_download_dir
+
+ self.build_options = build_options or []
+ self.global_options = global_options or []
+ self.no_clean = no_clean
+
+ def _build_one(self, req, output_dir, python_tag=None):
+ """Build one wheel.
+
+ :return: The filename of the built wheel, or None if the build failed.
+ """
+ # Install build deps into temporary directory (PEP 518)
+ with req.build_env:
+ return self._build_one_inside_env(req, output_dir,
+ python_tag=python_tag)
+
+ def _build_one_inside_env(self, req, output_dir, python_tag=None):
+ with TempDirectory(kind="wheel") as temp_dir:
+ if req.use_pep517:
+ builder = self._build_one_pep517
+ else:
+ builder = self._build_one_legacy
+ wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
+ if wheel_path is not None:
+ wheel_name = os.path.basename(wheel_path)
+ dest_path = os.path.join(output_dir, wheel_name)
+ try:
+ wheel_hash, length = hash_file(wheel_path)
+ shutil.move(wheel_path, dest_path)
+ logger.info('Created wheel for %s: '
+ 'filename=%s size=%d sha256=%s',
+ req.name, wheel_name, length,
+ wheel_hash.hexdigest())
+ logger.info('Stored in directory: %s', output_dir)
+ return dest_path
+ except Exception:
+ pass
+ # Ignore return, we can't do anything else useful.
+ self._clean_one(req)
+ return None
+
+ def _base_setup_args(self, req):
+ # NOTE: Eventually, we'd want to also pass -S to the flags here, when we're
+ # isolating. Currently, it breaks Python in virtualenvs, because it
+ # relies on site.py to find parts of the standard library outside the
+ # virtualenv.
+ base_cmd = make_setuptools_shim_args(req.setup_py_path,
+ unbuffered_output=True)
+ return base_cmd + list(self.global_options)
+
+ def _build_one_pep517(self, req, tempd, python_tag=None):
+ """Build one InstallRequirement using the PEP 517 build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ assert req.metadata_directory is not None
+ if self.build_options:
+ # PEP 517 does not support --build-options
+ logger.error('Cannot build wheel for %s using PEP 517 when '
+ '--build-options is present' % (req.name,))
+ return None
+ try:
+ req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,)
+ logger.debug('Destination directory: %s', tempd)
+ wheel_name = req.pep517_backend.build_wheel(
+ tempd,
+ metadata_directory=req.metadata_directory
+ )
+ if python_tag:
+ # General PEP 517 backends don't necessarily support
+ # a "--python-tag" option, so we rename the wheel
+ # file directly.
+ new_name = replace_python_tag(wheel_name, python_tag)
+ os.rename(
+ os.path.join(tempd, wheel_name),
+ os.path.join(tempd, new_name)
+ )
+ # Reassign to simplify the return at the end of function
+ wheel_name = new_name
+ except Exception:
+ logger.error('Failed building wheel for %s', req.name)
+ return None
+ return os.path.join(tempd, wheel_name)
+
+ def _build_one_legacy(self, req, tempd, python_tag=None):
+ """Build one InstallRequirement using the "legacy" build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ base_args = self._base_setup_args(req)
+
+ spin_message = 'Building wheel for %s (setup.py)' % (req.name,)
+ with open_spinner(spin_message) as spinner:
+ logger.debug('Destination directory: %s', tempd)
+ wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
+ + self.build_options
+
+ if python_tag is not None:
+ wheel_args += ["--python-tag", python_tag]
+
+ try:
+ output = call_subprocess(wheel_args, cwd=req.setup_py_dir,
+ spinner=spinner)
+ except Exception:
+ spinner.finish("error")
+ logger.error('Failed building wheel for %s', req.name)
+ return None
+ names = os.listdir(tempd)
+ wheel_path = get_legacy_build_wheel_path(
+ names=names,
+ temp_dir=tempd,
+ req=req,
+ command_args=wheel_args,
+ command_output=output,
+ )
+ return wheel_path
+
+ def _clean_one(self, req):
+ base_args = self._base_setup_args(req)
+
+ logger.info('Running setup.py clean for %s', req.name)
+ clean_args = base_args + ['clean', '--all']
+ try:
+ call_subprocess(clean_args, cwd=req.source_dir)
+ return True
+ except Exception:
+ logger.error('Failed cleaning build dir for %s', req.name)
+ return False
+
+ def build(
+ self,
+ requirements, # type: Iterable[InstallRequirement]
+ session, # type: PipSession
+ autobuilding=False # type: bool
+ ):
+ # type: (...) -> List[InstallRequirement]
+ """Build wheels.
+
+ :return: The list of InstallRequirement objects that failed to build
+ (an empty list if there was nothing to build, or if every wheel
+ built correctly).
+ """
+ buildset = []
+ format_control = self.finder.format_control
+ # Whether a cache directory is available for autobuilding=True.
+ cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)
+
+ for req in requirements:
+ ephem_cache = should_use_ephemeral_cache(
+ req, format_control=format_control, autobuilding=autobuilding,
+ cache_available=cache_available,
+ )
+ if ephem_cache is None:
+ continue
+
+ buildset.append((req, ephem_cache))
+
+ if not buildset:
+ return []
+
+ # Is any wheel build not using the ephemeral cache?
+ if any(not ephem_cache for _, ephem_cache in buildset):
+ have_directory_for_build = self._wheel_dir or (
+ autobuilding and self.wheel_cache.cache_dir
+ )
+ assert have_directory_for_build
+
+ # TODO by @pradyunsg
+ # Should break up this method into 2 separate methods.
+
+ # Build the wheels.
+ logger.info(
+ 'Building wheels for collected packages: %s',
+ ', '.join([req.name for (req, _) in buildset]),
+ )
+ _cache = self.wheel_cache # shorter name
+ with indent_log():
+ build_success, build_failure = [], []
+ for req, ephem in buildset:
+ python_tag = None
+ if autobuilding:
+ python_tag = pep425tags.implementation_tag
+ if ephem:
+ output_dir = _cache.get_ephem_path_for_link(req.link)
+ else:
+ output_dir = _cache.get_path_for_link(req.link)
+ try:
+ ensure_dir(output_dir)
+ except OSError as e:
+ logger.warning("Building wheel for %s failed: %s",
+ req.name, e)
+ build_failure.append(req)
+ continue
+ else:
+ output_dir = self._wheel_dir
+ wheel_file = self._build_one(
+ req, output_dir,
+ python_tag=python_tag,
+ )
+ if wheel_file:
+ build_success.append(req)
+ if autobuilding:
+ # XXX: This is mildly duplicative with prepare_files,
+ # but not close enough to pull out to a single common
+ # method.
+ # The code below assumes temporary source dirs -
+ # prevent it doing bad things.
+ if req.source_dir and not os.path.exists(os.path.join(
+ req.source_dir, PIP_DELETE_MARKER_FILENAME)):
+ raise AssertionError(
+ "bad source dir - missing marker")
+ # Delete the source we built the wheel from
+ req.remove_temporary_source()
+ # set the build directory again - name is known from
+ # the work prepare_files did.
+ req.source_dir = req.build_location(
+ self.preparer.build_dir
+ )
+ # Update the link for this.
+ req.link = Link(path_to_url(wheel_file))
+ assert req.link.is_wheel
+ # extract the wheel into the dir
+ unpack_url(
+ req.link, req.source_dir, None, False,
+ session=session,
+ )
+ else:
+ build_failure.append(req)
+
+ # notify success/failure
+ if build_success:
+ logger.info(
+ 'Successfully built %s',
+ ' '.join([req.name for req in build_success]),
+ )
+ if build_failure:
+ logger.info(
+ 'Failed to build %s',
+ ' '.join([req.name for req in build_failure]),
+ )
+ # Return a list of requirements that failed to build
+ return build_failure
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__init__.py
new file mode 100644
index 00000000..c1d9508d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__init__.py
@@ -0,0 +1,109 @@
+"""
+pip._vendor is for vendoring dependencies of pip so that pip does not need
+to depend on anything external.
+
+Files inside of pip._vendor should be considered immutable and should only be
+updated to versions from upstream.
+"""
+from __future__ import absolute_import
+
+import glob
+import os.path
+import sys
+
+# Downstream redistributors which have debundled our dependencies should also
+# patch this value to be true. This will trigger the additional patching
+# to cause things like "six" to be importable as pip._vendor.six.
+DEBUNDLED = False
+
+# By default, look in this directory for a bunch of .whl files which we will
+# add to the beginning of sys.path before attempting to import anything. This
+# is done to support downstream re-distributors like Debian and Fedora who
+# wish to create their own Wheels for our dependencies to aid in debundling.
+WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+# if the vendored ones do not exist. The idea for this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
+def vendored(modulename):
+ vendored_name = "{0}.{1}".format(__name__, modulename)
+
+ try:
+ __import__(modulename, globals(), locals(), level=0)
+ except ImportError:
+ # We can just silently allow import failures to pass here. If we
+ # got to this point it means that ``import pip._vendor.whatever``
+ # failed and so did ``import whatever``. Since we're importing this
+ # upfront in an attempt to alias imports, not erroring here will
+ # just mean we get a regular import error whenever pip *actually*
+ # tries to import one of these modules to use it, which actually
+ # gives us a better error message than we would have otherwise
+ # gotten.
+ pass
+ else:
+ sys.modules[vendored_name] = sys.modules[modulename]
+ base, head = vendored_name.rsplit(".", 1)
+ setattr(sys.modules[base], head, sys.modules[modulename])
+
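+# Illustrative effect: after a successful vendored("six"), ``import six`` and
+# ``import pip._vendor.six`` resolve to the same module object, i.e.
+# sys.modules["pip._vendor.six"] is sys.modules["six"].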
+
+# If we're operating in a debundled setup, then we want to go ahead and trigger
+# the aliasing of our vendored libraries as well as looking for wheels to add
+# to our sys.path. Typically all of this code is a no-op; however, downstream
+# redistributors can enable debundling in a consistent way across all
+# platforms.
+if DEBUNDLED:
+ # Actually look inside of WHEEL_DIR to find .whl files and add them to the
+ # front of our sys.path.
+ sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
+
+ # Actually alias all of our vendored dependencies.
+ vendored("cachecontrol")
+ vendored("colorama")
+ vendored("distlib")
+ vendored("distro")
+ vendored("html5lib")
+ vendored("lockfile")
+ vendored("six")
+ vendored("six.moves")
+ vendored("six.moves.urllib")
+ vendored("six.moves.urllib.parse")
+ vendored("packaging")
+ vendored("packaging.version")
+ vendored("packaging.specifiers")
+ vendored("pep517")
+ vendored("pkg_resources")
+ vendored("progress")
+ vendored("pytoml")
+ vendored("retrying")
+ vendored("requests")
+ vendored("requests.exceptions")
+ vendored("requests.packages")
+ vendored("requests.packages.urllib3")
+ vendored("requests.packages.urllib3._collections")
+ vendored("requests.packages.urllib3.connection")
+ vendored("requests.packages.urllib3.connectionpool")
+ vendored("requests.packages.urllib3.contrib")
+ vendored("requests.packages.urllib3.contrib.ntlmpool")
+ vendored("requests.packages.urllib3.contrib.pyopenssl")
+ vendored("requests.packages.urllib3.exceptions")
+ vendored("requests.packages.urllib3.fields")
+ vendored("requests.packages.urllib3.filepost")
+ vendored("requests.packages.urllib3.packages")
+ vendored("requests.packages.urllib3.packages.ordered_dict")
+ vendored("requests.packages.urllib3.packages.six")
+ vendored("requests.packages.urllib3.packages.ssl_match_hostname")
+ vendored("requests.packages.urllib3.packages.ssl_match_hostname."
+ "_implementation")
+ vendored("requests.packages.urllib3.poolmanager")
+ vendored("requests.packages.urllib3.request")
+ vendored("requests.packages.urllib3.response")
+ vendored("requests.packages.urllib3.util")
+ vendored("requests.packages.urllib3.util.connection")
+ vendored("requests.packages.urllib3.util.request")
+ vendored("requests.packages.urllib3.util.response")
+ vendored("requests.packages.urllib3.util.retry")
+ vendored("requests.packages.urllib3.util.ssl_")
+ vendored("requests.packages.urllib3.util.timeout")
+ vendored("requests.packages.urllib3.util.url")
+ vendored("urllib3")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..041ceebc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/appdirs.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/appdirs.cpython-37.pyc
new file mode 100644
index 00000000..10e550c4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/appdirs.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/distro.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/distro.cpython-37.pyc
new file mode 100644
index 00000000..ddbae55a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/distro.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/ipaddress.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/ipaddress.cpython-37.pyc
new file mode 100644
index 00000000..9a4d41f4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/ipaddress.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-37.pyc
new file mode 100644
index 00000000..90f54487
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/pyparsing.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/retrying.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/retrying.cpython-37.pyc
new file mode 100644
index 00000000..4d3b84bb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/retrying.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/six.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/six.cpython-37.pyc
new file mode 100644
index 00000000..c859104c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/__pycache__/six.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/appdirs.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/appdirs.py
new file mode 100644
index 00000000..2bd39110
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/appdirs.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2005-2010 ActiveState Software Inc.
+# Copyright (c) 2013 Eddy Petrișor
+
+"""Utilities for determining application-specific dirs.
+
+See <http://github.com/ActiveState/appdirs> for details and usage.
+"""
+# Dev Notes:
+# - MSDN on where to store app data files:
+# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
+# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
+# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+
+__version_info__ = (1, 4, 3)
+__version__ = '.'.join(map(str, __version_info__))
+
+
+import sys
+import os
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ unicode = str
+
+if sys.platform.startswith('java'):
+ import platform
+ os_name = platform.java_ver()[3][0]
+ if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
+ system = 'win32'
+ elif os_name.startswith('Mac'): # "Mac OS X", etc.
+ system = 'darwin'
+ else: # "Linux", "SunOS", "FreeBSD", etc.
+ # Setting this to "linux2" is not ideal, but only Windows or Mac
+ # are actually checked for and the rest of the module expects
+ # *sys.platform* style strings.
+ system = 'linux2'
+else:
+ system = sys.platform
+
+
+
+def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user data directories are:
+ Mac OS X: ~/Library/Application Support/<AppName>
+ Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
+ Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
+ Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
+ Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
+ Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
+
+ For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
+ That means, by default "~/.local/share/<AppName>".
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
+ path = os.path.normpath(_get_win_folder(const))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Application Support/')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
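+# Illustrative call (assuming Linux with XDG_DATA_HOME unset):
+# user_data_dir("MyApp", version="1.0") returns the expanded equivalent of
+# "~/.local/share/MyApp/1.0".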
+
+def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
+ r"""Return full path to the user-shared data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of data dirs should be
+ returned. By default, the first item from XDG_DATA_DIRS is
+ returned, or '/usr/local/share/<AppName>',
+ if XDG_DATA_DIRS is not set
+
+ Typical site data directories are:
+ Mac OS X: /Library/Application Support/<AppName>
+ Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
+ Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+ Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
+
+ For Unix, this is using the $XDG_DATA_DIRS[0] default.
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('/Library/Application Support')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ # XDG default for $XDG_DATA_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_DATA_DIRS',
+ os.pathsep.join(['/usr/local/share', '/usr/share']))
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific config dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user config directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
+ That means, by default "~/.config/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
+ r"""Return full path to the user-shared data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of config dirs should be
+ returned. By default, the first item from XDG_CONFIG_DIRS is
+ returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
+
+ Typical site config directories are:
+ Mac OS X: same as site_data_dir
+ Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
+ $XDG_CONFIG_DIRS
+ Win *: same as site_data_dir
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+
+ For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system in ["win32", "darwin"]:
+ path = site_data_dir(appname, appauthor)
+ if appname and version:
+ path = os.path.join(path, version)
+ else:
+ # XDG default for $XDG_CONFIG_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+
+def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific cache dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Cache" to the base app data dir for Windows. See
+ discussion below.
+
+ Typical user cache directories are:
+ Mac OS X: ~/Library/Caches/<AppName>
+ Unix: ~/.cache/<AppName> (XDG default)
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
+
+ On Windows the only suggestion in the MSDN docs is that local settings go in
+ the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
+ app data dir (the default returned by `user_data_dir` above). Apps typically
+ put cache data somewhere *under* the given dir here. Some examples:
+ ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+ ...\Acme\SuperApp\Cache\1.0
+ OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+ This can be disabled with the `opinion=False` option.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ if opinion:
+ path = os.path.join(path, "Cache")
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Caches')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific state dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user state directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
+ to extend the XDG spec and support $XDG_STATE_HOME.
+
+ That means, by default "~/.local/state/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific log dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Logs" to the base app data dir for Windows, and "log" to the
+ base cache dir for Unix. See discussion below.
+
+ Typical user log directories are:
+ Mac OS X: ~/Library/Logs/<AppName>
+ Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
+
+ On Windows the only suggestion in the MSDN docs is that local settings
+ go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
+ examples of what some windows apps use for a logs dir.)
+
+ OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
+ value for Windows and appends "log" to the user cache dir for Unix.
+ This can be disabled with the `opinion=False` option.
+ """
+ if system == "darwin":
+ path = os.path.join(
+ os.path.expanduser('~/Library/Logs'),
+ appname)
+ elif system == "win32":
+ path = user_data_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "Logs")
+ else:
+ path = user_cache_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "log")
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+class AppDirs(object):
+ """Convenience wrapper for getting application dirs."""
+ def __init__(self, appname=None, appauthor=None, version=None,
+ roaming=False, multipath=False):
+ self.appname = appname
+ self.appauthor = appauthor
+ self.version = version
+ self.roaming = roaming
+ self.multipath = multipath
+
+ @property
+ def user_data_dir(self):
+ return user_data_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_data_dir(self):
+ return site_data_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_config_dir(self):
+ return user_config_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_config_dir(self):
+ return site_config_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_cache_dir(self):
+ return user_cache_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_state_dir(self):
+ return user_state_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_log_dir(self):
+ return user_log_dir(self.appname, self.appauthor,
+ version=self.version)
+
+
+#---- internal support stuff
+
+def _get_win_folder_from_registry(csidl_name):
+ """This is a fallback technique at best. I'm not sure if using the
+ registry for this guarantees us the correct answer for all CSIDL_*
+ names.
+ """
+ if PY3:
+ import winreg as _winreg
+ else:
+ import _winreg
+
+ shell_folder_name = {
+ "CSIDL_APPDATA": "AppData",
+ "CSIDL_COMMON_APPDATA": "Common AppData",
+ "CSIDL_LOCAL_APPDATA": "Local AppData",
+ }[csidl_name]
+
+ key = _winreg.OpenKey(
+ _winreg.HKEY_CURRENT_USER,
+ r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+ )
+ dir, type = _winreg.QueryValueEx(key, shell_folder_name)
+ return dir
+
+
+def _get_win_folder_with_pywin32(csidl_name):
+ from win32com.shell import shellcon, shell
+ dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
+ # Try to make this a unicode path because SHGetFolderPath does
+ # not return unicode strings when there is unicode data in the
+ # path.
+ try:
+ dir = unicode(dir)
+
+ # Downgrade to short path name if it has high-bit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ try:
+ import win32api
+ dir = win32api.GetShortPathName(dir)
+ except ImportError:
+ pass
+ except UnicodeError:
+ pass
+ return dir
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+ import ctypes
+
+ csidl_const = {
+ "CSIDL_APPDATA": 26,
+ "CSIDL_COMMON_APPDATA": 35,
+ "CSIDL_LOCAL_APPDATA": 28,
+ }[csidl_name]
+
+ buf = ctypes.create_unicode_buffer(1024)
+ ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+ # Downgrade to short path name if it has high-bit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in buf:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf2 = ctypes.create_unicode_buffer(1024)
+ if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+ buf = buf2
+
+ return buf.value
+
+def _get_win_folder_with_jna(csidl_name):
+ import array
+ from com.sun import jna
+ from com.sun.jna.platform import win32
+
+ buf_size = win32.WinDef.MAX_PATH * 2
+ buf = array.zeros('c', buf_size)
+ shell = win32.Shell32.INSTANCE
+ shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+ # Downgrade to short path name if it has high-bit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf = array.zeros('c', buf_size)
+ kernel = win32.Kernel32.INSTANCE
+ if kernel.GetShortPathName(dir, buf, buf_size):
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+ return dir
+
+if system == "win32":
+ try:
+ from ctypes import windll
+ _get_win_folder = _get_win_folder_with_ctypes
+ except ImportError:
+ try:
+ import com.sun.jna
+ _get_win_folder = _get_win_folder_with_jna
+ except ImportError:
+ _get_win_folder = _get_win_folder_from_registry
+
+
+#---- self test code
+
+if __name__ == "__main__":
+ appname = "MyApp"
+ appauthor = "MyCompany"
+
+ props = ("user_data_dir",
+ "user_config_dir",
+ "user_cache_dir",
+ "user_state_dir",
+ "user_log_dir",
+ "site_data_dir",
+ "site_config_dir")
+
+ print("-- app dirs %s --" % __version__)
+
+ print("-- app dirs (with optional 'version')")
+ dirs = AppDirs(appname, appauthor, version="1.0")
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'version')")
+ dirs = AppDirs(appname, appauthor)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'appauthor')")
+ dirs = AppDirs(appname)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (with disabled 'appauthor')")
+ dirs = AppDirs(appname, appauthor=False)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py
new file mode 100644
index 00000000..8fdee66f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py
@@ -0,0 +1,11 @@
+"""CacheControl import Interface.
+
+Make it easy to import from cachecontrol without long namespaces.
+"""
+__author__ = "Eric Larson"
+__email__ = "eric@ionrock.org"
+__version__ = "0.12.5"
+
+from .wrapper import CacheControl
+from .adapter import CacheControlAdapter
+from .controller import CacheController
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..32804460
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc
new file mode 100644
index 00000000..2cbff619
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc
new file mode 100644
index 00000000..57a49d34
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc
new file mode 100644
index 00000000..46cc0553
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..a5e4ca8f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc
new file mode 100644
index 00000000..569de920
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc
new file mode 100644
index 00000000..65087472
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc
new file mode 100644
index 00000000..de9cfead
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc
new file mode 100644
index 00000000..a4aeeb66
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc
new file mode 100644
index 00000000..7f6d9ab2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py
new file mode 100644
index 00000000..f1e0ad94
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py
@@ -0,0 +1,57 @@
+import logging
+
+from pip._vendor import requests
+
+from pip._vendor.cachecontrol.adapter import CacheControlAdapter
+from pip._vendor.cachecontrol.cache import DictCache
+from pip._vendor.cachecontrol.controller import logger
+
+from argparse import ArgumentParser
+
+
+def setup_logging():
+ logger.setLevel(logging.DEBUG)
+ handler = logging.StreamHandler()
+ logger.addHandler(handler)
+
+
+def get_session():
+ adapter = CacheControlAdapter(
+ DictCache(), cache_etags=True, serializer=None, heuristic=None
+ )
+ sess = requests.Session()
+ sess.mount("http://", adapter)
+ sess.mount("https://", adapter)
+
+ sess.cache_controller = adapter.controller
+ return sess
+
+
+def get_args():
+ parser = ArgumentParser()
+ parser.add_argument("url", help="The URL to try and cache")
+ return parser.parse_args()
+
+
+def main(args=None):
+ args = get_args()
+ sess = get_session()
+
+ # Make a request to get a response
+ resp = sess.get(args.url)
+
+ # Turn on logging
+ setup_logging()
+
+ # try setting the cache
+ sess.cache_controller.cache_response(resp.request, resp.raw)
+
+ # Now try to get it
+ if sess.cache_controller.cached_request(resp.request):
+ print("Cached!")
+ else:
+ print("Not cached :(")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py
new file mode 100644
index 00000000..780eb288
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py
@@ -0,0 +1,133 @@
+import types
+import functools
+import zlib
+
+from pip._vendor.requests.adapters import HTTPAdapter
+
+from .controller import CacheController
+from .cache import DictCache
+from .filewrapper import CallbackFileWrapper
+
+
+class CacheControlAdapter(HTTPAdapter):
+ invalidating_methods = {"PUT", "DELETE"}
+
+ def __init__(
+ self,
+ cache=None,
+ cache_etags=True,
+ controller_class=None,
+ serializer=None,
+ heuristic=None,
+ cacheable_methods=None,
+ *args,
+ **kw
+ ):
+ super(CacheControlAdapter, self).__init__(*args, **kw)
+ self.cache = cache or DictCache()
+ self.heuristic = heuristic
+ self.cacheable_methods = cacheable_methods or ("GET",)
+
+ controller_factory = controller_class or CacheController
+ self.controller = controller_factory(
+ self.cache, cache_etags=cache_etags, serializer=serializer
+ )
+
+ def send(self, request, cacheable_methods=None, **kw):
+ """
+ Send a request. Use the request information to see if it
+ exists in the cache and cache the response if we need to and can.
+ """
+ cacheable = cacheable_methods or self.cacheable_methods
+ if request.method in cacheable:
+ try:
+ cached_response = self.controller.cached_request(request)
+ except zlib.error:
+ cached_response = None
+ if cached_response:
+ return self.build_response(request, cached_response, from_cache=True)
+
+ # check for etags and add headers if appropriate
+ request.headers.update(self.controller.conditional_headers(request))
+
+ resp = super(CacheControlAdapter, self).send(request, **kw)
+
+ return resp
+
+ def build_response(
+ self, request, response, from_cache=False, cacheable_methods=None
+ ):
+ """
+ Build a response by making a request or using the cache.
+
+ This will end up calling send and returning a potentially
+ cached response
+ """
+ cacheable = cacheable_methods or self.cacheable_methods
+ if not from_cache and request.method in cacheable:
+ # Check for any heuristics that might update headers
+ # before trying to cache.
+ if self.heuristic:
+ response = self.heuristic.apply(response)
+
+ # apply any expiration heuristics
+ if response.status == 304:
+ # We must have sent an ETag request. This could mean
+ # that our cached entry has already expired or that we
+ # simply have an etag. In either case, we want to try
+ # and update the cache.
+ cached_response = self.controller.update_cached_response(
+ request, response
+ )
+
+ if cached_response is not response:
+ from_cache = True
+
+ # We are done with the server response, read a
+ # possible response body (compliant servers will
+ # not return one, but we cannot be 100% sure) and
+ # release the connection back to the pool.
+ response.read(decode_content=False)
+ response.release_conn()
+
+ response = cached_response
+
+ # We always cache the 301 responses
+ elif response.status == 301:
+ self.controller.cache_response(request, response)
+ else:
+ # Wrap the response file with a wrapper that will cache the
+ # response when the stream has been consumed.
+ response._fp = CallbackFileWrapper(
+ response._fp,
+ functools.partial(
+ self.controller.cache_response, request, response
+ ),
+ )
+ if response.chunked:
+ super_update_chunk_length = response._update_chunk_length
+
+ def _update_chunk_length(self):
+ super_update_chunk_length()
+ if self.chunk_left == 0:
+ self._fp._close()
+
+ response._update_chunk_length = types.MethodType(
+ _update_chunk_length, response
+ )
+
+ resp = super(CacheControlAdapter, self).build_response(request, response)
+
+ # See if we should invalidate the cache.
+ if request.method in self.invalidating_methods and resp.ok:
+ cache_url = self.controller.cache_url(request.url)
+ self.cache.delete(cache_url)
+
+ # Give the request a from_cache attr to let people use it
+ resp.from_cache = from_cache
+
+ return resp
+
+ def close(self):
+ self.cache.close()
+ super(CacheControlAdapter, self).close()
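
A usage sketch (not part of the vendored code): once the adapter is mounted, every response carries the from_cache attribute set in build_response. The URL is invented:

    from pip._vendor import requests
    from pip._vendor.cachecontrol.adapter import CacheControlAdapter

    sess = requests.Session()
    sess.mount("https://", CacheControlAdapter())

    first = sess.get("https://example.com/")   # goes to the network
    second = sess.get("https://example.com/")  # served from cache if cacheable
    print(first.from_cache, second.from_cache)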
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py
new file mode 100644
index 00000000..94e07732
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py
@@ -0,0 +1,39 @@
+"""
+The cache object API for implementing caches. The default is a
+thread-safe in-memory dictionary.
+"""
+from threading import Lock
+
+
+class BaseCache(object):
+
+ def get(self, key):
+ raise NotImplementedError()
+
+ def set(self, key, value):
+ raise NotImplementedError()
+
+ def delete(self, key):
+ raise NotImplementedError()
+
+ def close(self):
+ pass
+
+
+class DictCache(BaseCache):
+
+ def __init__(self, init_dict=None):
+ self.lock = Lock()
+ self.data = init_dict or {}
+
+ def get(self, key):
+ return self.data.get(key, None)
+
+ def set(self, key, value):
+ with self.lock:
+ self.data.update({key: value})
+
+ def delete(self, key):
+ with self.lock:
+ if key in self.data:
+ self.data.pop(key)
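
Since BaseCache only requires get/set/delete, backends are easy to sketch. An illustrative subclass (the class name and prefixing behaviour are invented for the example):

    from pip._vendor.cachecontrol.cache import BaseCache

    class PrefixedDictCache(BaseCache):
        # Illustrative only: stores entries under a namespaced key.
        def __init__(self, prefix="cc:"):
            self.prefix = prefix
            self.data = {}

        def get(self, key):
            return self.data.get(self.prefix + key)

        def set(self, key, value):
            self.data[self.prefix + key] = value

        def delete(self, key):
            self.data.pop(self.prefix + key, None)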
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
new file mode 100644
index 00000000..0e1658fa
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
@@ -0,0 +1,2 @@
+from .file_cache import FileCache # noqa
+from .redis_cache import RedisCache # noqa
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..eaafcdfc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc
new file mode 100644
index 00000000..0c1c82b5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc
new file mode 100644
index 00000000..3e2fae30
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
new file mode 100644
index 00000000..1ba00806
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
@@ -0,0 +1,146 @@
+import hashlib
+import os
+from textwrap import dedent
+
+from ..cache import BaseCache
+from ..controller import CacheController
+
+try:
+ FileNotFoundError
+except NameError:
+ # py2.X
+ FileNotFoundError = (IOError, OSError)
+
+
+def _secure_open_write(filename, fmode):
+ # We only want to write to this file, so open it in write only mode
+ flags = os.O_WRONLY
+
+ # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
+ # will open *new* files.
+ # We specify this because we want to ensure that the mode we pass is the
+ # mode of the file.
+ flags |= os.O_CREAT | os.O_EXCL
+
+ # Do not follow symlinks to prevent someone from making a symlink that
+ # we follow and insecurely open a cache file.
+ if hasattr(os, "O_NOFOLLOW"):
+ flags |= os.O_NOFOLLOW
+
+ # On Windows we'll mark this file as binary
+ if hasattr(os, "O_BINARY"):
+ flags |= os.O_BINARY
+
+ # Before we open our file, we want to delete any existing file that is
+ # there
+ try:
+ os.remove(filename)
+ except (IOError, OSError):
+ # The file must not exist already, so we can just skip ahead to opening
+ pass
+
+ # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
+ # race condition happens between the os.remove and this line, that an
+ # error will be raised. Because we utilize a lockfile this should only
+ # happen if someone is attempting to attack us.
+ fd = os.open(filename, flags, fmode)
+ try:
+ return os.fdopen(fd, "wb")
+
+ except:
+ # An error occurred wrapping our FD in a file object
+ os.close(fd)
+ raise
+
+
+class FileCache(BaseCache):
+
+ def __init__(
+ self,
+ directory,
+ forever=False,
+ filemode=0o0600,
+ dirmode=0o0700,
+ use_dir_lock=None,
+ lock_class=None,
+ ):
+
+ if use_dir_lock is not None and lock_class is not None:
+ raise ValueError("Cannot use use_dir_lock and lock_class together")
+
+ try:
+ from pip._vendor.lockfile import LockFile
+ from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
+ except ImportError:
+ notice = dedent(
+ """
+ NOTE: In order to use the FileCache you must have
+ lockfile installed. You can install it via pip:
+ pip install lockfile
+ """
+ )
+ raise ImportError(notice)
+
+ else:
+ if use_dir_lock:
+ lock_class = MkdirLockFile
+
+ elif lock_class is None:
+ lock_class = LockFile
+
+ self.directory = directory
+ self.forever = forever
+ self.filemode = filemode
+ self.dirmode = dirmode
+ self.lock_class = lock_class
+
+ @staticmethod
+ def encode(x):
+ return hashlib.sha224(x.encode()).hexdigest()
+
+ def _fn(self, name):
+ # NOTE: This method should not change as some may depend on it.
+ # See: https://github.com/ionrock/cachecontrol/issues/63
+ hashed = self.encode(name)
+ parts = list(hashed[:5]) + [hashed]
+ return os.path.join(self.directory, *parts)
+
+ def get(self, key):
+ name = self._fn(key)
+ try:
+ with open(name, "rb") as fh:
+ return fh.read()
+
+ except FileNotFoundError:
+ return None
+
+ def set(self, key, value):
+ name = self._fn(key)
+
+ # Make sure the directory exists
+ try:
+ os.makedirs(os.path.dirname(name), self.dirmode)
+ except (IOError, OSError):
+ pass
+
+ with self.lock_class(name) as lock:
+ # Write our actual file
+ with _secure_open_write(lock.path, self.filemode) as fh:
+ fh.write(value)
+
+ def delete(self, key):
+ name = self._fn(key)
+ if not self.forever:
+ try:
+ os.remove(name)
+ except FileNotFoundError:
+ pass
+
+
+def url_to_file_path(url, filecache):
+ """Return the file cache path based on the URL.
+
+ This does not ensure the file exists!
+ """
+ key = CacheController.cache_url(url)
+ return filecache._fn(key)
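
A sketch of using FileCache end to end (directory name and URL invented; requires the vendored lockfile to be importable):

    from pip._vendor import requests
    from pip._vendor.cachecontrol.wrapper import CacheControl
    from pip._vendor.cachecontrol.caches.file_cache import FileCache, url_to_file_path

    cache = FileCache(".webcache")
    sess = CacheControl(requests.Session(), cache=cache)
    sess.get("https://example.com/")

    # Where the entry would live on disk (the file may not exist if the
    # response wasn't cacheable):
    print(url_to_file_path("https://example.com/", cache))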
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
new file mode 100644
index 00000000..ed705ce7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
@@ -0,0 +1,33 @@
+from __future__ import division
+
+from datetime import datetime
+from pip._vendor.cachecontrol.cache import BaseCache
+
+
+class RedisCache(BaseCache):
+
+ def __init__(self, conn):
+ self.conn = conn
+
+ def get(self, key):
+ return self.conn.get(key)
+
+ def set(self, key, value, expires=None):
+ if not expires:
+ self.conn.set(key, value)
+ else:
+ expires = expires - datetime.utcnow()
+ self.conn.setex(key, int(expires.total_seconds()), value)
+
+ def delete(self, key):
+ self.conn.delete(key)
+
+ def clear(self):
+ """Helper for clearing all the keys in a database. Use with
+ caution!"""
+ for key in self.conn.keys():
+ self.conn.delete(key)
+
+ def close(self):
+ """Redis uses connection pooling, no need to close the connection."""
+ pass
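
Wiring this up needs a live Redis connection. A sketch assuming the third-party redis package and a local server:

    import redis  # third-party client, not vendored; assumed installed
    from pip._vendor import requests
    from pip._vendor.cachecontrol.wrapper import CacheControl
    from pip._vendor.cachecontrol.caches.redis_cache import RedisCache

    conn = redis.Redis(host="localhost", port=6379)  # assumes a local server
    sess = CacheControl(requests.Session(), cache=RedisCache(conn))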
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py
new file mode 100644
index 00000000..33b5aed0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py
@@ -0,0 +1,29 @@
+try:
+ from urllib.parse import urljoin
+except ImportError:
+ from urlparse import urljoin
+
+
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+
+# Handle the case where the requests module has been patched to not have
+# urllib3 bundled as part of its source.
+try:
+ from pip._vendor.requests.packages.urllib3.response import HTTPResponse
+except ImportError:
+ from pip._vendor.urllib3.response import HTTPResponse
+
+try:
+ from pip._vendor.requests.packages.urllib3.util import is_fp_closed
+except ImportError:
+ from pip._vendor.urllib3.util import is_fp_closed
+
+# Replicate some six behaviour
+try:
+ text_type = unicode
+except NameError:
+ text_type = str
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py
new file mode 100644
index 00000000..1b2b943c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py
@@ -0,0 +1,367 @@
+"""
+The httplib2 algorithms ported for use with requests.
+"""
+import logging
+import re
+import calendar
+import time
+from email.utils import parsedate_tz
+
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .cache import DictCache
+from .serialize import Serializer
+
+
+logger = logging.getLogger(__name__)
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+
+def parse_uri(uri):
+ """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ """
+ groups = URI.match(uri).groups()
+ return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController(object):
+ """An interface to see if request should cached or not.
+ """
+
+ def __init__(
+ self, cache=None, cache_etags=True, serializer=None, status_codes=None
+ ):
+ self.cache = cache or DictCache()
+ self.cache_etags = cache_etags
+ self.serializer = serializer or Serializer()
+ self.cacheable_status_codes = status_codes or (200, 203, 300, 301)
+
+ @classmethod
+ def _urlnorm(cls, uri):
+ """Normalize the URL to create a safe key for the cache"""
+ (scheme, authority, path, query, fragment) = parse_uri(uri)
+ if not scheme or not authority:
+ raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+ scheme = scheme.lower()
+ authority = authority.lower()
+
+ if not path:
+ path = "/"
+
+ # Could do syntax based normalization of the URI before
+ # computing the digest. See Section 6.2.2 of Std 66.
+ request_uri = query and "?".join([path, query]) or path
+ defrag_uri = scheme + "://" + authority + request_uri
+
+ return defrag_uri
+
+ @classmethod
+ def cache_url(cls, uri):
+ return cls._urlnorm(uri)
+
+ def parse_cache_control(self, headers):
+ known_directives = {
+ # https://tools.ietf.org/html/rfc7234#section-5.2
+ "max-age": (int, True),
+ "max-stale": (int, False),
+ "min-fresh": (int, True),
+ "no-cache": (None, False),
+ "no-store": (None, False),
+ "no-transform": (None, False),
+ "only-if-cached": (None, False),
+ "must-revalidate": (None, False),
+ "public": (None, False),
+ "private": (None, False),
+ "proxy-revalidate": (None, False),
+ "s-maxage": (int, True),
+ }
+
+ cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
+
+ retval = {}
+
+ for cc_directive in cc_headers.split(","):
+ if not cc_directive.strip():
+ continue
+
+ parts = cc_directive.split("=", 1)
+ directive = parts[0].strip()
+
+ try:
+ typ, required = known_directives[directive]
+ except KeyError:
+ logger.debug("Ignoring unknown cache-control directive: %s", directive)
+ continue
+
+ if not typ or not required:
+ retval[directive] = None
+ if typ:
+ try:
+ retval[directive] = typ(parts[1].strip())
+ except IndexError:
+ if required:
+ logger.debug(
+ "Missing value for cache-control " "directive: %s",
+ directive,
+ )
+ except ValueError:
+ logger.debug(
+ "Invalid value for cache-control directive " "%s, must be %s",
+ directive,
+ typ.__name__,
+ )
+
+ return retval
+
+ def cached_request(self, request):
+ """
+ Return a cached response if it exists in the cache, otherwise
+ return False.
+ """
+ cache_url = self.cache_url(request.url)
+ logger.debug('Looking up "%s" in the cache', cache_url)
+ cc = self.parse_cache_control(request.headers)
+
+ # Bail out if the request insists on fresh data
+ if "no-cache" in cc:
+ logger.debug('Request header has "no-cache", cache bypassed')
+ return False
+
+ if "max-age" in cc and cc["max-age"] == 0:
+ logger.debug('Request header has "max_age" as 0, cache bypassed')
+ return False
+
+ # Request allows serving from the cache, let's see if we find something
+ cache_data = self.cache.get(cache_url)
+ if cache_data is None:
+ logger.debug("No cache entry available")
+ return False
+
+ # Check whether it can be deserialized
+ resp = self.serializer.loads(request, cache_data)
+ if not resp:
+ logger.warning("Cache entry deserialization failed, entry ignored")
+ return False
+
+ # If we have a cached 301, return it immediately. We don't
+ # need to test our response for other headers b/c it is
+ # intrinsically "cacheable" as it is Permanent.
+ # See:
+ # https://tools.ietf.org/html/rfc7231#section-6.4.2
+ #
+ # Client can try to refresh the value by repeating the request
+ # with cache busting headers as usual (ie no-cache).
+ if resp.status == 301:
+ msg = (
+ 'Returning cached "301 Moved Permanently" response '
+ "(ignoring date and etag information)"
+ )
+ logger.debug(msg)
+ return resp
+
+ headers = CaseInsensitiveDict(resp.headers)
+ if not headers or "date" not in headers:
+ if "etag" not in headers:
+ # Without date or etag, the cached response can never be used
+ # and should be deleted.
+ logger.debug("Purging cached response: no date or etag")
+ self.cache.delete(cache_url)
+ logger.debug("Ignoring cached response: no date")
+ return False
+
+ now = time.time()
+ date = calendar.timegm(parsedate_tz(headers["date"]))
+ current_age = max(0, now - date)
+ logger.debug("Current age based on date: %i", current_age)
+
+ # TODO: There is an assumption that the result will be a
+ # urllib3 response object. This may not be best since we
+ # could probably avoid instantiating or constructing the
+ # response until we know we need it.
+ resp_cc = self.parse_cache_control(headers)
+
+ # determine freshness
+ freshness_lifetime = 0
+
+ # Check the max-age pragma in the cache control header
+ if "max-age" in resp_cc:
+ freshness_lifetime = resp_cc["max-age"]
+ logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
+
+ # If there isn't a max-age, check for an expires header
+ elif "expires" in headers:
+ expires = parsedate_tz(headers["expires"])
+ if expires is not None:
+ expire_time = calendar.timegm(expires) - date
+ freshness_lifetime = max(0, expire_time)
+ logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
+
+ # Determine if we are setting freshness limit in the
+ # request. Note, this overrides what was in the response.
+ if "max-age" in cc:
+ freshness_lifetime = cc["max-age"]
+ logger.debug(
+ "Freshness lifetime from request max-age: %i", freshness_lifetime
+ )
+
+ if "min-fresh" in cc:
+ min_fresh = cc["min-fresh"]
+ # adjust our current age by our min fresh
+ current_age += min_fresh
+ logger.debug("Adjusted current age from min-fresh: %i", current_age)
+
+ # Return entry if it is fresh enough
+ if freshness_lifetime > current_age:
+ logger.debug('The response is "fresh", returning cached response')
+ logger.debug("%i > %i", freshness_lifetime, current_age)
+ return resp
+
+ # we're not fresh. If we don't have an Etag, clear it out
+ if "etag" not in headers:
+ logger.debug('The cached response is "stale" with no etag, purging')
+ self.cache.delete(cache_url)
+
+ # return the original handler
+ return False
+
+ def conditional_headers(self, request):
+ cache_url = self.cache_url(request.url)
+ resp = self.serializer.loads(request, self.cache.get(cache_url))
+ new_headers = {}
+
+ if resp:
+ headers = CaseInsensitiveDict(resp.headers)
+
+ if "etag" in headers:
+ new_headers["If-None-Match"] = headers["ETag"]
+
+ if "last-modified" in headers:
+ new_headers["If-Modified-Since"] = headers["Last-Modified"]
+
+ return new_headers
+
+ def cache_response(self, request, response, body=None, status_codes=None):
+ """
+ Algorithm for caching requests.
+
+ This assumes a urllib3 response object (see the TODO in cached_request).
+ """
+ # From httplib2: Don't cache 206's since we aren't going to
+ # handle byte range requests
+ cacheable_status_codes = status_codes or self.cacheable_status_codes
+ if response.status not in cacheable_status_codes:
+ logger.debug(
+ "Status code %s not in %s", response.status, cacheable_status_codes
+ )
+ return
+
+ response_headers = CaseInsensitiveDict(response.headers)
+
+ # If we've been given a body and the response has a valid
+ # Content-Length, check whether the body matches the expected
+ # size; if it doesn't, we'll just skip trying to cache it.
+ if (
+ body is not None
+ and "content-length" in response_headers
+ and response_headers["content-length"].isdigit()
+ and int(response_headers["content-length"]) != len(body)
+ ):
+ return
+
+ cc_req = self.parse_cache_control(request.headers)
+ cc = self.parse_cache_control(response_headers)
+
+ cache_url = self.cache_url(request.url)
+ logger.debug('Updating cache with response from "%s"', cache_url)
+
+ # Delete it from the cache if we happen to have it stored there
+ no_store = False
+ if "no-store" in cc:
+ no_store = True
+ logger.debug('Response header has "no-store"')
+ if "no-store" in cc_req:
+ no_store = True
+ logger.debug('Request header has "no-store"')
+ if no_store and self.cache.get(cache_url):
+ logger.debug('Purging existing cache entry to honor "no-store"')
+ self.cache.delete(cache_url)
+ if no_store:
+ return
+
+ # If we've been given an etag, then keep the response
+ if self.cache_etags and "etag" in response_headers:
+ logger.debug("Caching due to etag")
+ self.cache.set(
+ cache_url, self.serializer.dumps(request, response, body=body)
+ )
+
+ # Add any 301s to the cache. We do this before looking at
+ # the Date headers.
+ elif response.status == 301:
+ logger.debug("Caching permanant redirect")
+ self.cache.set(cache_url, self.serializer.dumps(request, response))
+
+ # Add to the cache if the response headers demand it. If there
+ # is no date header then we can't do anything about expiring
+ # the cache.
+ elif "date" in response_headers:
+ # cache when there is a max-age > 0
+ if "max-age" in cc and cc["max-age"] > 0:
+ logger.debug("Caching b/c date exists and max-age > 0")
+ self.cache.set(
+ cache_url, self.serializer.dumps(request, response, body=body)
+ )
+
+ # If the request can expire, it means we should cache it
+ # in the meantime.
+ elif "expires" in response_headers:
+ if response_headers["expires"]:
+ logger.debug("Caching b/c of expires header")
+ self.cache.set(
+ cache_url, self.serializer.dumps(request, response, body=body)
+ )
+
+ def update_cached_response(self, request, response):
+ """On a 304 we will get a new set of headers that we want to
+ update our cached value with, assuming we have one.
+
+ This should only ever be called when we've sent an ETag and
+ gotten a 304 as the response.
+ """
+ cache_url = self.cache_url(request.url)
+
+ cached_response = self.serializer.loads(request, self.cache.get(cache_url))
+
+ if not cached_response:
+ # we didn't have a cached response
+ return response
+
+ # Let's update our headers with the headers from the new response:
+ # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
+ #
+ # The server isn't supposed to send headers that would make
+ # the cached body invalid. But... just in case, we'll be sure
+ # to strip out the ones we know might be problematic due to
+ # typical assumptions.
+ excluded_headers = ["content-length"]
+
+ cached_response.headers.update(
+ dict(
+ (k, v)
+ for k, v in response.headers.items()
+ if k.lower() not in excluded_headers
+ )
+ )
+
+ # we want a 200 b/c we have content via the cache
+ cached_response.status = 200
+
+ # update our cache
+ self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
+
+ return cached_response
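
Two of the controller's pure helpers can be exercised in isolation; the header values below are made up:

    from pip._vendor.cachecontrol.controller import CacheController

    cc = CacheController()
    print(cc.parse_cache_control({"cache-control": "max-age=3600, no-store"}))
    # -> {'max-age': 3600, 'no-store': None}

    print(CacheController.cache_url("https://Example.COM/path?q=1#frag"))
    # -> https://example.com/path?q=1 (scheme/host lowercased, fragment dropped)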
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py
new file mode 100644
index 00000000..30ed4c5a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py
@@ -0,0 +1,80 @@
+from io import BytesIO
+
+
+class CallbackFileWrapper(object):
+ """
+ Small wrapper around a fp object which will tee everything read into a
+ buffer, and when that file is closed it will execute a callback with the
+ contents of that buffer.
+
+ All attributes are proxied to the underlying file object.
+
+ This class uses members with a double underscore (__) leading prefix so as
+ not to accidentally shadow an attribute.
+ """
+
+ def __init__(self, fp, callback):
+ self.__buf = BytesIO()
+ self.__fp = fp
+ self.__callback = callback
+
+ def __getattr__(self, name):
+ # The vagaries of garbage collection mean that self.__fp is
+ # not always set. Using __getattribute__ with the mangled
+ # private name [0] looks up the attribute value and raises an
+ # AttributeError when it doesn't exist. This stops things from
+ # infinitely recursing calls to getattr in the case where
+ # self.__fp hasn't been set.
+ #
+ # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+ fp = self.__getattribute__("_CallbackFileWrapper__fp")
+ return getattr(fp, name)
+
+ def __is_fp_closed(self):
+ try:
+ return self.__fp.fp is None
+
+ except AttributeError:
+ pass
+
+ try:
+ return self.__fp.closed
+
+ except AttributeError:
+ pass
+
+ # We just don't cache it then.
+ # TODO: Add some logging here...
+ return False
+
+ def _close(self):
+ if self.__callback:
+ self.__callback(self.__buf.getvalue())
+
+ # We assign this to None here, because otherwise we can get into
+ # really tricky problems where the CPython interpreter dead locks
+ # because the callback is holding a reference to something which
+ # has a __del__ method. Setting this to None breaks the cycle
+ # and allows the garbage collector to do its thing normally.
+ self.__callback = None
+
+ def read(self, amt=None):
+ data = self.__fp.read(amt)
+ self.__buf.write(data)
+ if self.__is_fp_closed():
+ self._close()
+
+ return data
+
+ def _safe_read(self, amt):
+ data = self.__fp._safe_read(amt)
+ if amt == 2 and data == b"\r\n":
+ # urllib executes this read to toss the CRLF at the end
+ # of the chunk.
+ return data
+
+ self.__buf.write(data)
+ if self.__is_fp_closed():
+ self._close()
+
+ return data
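
To see the tee-and-callback behaviour without a real socket, a stand-in file object is enough. FakeFP below is invented and mimics the httplib shape the wrapper probes (.fp becomes None at EOF):

    import io
    from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper

    class FakeFP(object):
        def __init__(self, data):
            self.fp = io.BytesIO(data)

        def read(self, amt=None):
            data = self.fp.read(amt)
            if not data:
                self.fp = None  # signal EOF the way httplib does
            return data

    wrapped = CallbackFileWrapper(FakeFP(b"hello"),
                                  lambda body: print("cached:", body))
    while wrapped.read(2):
        pass
    # -> cached: b'hello'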
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py
new file mode 100644
index 00000000..6c0e9790
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py
@@ -0,0 +1,135 @@
+import calendar
+import time
+
+from email.utils import formatdate, parsedate, parsedate_tz
+
+from datetime import datetime, timedelta
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta, date=None):
+ date = date or datetime.utcnow()
+ return date + delta
+
+
+def datetime_to_header(dt):
+ return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic(object):
+
+ def warning(self, response):
+ """
+ Return a valid 1xx warning header value describing the cache
+ adjustments.
+
+ The response is provided to allow warnings like 113
+ http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+ to explicitly say the response is over 24 hours old.
+ """
+ return '110 - "Response is Stale"'
+
+ def update_headers(self, response):
+ """Update the response headers with any new headers.
+
+ NOTE: This SHOULD always include some Warning header to
+ signify that the response was cached by the client, not
+ by way of the provided headers.
+ """
+ return {}
+
+ def apply(self, response):
+ updated_headers = self.update_headers(response)
+
+ if updated_headers:
+ response.headers.update(updated_headers)
+ warning_header_value = self.warning(response)
+ if warning_header_value is not None:
+ response.headers.update({"Warning": warning_header_value})
+
+ return response
+
+
+class OneDayCache(BaseHeuristic):
+ """
+ Cache the response by providing an Expires header 1 day in
+ the future.
+ """
+
+ def update_headers(self, response):
+ headers = {}
+
+ if "expires" not in response.headers:
+ date = parsedate(response.headers["date"])
+ expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
+ headers["expires"] = datetime_to_header(expires)
+ headers["cache-control"] = "public"
+ return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+ """
+ Cache **all** requests for a defined time period.
+ """
+
+ def __init__(self, **kw):
+ self.delta = timedelta(**kw)
+
+ def update_headers(self, response):
+ expires = expire_after(self.delta)
+ return {"expires": datetime_to_header(expires), "cache-control": "public"}
+
+ def warning(self, response):
+ tmpl = "110 - Automatically cached for %s. Response might be stale"
+ return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+ """
+ If there is no Expires header already, fall back on Last-Modified
+ using the heuristic from
+ http://tools.ietf.org/html/rfc7234#section-4.2.2
+ to calculate a reasonable value.
+
+ Firefox also does something like this per
+ https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+ http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+ Unlike Mozilla, we limit this to 24 hours.
+ """
+ cacheable_by_default_statuses = {
+ 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
+ }
+
+ def update_headers(self, resp):
+ headers = resp.headers
+
+ if "expires" in headers:
+ return {}
+
+ if "cache-control" in headers and headers["cache-control"] != "public":
+ return {}
+
+ if resp.status not in self.cacheable_by_default_statuses:
+ return {}
+
+ if "date" not in headers or "last-modified" not in headers:
+ return {}
+
+ date = calendar.timegm(parsedate_tz(headers["date"]))
+ last_modified = parsedate(headers["last-modified"])
+ if date is None or last_modified is None:
+ return {}
+
+ now = time.time()
+ current_age = max(0, now - date)
+ delta = date - calendar.timegm(last_modified)
+ freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
+ if freshness_lifetime <= current_age:
+ return {}
+
+ expires = date + freshness_lifetime
+ return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
+
+ def warning(self, resp):
+ return None
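
Heuristics plug into the adapter via the wrapper. A sketch that treats every response as fresh for one hour, regardless of its own headers:

    from pip._vendor import requests
    from pip._vendor.cachecontrol.wrapper import CacheControl
    from pip._vendor.cachecontrol.heuristics import ExpiresAfter

    sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(hours=1))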
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py
new file mode 100644
index 00000000..ec43ff27
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py
@@ -0,0 +1,186 @@
+import base64
+import io
+import json
+import zlib
+
+from pip._vendor import msgpack
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .compat import HTTPResponse, pickle, text_type
+
+
+def _b64_decode_bytes(b):
+ return base64.b64decode(b.encode("ascii"))
+
+
+def _b64_decode_str(s):
+ return _b64_decode_bytes(s).decode("utf8")
+
+
+class Serializer(object):
+
+ def dumps(self, request, response, body=None):
+ response_headers = CaseInsensitiveDict(response.headers)
+
+ if body is None:
+ body = response.read(decode_content=False)
+
+ # NOTE: 99% sure this is dead code. I'm only leaving it
+ # here b/c I don't have a test yet to prove
+ # it. Basically, before using
+ # `cachecontrol.filewrapper.CallbackFileWrapper`,
+ # this made an effort to reset the file handle. The
+ # `CallbackFileWrapper` short circuits this code by
+ # setting the body as the content is consumed, the
+ # result being a `body` argument is *always* passed
+ # into cache_response, and in turn,
+ # `Serializer.dump`.
+ response._fp = io.BytesIO(body)
+
+ # NOTE: This is all a bit weird, but it's really important that on
+ # Python 2.x these objects are unicode and not str, even when
+ # they contain only ascii. The problem here is that msgpack
+ # understands the difference between unicode and bytes and we
+ # have it set to differentiate between them, however Python 2
+ # doesn't know the difference. Forcing these to unicode will be
+ # enough to have msgpack know the difference.
+ data = {
+ u"response": {
+ u"body": body,
+ u"headers": dict(
+ (text_type(k), text_type(v)) for k, v in response.headers.items()
+ ),
+ u"status": response.status,
+ u"version": response.version,
+ u"reason": text_type(response.reason),
+ u"strict": response.strict,
+ u"decode_content": response.decode_content,
+ }
+ }
+
+ # Construct our vary headers
+ data[u"vary"] = {}
+ if u"vary" in response_headers:
+ varied_headers = response_headers[u"vary"].split(",")
+ for header in varied_headers:
+ header = text_type(header).strip()
+ header_value = request.headers.get(header, None)
+ if header_value is not None:
+ header_value = text_type(header_value)
+ data[u"vary"][header] = header_value
+
+ return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
+
+ def loads(self, request, data):
+ # Short circuit if we've been given an empty set of data
+ if not data:
+ return
+
+ # Determine what version of the serializer the data was serialized
+ # with
+ try:
+ ver, data = data.split(b",", 1)
+ except ValueError:
+ ver = b"cc=0"
+
+ # Make sure that our "ver" is actually a version and isn't a false
+ # positive from a , being in the data stream.
+ if ver[:3] != b"cc=":
+ data = ver + data
+ ver = b"cc=0"
+
+ # Get the version number out of the cc=N
+ ver = ver.split(b"=", 1)[-1].decode("ascii")
+
+ # Dispatch to the actual load method for the given version
+ try:
+ return getattr(self, "_loads_v{}".format(ver))(request, data)
+
+ except AttributeError:
+ # This is a version we don't have a loads function for, so we'll
+ # just treat it as a miss and return None
+ return
+
+ def prepare_response(self, request, cached):
+ """Verify our vary headers match and construct a real urllib3
+ HTTPResponse object.
+ """
+ # Special case the '*' Vary value as it means we cannot actually
+ # determine if the cached response is suitable for this request.
+ if "*" in cached.get("vary", {}):
+ return
+
+ # Ensure that the Vary headers for the cached response match our
+ # request
+ for header, value in cached.get("vary", {}).items():
+ if request.headers.get(header, None) != value:
+ return
+
+ body_raw = cached["response"].pop("body")
+
+ headers = CaseInsensitiveDict(data=cached["response"]["headers"])
+ if headers.get("transfer-encoding", "") == "chunked":
+ headers.pop("transfer-encoding")
+
+ cached["response"]["headers"] = headers
+
+ try:
+ body = io.BytesIO(body_raw)
+ except TypeError:
+ # This can happen if cachecontrol serialized to v1 format (pickle)
+ # using Python 2. A Python 2 str(byte string) will be unpickled as
+ # a Python 3 str (unicode string), which will cause the above to
+ # fail with:
+ #
+ # TypeError: 'str' does not support the buffer interface
+ body = io.BytesIO(body_raw.encode("utf8"))
+
+ return HTTPResponse(body=body, preload_content=False, **cached["response"])
+
+ def _loads_v0(self, request, data):
+ # The original legacy cache data. This doesn't contain enough
+ # information to construct everything we need, so we'll treat this as
+ # a miss.
+ return
+
+ def _loads_v1(self, request, data):
+ try:
+ cached = pickle.loads(data)
+ except ValueError:
+ return
+
+ return self.prepare_response(request, cached)
+
+ def _loads_v2(self, request, data):
+ try:
+ cached = json.loads(zlib.decompress(data).decode("utf8"))
+ except (ValueError, zlib.error):
+ return
+
+ # We need to decode the items that we've base64 encoded
+ cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
+ cached["response"]["headers"] = dict(
+ (_b64_decode_str(k), _b64_decode_str(v))
+ for k, v in cached["response"]["headers"].items()
+ )
+ cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
+ cached["vary"] = dict(
+ (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
+ for k, v in cached["vary"].items()
+ )
+
+ return self.prepare_response(request, cached)
+
+ def _loads_v3(self, request, data):
+ # Due to Python 2 encoding issues, it's impossible to know for sure
+ # exactly how to load v3 entries, thus we'll treat these as a miss so
+ # that they get rewritten out as v4 entries.
+ return
+
+ def _loads_v4(self, request, data):
+ try:
+ cached = msgpack.loads(data, encoding="utf-8")
+ except ValueError:
+ return
+
+ return self.prepare_response(request, cached)
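
The version-prefix dispatch in loads() is easy to trace with a contrived blob (payload bytes invented):

    blob = b"cc=4," + b"<msgpack bytes>"
    ver, data = blob.split(b",", 1)
    assert (ver, data) == (b"cc=4", b"<msgpack bytes>")
    print(ver.split(b"=", 1)[-1].decode("ascii"))  # -> 4, so _loads_v4 handles it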
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py
new file mode 100644
index 00000000..265bfc8b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py
@@ -0,0 +1,29 @@
+from .adapter import CacheControlAdapter
+from .cache import DictCache
+
+
+def CacheControl(
+ sess,
+ cache=None,
+ cache_etags=True,
+ serializer=None,
+ heuristic=None,
+ controller_class=None,
+ adapter_class=None,
+ cacheable_methods=None,
+):
+
+ cache = cache or DictCache()
+ adapter_class = adapter_class or CacheControlAdapter
+ adapter = adapter_class(
+ cache,
+ cache_etags=cache_etags,
+ serializer=serializer,
+ heuristic=heuristic,
+ controller_class=controller_class,
+ cacheable_methods=cacheable_methods,
+ )
+ sess.mount("http://", adapter)
+ sess.mount("https://", adapter)
+
+ return sess
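
This wrapper is the one-line entry point; everything else in the package hangs off it:

    from pip._vendor import requests
    from pip._vendor.cachecontrol.wrapper import CacheControl

    sess = CacheControl(requests.Session())
    # GETs through sess are now cached in an in-memory DictCache.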
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py
new file mode 100644
index 00000000..8ccb14e2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py
@@ -0,0 +1,3 @@
+from .core import where
+
+__version__ = "2019.06.16"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py
new file mode 100644
index 00000000..ae2aff5c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py
@@ -0,0 +1,2 @@
+from pip._vendor.certifi import where
+print(where())
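
where() returns the filesystem path of the bundled cacert.pem below. A sketch of passing it to requests explicitly (requests normally resolves this itself; URL invented):

    from pip._vendor import requests
    from pip._vendor.certifi import where

    resp = requests.get("https://example.com/", verify=where())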
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..d45c81cb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc
new file mode 100644
index 00000000..819b4374
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-37.pyc
new file mode 100644
index 00000000..c6cca931
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem
new file mode 100644
index 00000000..9ca290f5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem
@@ -0,0 +1,4618 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
+# Label: "Hongkong Post Root CA 1"
+# Serial: 1000
+# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
+# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
+# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
+# Label: "E-Tugra Certification Authority"
+# Serial: 7667447206703254355
+# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
+# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
+# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
+BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
+aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
+BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
+Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
+MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
+BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
+em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
+ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
+B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
+D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
+Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
+q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
+k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
+fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
+dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
+ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
+zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
+rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
+U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
+Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
+XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
+Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
+HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
+GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
+77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
+vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
+FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
+yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
+AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
+y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
+NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Label: "LuxTrust Global Root 2"
+# Serial: 59914338225734147123941058376788110305822489521
+# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
+# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
+# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
+-----BEGIN CERTIFICATE-----
+MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
+BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
+BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
+MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
+LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
+ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
+hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
+EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
+Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
+zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
+96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
+j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
+DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
+8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
+X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
+hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
+KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
+Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
+BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
+BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
+jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
+loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
+qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
+JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
+zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
+LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
+oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 146587175971765017618439757810265552097
+# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85
+# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8
+# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM
+f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX
+mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7
+zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P
+fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc
+vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4
+Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp
+zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO
+Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW
+k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+
+DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF
+lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW
+Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1
+d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z
+XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR
+gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3
+d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv
+J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg
+DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM
++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy
+F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9
+SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws
+E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 146587176055767053814479386953112547951
+# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b
+# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d
+# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH
+MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
+QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
+MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
+cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv
+CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg
+GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu
+XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd
+re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1
+mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K
+8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj
+x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR
+nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0
+kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok
+twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp
+8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT
+vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT
+z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA
+pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb
+pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB
+R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R
+RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk
+0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC
+5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF
+izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn
+yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 146587176140553309517047991083707763997
+# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25
+# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5
+# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5
+-----BEGIN CERTIFICATE-----
+MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout
+736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A
+DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk
+fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA
+njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 146587176229350439916519468929765261721
+# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26
+# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb
+# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd
+-----BEGIN CERTIFICATE-----
+MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu
+hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l
+xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0
+CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx
+sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
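The "MD5/SHA1/SHA256 Fingerprint" comment lines in the bundle above are digests of each certificate's DER encoding. A minimal sketch (not part of the diff, standard library only) of how such a fingerprint can be recomputed from one of these PEM blocks; it should reproduce the lowercase colon-separated values shown on the "# SHA256 Fingerprint" lines:

    import base64
    import hashlib
    import textwrap

    def sha256_fingerprint(pem: str) -> str:
        """Colon-separated SHA-256 digest of a certificate's DER bytes."""
        # Keep only the base64 body between the BEGIN/END markers.
        body = "".join(line for line in pem.splitlines()
                       if line and "-----" not in line)
        der = base64.b64decode(body)
        return ":".join(textwrap.wrap(hashlib.sha256(der).hexdigest(), 2))
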
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/core.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/core.py
new file mode 100644
index 00000000..7271acf4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/certifi/core.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem.
+"""
+import os
+
+
+def where():
+ f = os.path.dirname(__file__)
+
+ return os.path.join(f, 'cacert.pem')
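A minimal usage sketch (not part of the diff), assuming pip's vendored copy is importable as pip._vendor.certifi: where() simply resolves cacert.pem next to the module, and the returned path can seed a standard SSL context so that only the roots bundled above are trusted.

    import ssl
    from pip._vendor.certifi.core import where

    # Trust exactly the bundled roots (GTS Root R1, Certigna Root CA, ...).
    ctx = ssl.create_default_context(cafile=where())
    print(where())  # ends in .../pip/_vendor/certifi/cacert.pem
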
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py
new file mode 100644
index 00000000..0f9f820e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py
@@ -0,0 +1,39 @@
+######################## BEGIN LICENSE BLOCK ########################
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+
+from .compat import PY2, PY3
+from .universaldetector import UniversalDetector
+from .version import __version__, VERSION
+
+
+def detect(byte_str):
+ """
+ Detect the encoding of the given byte string.
+
+ :param byte_str: The byte sequence to examine.
+ :type byte_str: ``bytes`` or ``bytearray``
+ """
+ if not isinstance(byte_str, bytearray):
+ if not isinstance(byte_str, bytes):
+ raise TypeError('Expected object of type bytes or bytearray, got: '
+ '{0}'.format(type(byte_str)))
+ else:
+ byte_str = bytearray(byte_str)
+ detector = UniversalDetector()
+ detector.feed(byte_str)
+ return detector.close()
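A minimal usage sketch (not part of the diff), assuming the vendored package is importable as pip._vendor.chardet. detect() feeds the whole input to a single UniversalDetector and returns its close() result, a dict carrying at least 'encoding' and 'confidence'; short inputs may come back with low confidence or no encoding at all.

    from pip._vendor import chardet

    data = "naïve café résumé".encode("cp1252")
    result = chardet.detect(data)
    print(result["encoding"], result["confidence"])

    try:
        chardet.detect("not bytes")  # str is rejected by the isinstance checks above
    except TypeError as exc:
        print(exc)
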
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..aabe0a7c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc
new file mode 100644
index 00000000..c300d7a9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc
new file mode 100644
index 00000000..2ef0456e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc
new file mode 100644
index 00000000..4f7ad9c0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc
new file mode 100644
index 00000000..b79d1a29
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc
new file mode 100644
index 00000000..eff3716d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc
new file mode 100644
index 00000000..545e3678
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..0242c8de
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc
new file mode 100644
index 00000000..7992aaa9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc
new file mode 100644
index 00000000..9465f809
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc
new file mode 100644
index 00000000..fe9f7acd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc
new file mode 100644
index 00000000..513d507f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc
new file mode 100644
index 00000000..fc0f8b7d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc
new file mode 100644
index 00000000..ed962ea6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc
new file mode 100644
index 00000000..f3f9d0c0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc
new file mode 100644
index 00000000..6291a0d4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc
new file mode 100644
index 00000000..b729ab9a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc
new file mode 100644
index 00000000..a0ac5a3c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc
new file mode 100644
index 00000000..2815b4ea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc
new file mode 100644
index 00000000..9faa3a13
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc
new file mode 100644
index 00000000..6fa9f79c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc
new file mode 100644
index 00000000..b8bb8018
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc
new file mode 100644
index 00000000..5bf91a21
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc
new file mode 100644
index 00000000..a74a772d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc
new file mode 100644
index 00000000..6cd01eeb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc
new file mode 100644
index 00000000..e5f492aa
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc
new file mode 100644
index 00000000..53af0bff
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc
new file mode 100644
index 00000000..db52e2c9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc
new file mode 100644
index 00000000..b4a03ea0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc
new file mode 100644
index 00000000..dc596e6b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc
new file mode 100644
index 00000000..d2a1a3d7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc
new file mode 100644
index 00000000..fd3bb7f2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc
new file mode 100644
index 00000000..03e9548a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc
new file mode 100644
index 00000000..429e0829
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc
new file mode 100644
index 00000000..5b5a4eea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc
new file mode 100644
index 00000000..2e1c1458
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc
new file mode 100644
index 00000000..36584e2d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc
new file mode 100644
index 00000000..5071ef3d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-37.pyc
new file mode 100644
index 00000000..951e1569
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py
new file mode 100644
index 00000000..38f32517
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py
@@ -0,0 +1,386 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Big5 frequency table
+# by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+#
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
+# Random Distribution Ratio = 512/(5401-512) = 0.105
+#
+# The Typical Distribution Ratio is about 25% of the Ideal one, but still much higher than the RDR
+
+BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char-to-FreqOrder table
+BIG5_TABLE_SIZE = 5376
+
+BIG5_CHAR_TO_FREQ_ORDER = (
+ 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
+3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
+1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
+ 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
+3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
+4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
+5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
+ 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
+ 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
+ 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
+2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
+1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
+3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
+ 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
+3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
+2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
+ 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
+3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
+1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
+5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
+ 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
+5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
+1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
+ 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
+ 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
+3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
+3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
+ 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
+2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
+2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
+ 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
+ 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
+3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
+1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
+1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
+1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
+2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
+ 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
+4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
+1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
+5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
+2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
+ 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
+ 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
+ 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
+ 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
+5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
+ 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
+1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
+ 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
+ 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
+5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
+1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
+ 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
+3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
+4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
+3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
+ 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
+ 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
+1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
+4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
+3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
+3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
+2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
+5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
+3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
+5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
+1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
+2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
+1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
+ 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
+1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
+4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
+3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
+ 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
+ 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
+ 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
+2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
+5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
+1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
+2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
+1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
+1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
+5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
+5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
+5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
+3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
+4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
+4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
+2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
+5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
+3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
+ 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
+5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
+5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
+1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
+2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
+3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
+4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
+5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
+3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
+4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
+1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
+1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
+4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
+1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
+ 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
+1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
+1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
+3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
+ 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
+5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
+2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
+1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
+1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
+5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
+ 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
+4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
+ 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
+2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
+ 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
+1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
+1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
+ 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
+4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
+4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
+1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
+3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
+5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
+5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
+1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
+2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
+1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
+3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
+2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
+3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
+2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
+4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
+4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
+3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
+ 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
+3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
+ 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
+3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
+4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
+3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
+1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
+5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
+ 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
+5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
+1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
+ 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
+4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
+4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
+ 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
+2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
+2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
+3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
+1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
+4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
+2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
+1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
+1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
+2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
+3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
+1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
+5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
+1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
+4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
+1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
+ 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
+1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
+4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
+4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
+2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
+1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
+4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
+ 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
+5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
+2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
+3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
+4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
+ 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
+5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
+5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
+1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
+4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
+4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
+2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
+3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
+3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
+2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
+1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
+4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
+3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
+3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
+2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
+4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
+5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
+3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
+2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
+3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
+1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
+2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
+3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
+4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
+2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
+2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
+5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
+1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
+2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
+1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
+3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
+4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
+2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
+3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
+3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
+2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
+4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
+2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
+3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
+4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
+5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
+3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
+ 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
+1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
+4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
+1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
+4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
+5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
+ 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
+5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
+5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
+2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
+3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
+2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
+2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
+ 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
+1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
+4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
+3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
+3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
+ 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
+2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
+ 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
+2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
+4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
+1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
+4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
+1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
+3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
+ 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
+3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
+5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
+5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
+3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
+3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
+1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
+2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
+5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
+1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
+1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
+3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
+ 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
+1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
+4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
+5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
+2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
+3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
+ 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
+1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
+2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
+2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
+5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
+5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
+5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
+2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
+2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
+1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
+4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
+3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
+3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
+4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
+4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
+2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
+2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
+5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
+4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
+5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
+4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
+ 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
+ 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
+1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
+3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
+4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
+1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
+5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
+2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
+2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
+3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
+5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
+1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
+3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
+5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
+1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
+5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
+2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
+3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
+2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
+3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
+3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
+3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
+4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
+ 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
+2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
+4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
+3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
+5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
+1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
+5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
+ 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
+1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
+ 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
+4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
+1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
+4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
+1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
+ 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
+3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
+4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
+5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
+ 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
+3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
+ 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
+2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
+)
+
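
For reference, a minimal sketch of how this frequency table is consumed: each entry maps a character's position (its "order") to a frequency rank, and the distribution analyzer counts how many fed characters rank inside the top 512. Class names come from the chardistribution.py hunk later in this patch; the byte pairs below are hypothetical sample data, not taken from the patch.

    # Hedged sketch: feed a few (hypothetical) two-byte Big5 characters and
    # read back the distribution-based confidence.
    from pip._vendor.chardet.chardistribution import Big5DistributionAnalysis

    analysis = Big5DistributionAnalysis()
    for pair in (b'\xa4\x40', b'\xa4\x41', b'\xc9\x40'):  # hypothetical pairs
        analysis.feed(pair, 2)  # every Big5 character is two bytes wide
    # Near SURE_NO for this tiny sample; grows as frequent characters accumulate.
    print(analysis.get_confidence())
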
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py
new file mode 100644
index 00000000..98f99701
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import Big5DistributionAnalysis
+from .mbcssm import BIG5_SM_MODEL
+
+
+class Big5Prober(MultiByteCharSetProber):
+ def __init__(self):
+ super(Big5Prober, self).__init__()
+ self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
+ self.distribution_analyzer = Big5DistributionAnalysis()
+ self.reset()
+
+ @property
+ def charset_name(self):
+ return "Big5"
+
+ @property
+ def language(self):
+ return "Chinese"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py
new file mode 100644
index 00000000..c0395f4a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py
@@ -0,0 +1,233 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE,
+ EUCTW_TYPICAL_DISTRIBUTION_RATIO)
+from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE,
+ EUCKR_TYPICAL_DISTRIBUTION_RATIO)
+from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE,
+ GB2312_TYPICAL_DISTRIBUTION_RATIO)
+from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE,
+ BIG5_TYPICAL_DISTRIBUTION_RATIO)
+from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE,
+ JIS_TYPICAL_DISTRIBUTION_RATIO)
+
+
+class CharDistributionAnalysis(object):
+ ENOUGH_DATA_THRESHOLD = 1024
+ SURE_YES = 0.99
+ SURE_NO = 0.01
+ MINIMUM_DATA_THRESHOLD = 3
+
+ def __init__(self):
+        # Mapping table to get frequency order from char order (as returned
+        # by get_order())
+ self._char_to_freq_order = None
+ self._table_size = None # Size of above table
+ # This is a constant value which varies from language to language,
+ # used in calculating confidence. See
+ # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
+ # for further detail.
+ self.typical_distribution_ratio = None
+ self._done = None
+ self._total_chars = None
+ self._freq_chars = None
+ self.reset()
+
+ def reset(self):
+ """reset analyser, clear any state"""
+ # If this flag is set to True, detection is done and conclusion has
+ # been made
+ self._done = False
+ self._total_chars = 0 # Total characters encountered
+ # The number of characters whose frequency order is less than 512
+ self._freq_chars = 0
+
+ def feed(self, char, char_len):
+ """feed a character with known length"""
+ if char_len == 2:
+            # we only care about 2-byte characters in our distribution analysis
+ order = self.get_order(char)
+ else:
+ order = -1
+ if order >= 0:
+ self._total_chars += 1
+ # order is valid
+ if order < self._table_size:
+                if self._char_to_freq_order[order] < 512:
+ self._freq_chars += 1
+
+ def get_confidence(self):
+ """return confidence based on existing data"""
+ # if we didn't receive any character in our consideration range,
+        # return a negative answer
+ if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
+ return self.SURE_NO
+
+ if self._total_chars != self._freq_chars:
+ r = (self._freq_chars / ((self._total_chars - self._freq_chars)
+ * self.typical_distribution_ratio))
+ if r < self.SURE_YES:
+ return r
+
+ # normalize confidence (we don't want to be 100% sure)
+ return self.SURE_YES
+
+ def got_enough_data(self):
+        # It is not necessary to receive all data to draw a conclusion.
+        # For charset detection, a certain amount of data is enough.
+ return self._total_chars > self.ENOUGH_DATA_THRESHOLD
+
+ def get_order(self, byte_str):
+ # We do not handle characters based on the original encoding string,
+ # but convert this encoding string to a number, here called order.
+ # This allows multiple encodings of a language to share one frequency
+ # table.
+ return -1
+
+
+class EUCTWDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ super(EUCTWDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
+ self._table_size = EUCTW_TABLE_SIZE
+ self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, byte_str):
+ # for euc-TW encoding, we are interested
+ # first byte range: 0xc4 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char = byte_str[0]
+ if first_char >= 0xC4:
+ return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
+ else:
+ return -1
+
+
+class EUCKRDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ super(EUCKRDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
+ self._table_size = EUCKR_TABLE_SIZE
+ self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, byte_str):
+ # for euc-KR encoding, we are interested
+ # first byte range: 0xb0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char = byte_str[0]
+ if first_char >= 0xB0:
+ return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
+ else:
+ return -1
+
+
+class GB2312DistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ super(GB2312DistributionAnalysis, self).__init__()
+ self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
+ self._table_size = GB2312_TABLE_SIZE
+ self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, byte_str):
+ # for GB2312 encoding, we are interested
+ # first byte range: 0xb0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = byte_str[0], byte_str[1]
+ if (first_char >= 0xB0) and (second_char >= 0xA1):
+ return 94 * (first_char - 0xB0) + second_char - 0xA1
+ else:
+ return -1
+
+
+class Big5DistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ super(Big5DistributionAnalysis, self).__init__()
+ self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
+ self._table_size = BIG5_TABLE_SIZE
+ self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, byte_str):
+ # for big5 encoding, we are interested
+ # first byte range: 0xa4 -- 0xfe
+ # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = byte_str[0], byte_str[1]
+ if first_char >= 0xA4:
+ if second_char >= 0xA1:
+ return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
+ else:
+ return 157 * (first_char - 0xA4) + second_char - 0x40
+ else:
+ return -1
+
+
+class SJISDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ super(SJISDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+ self._table_size = JIS_TABLE_SIZE
+ self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, byte_str):
+ # for sjis encoding, we are interested
+ # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
+        #  second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
+ # no validation needed here. State machine has done that
+ first_char, second_char = byte_str[0], byte_str[1]
+ if (first_char >= 0x81) and (first_char <= 0x9F):
+ order = 188 * (first_char - 0x81)
+ elif (first_char >= 0xE0) and (first_char <= 0xEF):
+ order = 188 * (first_char - 0xE0 + 31)
+ else:
+ return -1
+ order = order + second_char - 0x40
+ if second_char > 0x7F:
+ order = -1
+ return order
+
+
+class EUCJPDistributionAnalysis(CharDistributionAnalysis):
+ def __init__(self):
+ super(EUCJPDistributionAnalysis, self).__init__()
+ self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
+ self._table_size = JIS_TABLE_SIZE
+ self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
+
+ def get_order(self, byte_str):
+ # for euc-JP encoding, we are interested
+ # first byte range: 0xa0 -- 0xfe
+ # second byte range: 0xa1 -- 0xfe
+ # no validation needed here. State machine has done that
+ char = byte_str[0]
+ if char >= 0xA0:
+            return 94 * (char - 0xA1) + byte_str[1] - 0xA1
+ else:
+ return -1
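
The get_order() arithmetic above flattens a two-byte sequence into a row index for the frequency table. A standalone recomputation of the Big5 case (constants copied from the hunk above; the assertions are illustrative, not from the patch):

    # Big5 rows start at first byte 0xA4; each row holds 157 cells:
    # second bytes 0x40-0x7E fill the first 63 cells, 0xA1-0xFE the rest.
    def big5_order(first, second):
        if first < 0xA4:
            return -1
        if second >= 0xA1:
            return 157 * (first - 0xA4) + (second - 0xA1) + 63
        return 157 * (first - 0xA4) + (second - 0x40)

    assert big5_order(0xA4, 0x40) == 0    # first cell of the first row
    assert big5_order(0xA4, 0xA1) == 63   # start of the upper sub-range
    assert big5_order(0xA5, 0x40) == 157  # first cell of the second row
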
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py
new file mode 100644
index 00000000..8b3738ef
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py
@@ -0,0 +1,106 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import ProbingState
+from .charsetprober import CharSetProber
+
+
+class CharSetGroupProber(CharSetProber):
+ def __init__(self, lang_filter=None):
+ super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
+ self._active_num = 0
+ self.probers = []
+ self._best_guess_prober = None
+
+ def reset(self):
+ super(CharSetGroupProber, self).reset()
+ self._active_num = 0
+ for prober in self.probers:
+ if prober:
+ prober.reset()
+ prober.active = True
+ self._active_num += 1
+ self._best_guess_prober = None
+
+ @property
+ def charset_name(self):
+ if not self._best_guess_prober:
+ self.get_confidence()
+ if not self._best_guess_prober:
+ return None
+ return self._best_guess_prober.charset_name
+
+ @property
+ def language(self):
+ if not self._best_guess_prober:
+ self.get_confidence()
+ if not self._best_guess_prober:
+ return None
+ return self._best_guess_prober.language
+
+ def feed(self, byte_str):
+ for prober in self.probers:
+ if not prober:
+ continue
+ if not prober.active:
+ continue
+ state = prober.feed(byte_str)
+ if not state:
+ continue
+ if state == ProbingState.FOUND_IT:
+ self._best_guess_prober = prober
+ return self.state
+ elif state == ProbingState.NOT_ME:
+ prober.active = False
+ self._active_num -= 1
+ if self._active_num <= 0:
+ self._state = ProbingState.NOT_ME
+ return self.state
+ return self.state
+
+ def get_confidence(self):
+ state = self.state
+ if state == ProbingState.FOUND_IT:
+ return 0.99
+ elif state == ProbingState.NOT_ME:
+ return 0.01
+ best_conf = 0.0
+ self._best_guess_prober = None
+ for prober in self.probers:
+ if not prober:
+ continue
+ if not prober.active:
+ self.logger.debug('%s not active', prober.charset_name)
+ continue
+ conf = prober.get_confidence()
+ self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf)
+ if best_conf < conf:
+ best_conf = conf
+ self._best_guess_prober = prober
+ if not self._best_guess_prober:
+ return 0.0
+ return best_conf
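
The group prober's arbitration reduces to a scan for the highest-confidence active child. A toy restatement, with hypothetical (name, confidence) pairs standing in for live probers:

    # Mirror of the loop in get_confidence(): keep the best-scoring candidate.
    candidates = [('Big5', 0.32), ('EUC-TW', 0.11), ('GB2312', 0.57)]

    best_name, best_conf = None, 0.0
    for name, conf in candidates:
        if conf > best_conf:
            best_name, best_conf = name, conf
    print(best_name, best_conf)  # -> GB2312 0.57
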
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py
new file mode 100644
index 00000000..eac4e598
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py
@@ -0,0 +1,145 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+import re
+
+from .enums import ProbingState
+
+
+class CharSetProber(object):
+
+ SHORTCUT_THRESHOLD = 0.95
+
+ def __init__(self, lang_filter=None):
+ self._state = None
+ self.lang_filter = lang_filter
+ self.logger = logging.getLogger(__name__)
+
+ def reset(self):
+ self._state = ProbingState.DETECTING
+
+ @property
+ def charset_name(self):
+ return None
+
+ def feed(self, buf):
+ pass
+
+ @property
+ def state(self):
+ return self._state
+
+ def get_confidence(self):
+ return 0.0
+
+ @staticmethod
+ def filter_high_byte_only(buf):
+ buf = re.sub(b'([\x00-\x7F])+', b' ', buf)
+ return buf
+
+ @staticmethod
+ def filter_international_words(buf):
+ """
+ We define three types of bytes:
+        alphabet: English letters [a-zA-Z]
+ international: international characters [\x80-\xFF]
+ marker: everything else [^a-zA-Z\x80-\xFF]
+
+        The input buffer can be thought of as a series of words delimited
+        by markers. This function keeps only the words that contain at
+        least one international character. All contiguous sequences of markers
+        are replaced by a single ASCII space character.
+
+ This filter applies to all scripts which do not use English characters.
+ """
+ filtered = bytearray()
+
+        # This regular expression matches only words that have at least one
+        # international character. The word may include one marker character at
+        # the end.
+ words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?',
+ buf)
+
+ for word in words:
+ filtered.extend(word[:-1])
+
+ # If the last character in the word is a marker, replace it with a
+ # space as markers shouldn't affect our analysis (they are used
+ # similarly across all languages and may thus have similar
+ # frequencies).
+ last_char = word[-1:]
+ if not last_char.isalpha() and last_char < b'\x80':
+ last_char = b' '
+ filtered.extend(last_char)
+
+ return filtered
+
+ @staticmethod
+ def filter_with_english_letters(buf):
+ """
+        Returns a copy of ``buf`` that retains only the sequences of English
+        letters and high-byte characters that are not between <> characters.
+        It also retains English letters and high-byte characters immediately
+        before occurrences of >.
+
+        This filter can be applied to all scripts that contain both English
+        letters and extended-ASCII characters, but it is currently only used
+        by ``Latin1Prober``.
+ """
+ filtered = bytearray()
+ in_tag = False
+ prev = 0
+
+ for curr in range(len(buf)):
+ # Slice here to get bytes instead of an int with Python 3
+ buf_char = buf[curr:curr + 1]
+ # Check if we're coming out of or entering an HTML tag
+ if buf_char == b'>':
+ in_tag = False
+ elif buf_char == b'<':
+ in_tag = True
+
+ # If current character is not extended-ASCII and not alphabetic...
+ if buf_char < b'\x80' and not buf_char.isalpha():
+ # ...and we're not in a tag
+ if curr > prev and not in_tag:
+ # Keep everything after last non-extended-ASCII,
+ # non-alphabetic character
+ filtered.extend(buf[prev:curr])
+ # Output a space to delimit stretch we kept
+ filtered.extend(b' ')
+ prev = curr + 1
+
+ # If we're not in a tag...
+ if not in_tag:
+ # Keep everything after last non-extended-ASCII, non-alphabetic
+ # character
+ filtered.extend(buf[prev:])
+
+ return filtered
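+
+    # Illustrative sketch (not in the original module): digits and punctuation
+    # act as delimiters, while letter/high-byte stretches outside <> tags are
+    # kept, e.g.
+    #     CharSetProber.filter_with_english_letters(b'hello, caf\xe9 123')
+    # returns bytearray(b'hello caf\xe9 ').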
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py
@@ -0,0 +1 @@
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..f221f0d7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc
new file mode 100644
index 00000000..a13c108a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py
new file mode 100644
index 00000000..c61136b6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+"""
+Script that takes one or more file paths and reports on their detected
+encodings
+
+Example::
+
+ % chardetect somefile someotherfile
+ somefile: windows-1252 with confidence 0.5
+ someotherfile: ascii with confidence 1.0
+
+If no paths are provided, it takes its input from stdin.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import sys
+
+from pip._vendor.chardet import __version__
+from pip._vendor.chardet.compat import PY2
+from pip._vendor.chardet.universaldetector import UniversalDetector
+
+
+def description_of(lines, name='stdin'):
+ """
+ Return a string describing the probable encoding of a file or
+ list of strings.
+
+ :param lines: The lines to get the encoding of.
+ :type lines: Iterable of bytes
+ :param name: Name of file or collection of lines
+ :type name: str
+ """
+ u = UniversalDetector()
+ for line in lines:
+ line = bytearray(line)
+ u.feed(line)
+        # Shortcut out of the loop to save reading further; this is
+        # particularly useful if we read a BOM.
+ if u.done:
+ break
+ u.close()
+ result = u.result
+ if PY2:
+ name = name.decode(sys.getfilesystemencoding(), 'ignore')
+ if result['encoding']:
+ return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
+ result['confidence'])
+ else:
+ return '{0}: no result'.format(name)
+
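+# Hypothetical usage sketch (not part of the original script):
+#     with open('somefile', 'rb') as f:
+#         print(description_of(f, f.name))
+# prints, for example, "somefile: utf-8 with confidence 0.99".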
+
+def main(argv=None):
+ """
+ Handles command line arguments and gets things started.
+
+ :param argv: List of arguments, as if specified on the command-line.
+ If None, ``sys.argv[1:]`` is used instead.
+ :type argv: list of str
+ """
+ # Get command line arguments
+ parser = argparse.ArgumentParser(
+ description="Takes one or more file paths and reports their detected \
+ encodings")
+ parser.add_argument('input',
+ help='File whose encoding we would like to determine. \
+ (default: stdin)',
+ type=argparse.FileType('rb'), nargs='*',
+ default=[sys.stdin if PY2 else sys.stdin.buffer])
+ parser.add_argument('--version', action='version',
+ version='%(prog)s {0}'.format(__version__))
+ args = parser.parse_args(argv)
+
+ for f in args.input:
+ if f.isatty():
+ print("You are running chardetect interactively. Press " +
+ "CTRL-D twice at the start of a blank line to signal the " +
+ "end of your input. If you want help, run chardetect " +
+ "--help\n", file=sys.stderr)
+ print(description_of(f, f.name))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py
new file mode 100644
index 00000000..68fba44f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py
@@ -0,0 +1,88 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import logging
+
+from .enums import MachineState
+
+
+class CodingStateMachine(object):
+ """
+ A state machine to verify a byte sequence for a particular encoding. For
+ each byte the detector receives, it will feed that byte to every active
+ state machine available, one byte at a time. The state machine changes its
+ state based on its previous state and the byte it receives. There are 3
+ states in a state machine that are of interest to an auto-detector:
+
+    START state: This is the state to start with, or the state reached after
+                 a legal byte sequence (i.e. a valid code point) for a
+                 character has been identified.
+
+    ME state: This indicates that the state machine identified a byte sequence
+              that is specific to the charset it is designed for and that
+              there is no other possible encoding which can contain this byte
+              sequence. This will lead to an immediate positive answer for
+              the detector.
+
+    ERROR state: This indicates that the state machine identified an illegal
+                 byte sequence for that encoding. This will lead to an
+                 immediate negative answer for this encoding. The detector
+                 will exclude this encoding from consideration from here on.
+ """
+ def __init__(self, sm):
+ self._model = sm
+ self._curr_byte_pos = 0
+ self._curr_char_len = 0
+ self._curr_state = None
+ self.logger = logging.getLogger(__name__)
+ self.reset()
+
+ def reset(self):
+ self._curr_state = MachineState.START
+
+ def next_state(self, c):
+        # for each byte we get its class;
+        # if it is the first byte, we also get the byte length
+ byte_class = self._model['class_table'][c]
+ if self._curr_state == MachineState.START:
+ self._curr_byte_pos = 0
+ self._curr_char_len = self._model['char_len_table'][byte_class]
+        # from the byte's class and the state table, we get the next state
+ curr_state = (self._curr_state * self._model['class_factor']
+ + byte_class)
+ self._curr_state = self._model['state_table'][curr_state]
+ self._curr_byte_pos += 1
+ return self._curr_state
+
+ def get_current_charlen(self):
+ return self._curr_char_len
+
+ def get_coding_state_machine(self):
+ return self._model['name']
+
+ @property
+ def language(self):
+ return self._model['language']
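+
+
+# Illustrative sketch (not part of the vendored module): a prober drives the
+# machine one byte at a time and reacts to the three states described above,
+# roughly:
+#     sm = CodingStateMachine(HZ_SM_MODEL)   # any *_SM_MODEL dict works
+#     for byte in byte_str:
+#         if sm.next_state(byte) == MachineState.ERROR:
+#             break   # byte_str cannot be this encoding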
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py
new file mode 100644
index 00000000..ddd74687
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py
@@ -0,0 +1,34 @@
+######################## BEGIN LICENSE BLOCK ########################
+# Contributor(s):
+# Dan Blanchard
+# Ian Cordasco
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+import sys
+
+
+if sys.version_info < (3, 0):
+ PY2 = True
+ PY3 = False
+ base_str = (str, unicode)
+ text_type = unicode
+else:
+ PY2 = False
+ PY3 = True
+ base_str = (bytes, str)
+ text_type = str
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py
new file mode 100644
index 00000000..efd793ab
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py
@@ -0,0 +1,49 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .chardistribution import EUCKRDistributionAnalysis
+from .codingstatemachine import CodingStateMachine
+from .mbcharsetprober import MultiByteCharSetProber
+from .mbcssm import CP949_SM_MODEL
+
+
+class CP949Prober(MultiByteCharSetProber):
+ def __init__(self):
+ super(CP949Prober, self).__init__()
+ self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
+        # NOTE: CP949 is a superset of EUC-KR, so the distribution should
+        # not be different.
+ self.distribution_analyzer = EUCKRDistributionAnalysis()
+ self.reset()
+
+ @property
+ def charset_name(self):
+ return "CP949"
+
+ @property
+ def language(self):
+ return "Korean"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py
new file mode 100644
index 00000000..04512072
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py
@@ -0,0 +1,76 @@
+"""
+All of the Enums that are used throughout the chardet package.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+
+class InputState(object):
+ """
+ This enum represents the different states a universal detector can be in.
+ """
+ PURE_ASCII = 0
+ ESC_ASCII = 1
+ HIGH_BYTE = 2
+
+
+class LanguageFilter(object):
+ """
+ This enum represents the different language filters we can apply to a
+ ``UniversalDetector``.
+ """
+ CHINESE_SIMPLIFIED = 0x01
+ CHINESE_TRADITIONAL = 0x02
+ JAPANESE = 0x04
+ KOREAN = 0x08
+ NON_CJK = 0x10
+ ALL = 0x1F
+ CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
+ CJK = CHINESE | JAPANESE | KOREAN
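+
+    # Usage sketch (not in the original module): these values are bit flags,
+    # so they can be combined with ``|``, e.g.
+    #     lang_filter = LanguageFilter.CHINESE | LanguageFilter.KOREAN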
+
+
+class ProbingState(object):
+ """
+ This enum represents the different states a prober can be in.
+ """
+ DETECTING = 0
+ FOUND_IT = 1
+ NOT_ME = 2
+
+
+class MachineState(object):
+ """
+ This enum represents the different states a state machine can be in.
+ """
+ START = 0
+ ERROR = 1
+ ITS_ME = 2
+
+
+class SequenceLikelihood(object):
+ """
+ This enum represents the likelihood of a character following the previous one.
+ """
+ NEGATIVE = 0
+ UNLIKELY = 1
+ LIKELY = 2
+ POSITIVE = 3
+
+ @classmethod
+ def get_num_categories(cls):
+ """:returns: The number of likelihood categories in the enum."""
+ return 4
+
+
+class CharacterCategory(object):
+ """
+    This enum represents the different categories that the language models
+    for ``SingleByteCharsetProber`` put characters into.
+
+ Anything less than CONTROL is considered a letter.
+ """
+ UNDEFINED = 255
+ LINE_BREAK = 254
+ SYMBOL = 253
+ DIGIT = 252
+ CONTROL = 251
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py
new file mode 100644
index 00000000..c70493f2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py
@@ -0,0 +1,101 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .codingstatemachine import CodingStateMachine
+from .enums import LanguageFilter, ProbingState, MachineState
+from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
+ ISO2022KR_SM_MODEL)
+
+
+class EscCharSetProber(CharSetProber):
+ """
+ This CharSetProber uses a "code scheme" approach for detecting encodings,
+ whereby easily recognizable escape or shift sequences are relied on to
+ identify these encodings.
+ """
+
+ def __init__(self, lang_filter=None):
+ super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
+ self.coding_sm = []
+ if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
+ self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
+ self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
+ if self.lang_filter & LanguageFilter.JAPANESE:
+ self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
+ if self.lang_filter & LanguageFilter.KOREAN:
+ self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
+ self.active_sm_count = None
+ self._detected_charset = None
+ self._detected_language = None
+ self._state = None
+ self.reset()
+
+ def reset(self):
+ super(EscCharSetProber, self).reset()
+ for coding_sm in self.coding_sm:
+ if not coding_sm:
+ continue
+ coding_sm.active = True
+ coding_sm.reset()
+ self.active_sm_count = len(self.coding_sm)
+ self._detected_charset = None
+ self._detected_language = None
+
+ @property
+ def charset_name(self):
+ return self._detected_charset
+
+ @property
+ def language(self):
+ return self._detected_language
+
+ def get_confidence(self):
+ if self._detected_charset:
+ return 0.99
+ else:
+ return 0.00
+
+ def feed(self, byte_str):
+ for c in byte_str:
+ for coding_sm in self.coding_sm:
+ if not coding_sm or not coding_sm.active:
+ continue
+ coding_state = coding_sm.next_state(c)
+ if coding_state == MachineState.ERROR:
+ coding_sm.active = False
+ self.active_sm_count -= 1
+ if self.active_sm_count <= 0:
+ self._state = ProbingState.NOT_ME
+ return self.state
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ self._detected_charset = coding_sm.get_coding_state_machine()
+ self._detected_language = coding_sm.language
+ return self.state
+
+ return self.state
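+
+
+# Illustrative sketch (not part of the vendored module; note that this prober
+# assumes a non-None lang_filter, since __init__ applies bitwise tests to it):
+# ESC $ B is the ISO-2022-JP escape for JIS X 0208, so
+#     prober = EscCharSetProber(LanguageFilter.JAPANESE)
+#     prober.feed(b'\x1b$B')    # -> ProbingState.FOUND_IT
+#     prober.charset_name       # -> 'ISO-2022-JP'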
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py
new file mode 100644
index 00000000..0069523a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py
@@ -0,0 +1,246 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import MachineState
+
+HZ_CLS = (
+1,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,0,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,4,0,5,2,0, # 78 - 7f
+1,1,1,1,1,1,1,1, # 80 - 87
+1,1,1,1,1,1,1,1, # 88 - 8f
+1,1,1,1,1,1,1,1, # 90 - 97
+1,1,1,1,1,1,1,1, # 98 - 9f
+1,1,1,1,1,1,1,1, # a0 - a7
+1,1,1,1,1,1,1,1, # a8 - af
+1,1,1,1,1,1,1,1, # b0 - b7
+1,1,1,1,1,1,1,1, # b8 - bf
+1,1,1,1,1,1,1,1, # c0 - c7
+1,1,1,1,1,1,1,1, # c8 - cf
+1,1,1,1,1,1,1,1, # d0 - d7
+1,1,1,1,1,1,1,1, # d8 - df
+1,1,1,1,1,1,1,1, # e0 - e7
+1,1,1,1,1,1,1,1, # e8 - ef
+1,1,1,1,1,1,1,1, # f0 - f7
+1,1,1,1,1,1,1,1, # f8 - ff
+)
+
+HZ_ST = (
+MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17
+ 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f
+ 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27
+ 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f
+)
+
+HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+HZ_SM_MODEL = {'class_table': HZ_CLS,
+ 'class_factor': 6,
+ 'state_table': HZ_ST,
+ 'char_len_table': HZ_CHAR_LEN_TABLE,
+ 'name': "HZ-GB-2312",
+ 'language': 'Chinese'}
+
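+# Each *_ST table in this module is a flattened matrix: CodingStateMachine
+# looks up state_table[curr_state * class_factor + byte_class], so
+# 'class_factor' is the number of byte classes (columns) per state (row).
+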
+ISO2022CN_CLS = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,0,0,0,0, # 20 - 27
+0,3,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,4,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022CN_ST = (
+MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
+MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
+MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27
+ 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f
+)
+
+ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS,
+ 'class_factor': 9,
+ 'state_table': ISO2022CN_ST,
+ 'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
+ 'name': "ISO-2022-CN",
+ 'language': 'Chinese'}
+
+ISO2022JP_CLS = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,2,2, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,7,0,0,0, # 20 - 27
+3,0,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+6,0,4,0,8,0,0,0, # 40 - 47
+0,9,5,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022JP_ST = (
+MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
+MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f
+MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47
+)
+
+ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
+
+ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS,
+ 'class_factor': 10,
+ 'state_table': ISO2022JP_ST,
+ 'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
+ 'name': "ISO-2022-JP",
+ 'language': 'Japanese'}
+
+ISO2022KR_CLS = (
+2,0,0,0,0,0,0,0, # 00 - 07
+0,0,0,0,0,0,0,0, # 08 - 0f
+0,0,0,0,0,0,0,0, # 10 - 17
+0,0,0,1,0,0,0,0, # 18 - 1f
+0,0,0,0,3,0,0,0, # 20 - 27
+0,4,0,0,0,0,0,0, # 28 - 2f
+0,0,0,0,0,0,0,0, # 30 - 37
+0,0,0,0,0,0,0,0, # 38 - 3f
+0,0,0,5,0,0,0,0, # 40 - 47
+0,0,0,0,0,0,0,0, # 48 - 4f
+0,0,0,0,0,0,0,0, # 50 - 57
+0,0,0,0,0,0,0,0, # 58 - 5f
+0,0,0,0,0,0,0,0, # 60 - 67
+0,0,0,0,0,0,0,0, # 68 - 6f
+0,0,0,0,0,0,0,0, # 70 - 77
+0,0,0,0,0,0,0,0, # 78 - 7f
+2,2,2,2,2,2,2,2, # 80 - 87
+2,2,2,2,2,2,2,2, # 88 - 8f
+2,2,2,2,2,2,2,2, # 90 - 97
+2,2,2,2,2,2,2,2, # 98 - 9f
+2,2,2,2,2,2,2,2, # a0 - a7
+2,2,2,2,2,2,2,2, # a8 - af
+2,2,2,2,2,2,2,2, # b0 - b7
+2,2,2,2,2,2,2,2, # b8 - bf
+2,2,2,2,2,2,2,2, # c0 - c7
+2,2,2,2,2,2,2,2, # c8 - cf
+2,2,2,2,2,2,2,2, # d0 - d7
+2,2,2,2,2,2,2,2, # d8 - df
+2,2,2,2,2,2,2,2, # e0 - e7
+2,2,2,2,2,2,2,2, # e8 - ef
+2,2,2,2,2,2,2,2, # f0 - f7
+2,2,2,2,2,2,2,2, # f8 - ff
+)
+
+ISO2022KR_ST = (
+MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
+MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f
+MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27
+)
+
+ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
+
+ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS,
+ 'class_factor': 6,
+ 'state_table': ISO2022KR_ST,
+ 'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
+ 'name': "ISO-2022-KR",
+ 'language': 'Korean'}
+
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py
new file mode 100644
index 00000000..20ce8f7d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py
@@ -0,0 +1,92 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import ProbingState, MachineState
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCJPDistributionAnalysis
+from .jpcntx import EUCJPContextAnalysis
+from .mbcssm import EUCJP_SM_MODEL
+
+
+class EUCJPProber(MultiByteCharSetProber):
+ def __init__(self):
+ super(EUCJPProber, self).__init__()
+ self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
+ self.distribution_analyzer = EUCJPDistributionAnalysis()
+ self.context_analyzer = EUCJPContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ super(EUCJPProber, self).reset()
+ self.context_analyzer.reset()
+
+ @property
+ def charset_name(self):
+ return "EUC-JP"
+
+ @property
+ def language(self):
+ return "Japanese"
+
+ def feed(self, byte_str):
+ for i in range(len(byte_str)):
+ # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
+ coding_state = self.coding_sm.next_state(byte_str[i])
+ if coding_state == MachineState.ERROR:
+ self.logger.debug('%s %s prober hit error at byte %s',
+ self.charset_name, self.language, i)
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ char_len = self.coding_sm.get_current_charlen()
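+                # The first byte of this call may complete a multi-byte
+                # character begun at the end of the previous feed() call;
+                # self._last_char buffers that straddling byte pair.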
+ if i == 0:
+ self._last_char[1] = byte_str[0]
+ self.context_analyzer.feed(self._last_char, char_len)
+ self.distribution_analyzer.feed(self._last_char, char_len)
+ else:
+ self.context_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+ self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+
+ self._last_char[0] = byte_str[-1]
+
+ if self.state == ProbingState.DETECTING:
+ if (self.context_analyzer.got_enough_data() and
+ (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
+
+ def get_confidence(self):
+ context_conf = self.context_analyzer.get_confidence()
+ distrib_conf = self.distribution_analyzer.get_confidence()
+ return max(context_conf, distrib_conf)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py
new file mode 100644
index 00000000..b68078cb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py
@@ -0,0 +1,195 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and computer technology
+
+# 128 --> 0.79
+# 256 --> 0.92
+# 512 --> 0.986
+# 1024 --> 0.99944
+# 2048 --> 0.99999
+#
+# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
+# Random Distribution Ratio = 512 / (2350-512) = 0.279
+#
+# Typical Distribution Ratio
+
+EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
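+# (Sketch of how this is consumed, assuming chardet's usual distribution
+# analysis: confidence is roughly freq_chars / ((total_chars - freq_chars)
+# * ratio), capped just below 1.0.)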
+
+EUCKR_TABLE_SIZE = 2352
+
+# Char to FreqOrder table
+EUCKR_CHAR_TO_FREQ_ORDER = (
+ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
+1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
+1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
+ 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
+ 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
+ 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
+1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
+ 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
+ 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
+1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
+1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
+1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
+1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
+1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
+ 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
+1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
+1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
+1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
+1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
+ 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
+1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
+ 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
+ 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
+1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
+ 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
+1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
+ 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
+ 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
+1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
+1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
+1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
+1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
+ 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
+1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
+ 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
+ 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
+1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
+1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
+1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
+1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
+1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
+1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
+ 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
+ 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
+ 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
+1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
+ 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
+1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
+ 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
+ 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
+2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
+ 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
+ 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
+2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
+2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
+2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
+ 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
+ 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
+2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
+ 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
+1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
+2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
+1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
+2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
+2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
+1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
+ 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
+2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
+2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
+ 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
+ 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
+2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
+1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
+2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
+2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
+2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
+2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
+2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
+2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
+1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
+2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
+2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
+2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
+2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
+2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
+1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
+1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
+2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
+1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
+2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
+1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
+ 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
+2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
+ 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
+2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
+ 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
+2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
+2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
+ 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
+2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
+1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
+ 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
+1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
+2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
+1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
+2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
+ 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
+2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
+1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
+2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
+1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
+2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
+1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
+ 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
+2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
+2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
+ 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
+ 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
+1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
+1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
+ 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
+2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
+2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
+ 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
+ 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
+ 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
+2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
+ 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
+ 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
+2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
+2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
+ 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
+2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
+1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
+ 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
+2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
+2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
+2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
+ 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
+ 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
+ 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
+2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
+2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
+2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
+1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
+2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
+ 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
+)
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py
new file mode 100644
index 00000000..345a060d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py
@@ -0,0 +1,47 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import EUCKR_SM_MODEL
+
+
+class EUCKRProber(MultiByteCharSetProber):
+ def __init__(self):
+ super(EUCKRProber, self).__init__()
+ self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
+ self.distribution_analyzer = EUCKRDistributionAnalysis()
+ self.reset()
+
+ @property
+ def charset_name(self):
+ return "EUC-KR"
+
+ @property
+ def language(self):
+ return "Korean"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py
new file mode 100644
index 00000000..ed7a995a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py
@@ -0,0 +1,387 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# EUCTW frequency table
+# Converted from Big5 work done by Taiwan's Mandarin Promotion Council
+# <http://www.edu.tw:81/mandr/>
+
+# 128 --> 0.42261
+# 256 --> 0.57851
+# 512 --> 0.74851
+# 1024 --> 0.89384
+# 2048 --> 0.97583
+#
+# Ideal Distribution Ratio = 0.74851 / (1-0.74851) = 2.98
+# Random Distribution Ratio = 512 / (5401-512) = 0.105
+#
+# The typical distribution ratio is about 25% of the ideal one, but still
+# much higher than the random distribution ratio.
+
+EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
+
+# Char to FreqOrder table
+EUCTW_TABLE_SIZE = 5376
+
+EUCTW_CHAR_TO_FREQ_ORDER = (
+ 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
+3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
+1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
+ 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
+3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
+4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
+7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
+ 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
+ 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
+ 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
+2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
+1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
+3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
+ 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
+1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
+3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
+2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
+ 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
+3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
+1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
+7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
+ 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
+7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
+1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
+ 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
+ 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
+3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
+3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
+ 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
+2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
+2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
+ 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
+ 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
+3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
+1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
+1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
+1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
+2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
+ 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
+4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
+1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
+7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
+2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
+ 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
+ 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
+ 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
+ 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
+7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
+ 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
+1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
+ 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
+ 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
+7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
+1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
+ 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
+3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
+4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
+3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
+ 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
+ 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
+1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
+4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
+3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
+3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
+2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
+7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
+3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
+7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
+1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
+2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
+1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
+ 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
+1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
+4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
+3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
+ 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
+ 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
+ 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
+2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
+7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
+1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
+2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
+1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
+1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
+7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
+7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
+7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
+3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
+4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
+1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
+7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
+2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
+7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
+3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
+3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
+7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
+2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
+7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
+ 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
+4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
+2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
+7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
+3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
+2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
+2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
+ 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
+2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
+1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
+1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
+2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
+1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
+7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
+7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
+2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
+4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
+1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
+7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
+ 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
+4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
+ 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
+2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
+ 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
+1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
+1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
+ 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
+3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
+3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
+1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
+3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
+7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
+7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
+1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
+2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
+1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
+3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
+2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
+3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
+2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
+4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
+4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
+3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
+ 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
+3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
+ 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
+3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
+3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
+3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
+1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
+7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
+ 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
+7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
+1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
+ 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
+4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
+3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
+ 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
+2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
+2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
+3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
+1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
+4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
+2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
+1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
+1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
+2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
+3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
+1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
+7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
+1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
+4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
+1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
+ 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
+1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
+3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
+3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
+2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
+1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
+4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
+ 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
+7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
+2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
+3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
+4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
+ 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
+7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
+7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
+1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
+4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
+3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
+2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
+3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
+3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
+2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
+1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
+4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
+3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
+3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
+2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
+4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
+7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
+3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
+2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
+3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
+1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
+2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
+3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
+4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
+2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
+2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
+7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
+1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
+2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
+1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
+3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
+4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
+2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
+3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
+3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
+2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
+4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
+2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
+3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
+4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
+7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
+3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
+ 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
+1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
+4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
+1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
+4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
+7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
+ 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
+7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
+2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
+1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
+1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
+3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
+ 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
+ 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
+ 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
+3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
+2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
+ 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
+7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
+1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
+3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
+7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
+1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
+7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
+4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
+1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
+2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
+2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
+4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
+ 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
+ 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
+3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
+3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
+1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
+2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
+7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
+1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
+1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
+3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
+ 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
+1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
+4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
+7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
+2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
+3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
+ 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
+1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
+2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
+2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
+7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
+7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
+7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
+2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
+2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
+1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
+4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
+3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
+3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
+4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
+4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
+2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
+2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
+7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
+4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
+7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
+2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
+1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
+3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
+4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
+2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
+ 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
+2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
+1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
+2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
+2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
+4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
+7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
+1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
+3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
+7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
+1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
+8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
+2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
+8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
+2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
+2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
+8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
+8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
+8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
+ 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
+8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
+4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
+3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
+8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
+1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
+8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
+ 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
+1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
+ 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
+4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
+1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
+4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
+1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
+ 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
+3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
+4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
+8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
+ 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
+3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
+ 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
+2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
+)
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py
new file mode 100644
index 00000000..35669cc4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py
@@ -0,0 +1,46 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCTWDistributionAnalysis
+from .mbcssm import EUCTW_SM_MODEL
+
+class EUCTWProber(MultiByteCharSetProber):
+    def __init__(self):
+        super(EUCTWProber, self).__init__()
+        self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
+        self.distribution_analyzer = EUCTWDistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self):
+        return "EUC-TW"
+
+    @property
+    def language(self):
+        return "Taiwan"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py
new file mode 100644
index 00000000..697837bd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py
@@ -0,0 +1,283 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# GB2312 most frequently used character table
+#
+# Char to FreqOrder table, from hz6763
+
+# (top N chars --> cumulative coverage -- increment over previous row)
+# 512  --> 0.79 -- 0.79
+# 1024 --> 0.92 -- 0.13
+# 2048 --> 0.98 -- 0.06
+# 6768 --> 1.00 -- 0.02
+#
+# Ideal Distribution Ratio = 0.79135 / (1 - 0.79135) = 3.79
+# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
+#
+# The typical distribution ratio is about 25% of the ideal one, but still
+# much higher than the RDR.
+
+GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
+
+GB2312_TABLE_SIZE = 3760
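+
+# A minimal illustrative sketch (an assumption, not part of this vendored
+# module): a distribution analyzer counts how many observed characters fall
+# in the "frequent" set (low FreqOrder values in the table below) and turns
+# that into a confidence, normalized by the typical ratio above. The name
+# _sketch_confidence and its guards are hypothetical; the real logic lives
+# in chardet's chardistribution.py.
+def _sketch_confidence(freq_chars, total_chars,
+                       typical_ratio=GB2312_TYPICAL_DISTRIBUTION_RATIO):
+    if total_chars <= 0 or freq_chars <= 0:
+        return 0.01  # too little data to judge
+    if total_chars != freq_chars:
+        r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
+        if r < 0.99:
+            return r
+    return 0.99  # never claim 100% certainty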
+
+GB2312_CHAR_TO_FREQ_ORDER = (
+1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
+2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
+2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
+ 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
+1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
+1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
+ 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
+1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
+2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
+3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
+ 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
+1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
+ 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
+2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
+ 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
+2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
+1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
+3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
+ 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
+1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
+ 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
+2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
+1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
+3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
+1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
+2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
+1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
+ 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
+3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
+3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
+ 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
+3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
+ 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
+1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
+3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
+2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
+1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
+ 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
+1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
+4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
+ 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
+3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
+3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
+ 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
+1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
+2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
+1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
+1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
+ 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
+3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
+3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
+4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
+ 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
+3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
+1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
+1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
+4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
+ 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
+ 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
+3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
+1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
+ 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
+1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
+2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
+ 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
+ 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
+ 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
+3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
+4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
+3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
+ 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
+2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
+2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
+2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
+ 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
+2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
+ 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
+ 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
+ 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
+3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
+2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
+2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
+1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
+ 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
+2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
+ 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
+ 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
+1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
+1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
+ 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
+ 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
+1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
+2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
+3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
+2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
+2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
+2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
+3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
+1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
+1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
+2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
+1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
+3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
+1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
+1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
+3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
+ 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
+2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
+1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
+4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
+1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
+1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
+3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
+1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
+ 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
+ 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
+1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
+ 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
+1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
+1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
+ 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
+3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
+4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
+3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
+2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
+2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
+1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
+3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
+2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
+1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
+1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
+ 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
+2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
+2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
+3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
+4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
+3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
+ 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
+3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
+2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
+1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
+ 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
+ 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
+3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
+4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
+2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
+1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
+1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
+ 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
+1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
+3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
+ 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
+ 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
+1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
+ 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
+1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
+ 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
+2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
+ 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
+2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
+2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
+1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
+1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
+2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
+ 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
+1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
+1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
+2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
+2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
+3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
+1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
+4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
+ 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
+ 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
+3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
+1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
+ 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
+3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
+1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
+4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
+1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
+2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
+1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
+ 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
+1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
+3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
+ 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
+2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
+ 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
+1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
+1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
+1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
+3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
+2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
+3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
+3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
+3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
+ 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
+2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
+ 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
+2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
+ 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
+1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
+ 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
+ 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
+1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
+3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
+3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
+1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
+1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
+3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
+2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
+2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
+1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
+3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
+ 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
+4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
+1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
+2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
+3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
+3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
+1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
+ 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
+ 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
+2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
+ 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
+1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
+ 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
+1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
+1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
+1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
+1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
+1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
+ 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
+ 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512
+)
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py
new file mode 100644
index 00000000..8446d2dd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py
@@ -0,0 +1,46 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import GB2312DistributionAnalysis
+from .mbcssm import GB2312_SM_MODEL
+
+class GB2312Prober(MultiByteCharSetProber):
+    def __init__(self):
+        super(GB2312Prober, self).__init__()
+        self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
+        self.distribution_analyzer = GB2312DistributionAnalysis()
+        self.reset()
+
+    @property
+    def charset_name(self):
+        return "GB2312"
+
+    @property
+    def language(self):
+        return "Chinese"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py
new file mode 100644
index 00000000..b0e1bf49
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py
@@ -0,0 +1,292 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Shy Shalom
+# Portions created by the Initial Developer are Copyright (C) 2005
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+
+# This prober doesn't actually recognize a language or a charset.
+# It is a helper prober used by the Hebrew model probers.
+
+### General ideas of the Hebrew charset recognition ###
+#
+# Four main charsets exist in Hebrew:
+# "ISO-8859-8" - Visual Hebrew
+# "windows-1255" - Logical Hebrew
+# "ISO-8859-8-I" - Logical Hebrew
+# "x-mac-hebrew" - ?? Logical Hebrew ??
+#
+# Both "ISO" charsets use a completely identical set of code points, whereas
+# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
+# these code points. windows-1255 defines additional characters in the range
+# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
+# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
+# x-mac-hebrew defines similar additional code points but with a different
+# mapping.
+#
+# As far as an average Hebrew text with no diacritics is concerned, all four
+# charsets are identical with respect to code points, meaning that for the
+# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
+# (including final letters).
+#
+# The dominant difference between these charsets is their directionality.
+# "Visual" directionality means that the text is ordered as if the renderer is
+# not aware of a BIDI rendering algorithm. The renderer sees the text and
+# draws it from left to right. The text itself when ordered naturally is read
+# backwards. A buffer of Visual Hebrew generally looks like so:
+# "[last word of first line spelled backwards] [whole line ordered backwards
+# and spelled backwards] [first word of first line spelled backwards]
+# [end of line] [last word of second line] ... etc' "
+# Punctuation marks, numbers and English text added to visual text are
+# naturally also "visual", ordered from left to right.
+#
+# "Logical" directionality means the text is ordered "naturally" according to
+# the order it is read. It is the responsibility of the renderer to display
+# the text from right to left. A BIDI algorithm is used to place general
+# punctuation marks, numbers and English text in the text.
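+#
+# A concrete example (byte values per windows-1255 / ISO-8859-8): the word
+# "shalom" (Shin, Lamed, Vav, final Mem) is the logical byte sequence
+# f9 ec e5 ed. A Visual buffer stores the same word reversed, ed e5 ec f9,
+# so the final Mem (0xed) lands at the start of the word. That is exactly
+# the kind of final-letter evidence this prober scores.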
+#
+# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
+# what little evidence I could find, it seems that its general directionality
+# is Logical.
+#
+# To sum up all of the above, the Hebrew probing mechanism knows about two
+# charsets:
+# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
+# backwards while line order is natural. For charset recognition purposes
+# the line order is unimportant (In fact, for this implementation, even
+# word order is unimportant).
+# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
+#
+# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
+# specifically identified.
+# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
+# that contain special punctuation marks or diacritics is displayed with
+# some unconverted characters showing as question marks. This problem might
+# be corrected using another model prober for x-mac-hebrew. Due to the fact
+# that x-mac-hebrew texts are so rare, writing another model prober isn't
+# worth the effort and performance hit.
+#
+#### The Prober ####
+#
+# The prober is divided between two SBCharSetProbers and a HebrewProber,
+# all of which are managed, created, fed data, inquired and deleted by the
+# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
+# fact some kind of Hebrew, Logical or Visual. The final decision about
+# which one it is comes from the HebrewProber, which combines final-letter scores
+# with the scores of the two SBCharSetProbers to produce a final answer.
+#
+# The SBCSGroupProber is responsible for stripping the original text of HTML
+# tags, English characters, numbers, low-ASCII punctuation characters, spaces
+# and new lines. It reduces any sequence of such characters to a single space.
+# The buffer fed to each prober in the SBCS group prober is pure text in
+# high-ASCII.
+# The two SBCharSetProbers (model probers) share the same language model:
+# Win1255Model.
+# The first SBCharSetProber uses the model normally as any other
+# SBCharSetProber does, to recognize windows-1255, upon which this model was
+# built. The second SBCharSetProber is told to make the pair-of-letter
+# lookup in the language model backwards. This in practice exactly simulates
+# a visual Hebrew model using the windows-1255 logical Hebrew model.
+#
+# The HebrewProber does not use any language model. All it does is look for
+# final-letter evidence suggesting the text is either logical Hebrew or visual
+# Hebrew. Detached from the model probers, the results of the HebrewProber
+# alone are meaningless. HebrewProber always returns 0.00 as confidence
+# since it never identifies a charset by itself. Instead, the pointer to the
+# HebrewProber is passed to the model probers as a helper "Name Prober".
+# When the Group prober receives a positive identification from any prober,
+# it asks for the name of the charset identified. If the prober queried is a
+# Hebrew model prober, the model prober forwards the call to the
+# HebrewProber to make the final decision. In the HebrewProber, the
+# decision is made according to the maintained final-letter scores and both
+# model probers' scores. The answer is returned in the form of the name of the
+# charset identified, either "windows-1255" or "ISO-8859-8".
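+#
+# For orientation, the wiring performed by the SBCSGroupProber looks roughly
+# like the sketch below (paraphrased; the exact name of the Win1255 model
+# constant and the SingleByteCharSetProber signature may differ between
+# chardet versions):
+#
+#     hebrew_prober = HebrewProber()
+#     logical_hebrew_prober = SingleByteCharSetProber(Win1255Model,
+#                                                     False, hebrew_prober)
+#     # Same model with reversed pair-of-letter lookups: a simulated
+#     # Visual Hebrew model on top of the logical windows-1255 model.
+#     visual_hebrew_prober = SingleByteCharSetProber(Win1255Model,
+#                                                    True, hebrew_prober)
+#     hebrew_prober.set_model_probers(logical_hebrew_prober,
+#                                     visual_hebrew_prober)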
+
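+# A minimal sketch of that wiring (an illustration following chardet's own
+# sbcsgroupprober.py; it assumes SingleByteCharSetProber -- the
+# SBCharSetProber mentioned above -- and the Win1255 Hebrew model are
+# importable from this package):
+#
+#     hebrew_prober = HebrewProber()
+#     logical = SingleByteCharSetProber(Win1255HebrewModel, False, hebrew_prober)
+#     visual = SingleByteCharSetProber(Win1255HebrewModel, True, hebrew_prober)
+#     hebrew_prober.set_model_probers(logical, visual)
+#
+# The boolean argument asks the model prober to run its pair-of-letter
+# lookups in reverse, which is what simulates the visual-Hebrew model.
+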
+class HebrewProber(CharSetProber):
+ # windows-1255 / ISO-8859-8 code points of interest
+ FINAL_KAF = 0xea
+ NORMAL_KAF = 0xeb
+ FINAL_MEM = 0xed
+ NORMAL_MEM = 0xee
+ FINAL_NUN = 0xef
+ NORMAL_NUN = 0xf0
+ FINAL_PE = 0xf3
+ NORMAL_PE = 0xf4
+ FINAL_TSADI = 0xf5
+ NORMAL_TSADI = 0xf6
+
+ # Minimum Visual vs Logical final letter score difference.
+ # If the difference is below this, don't rely solely on the final letter score
+ # distance.
+ MIN_FINAL_CHAR_DISTANCE = 5
+
+ # Minimum Visual vs Logical model score difference.
+ # If the difference is below this, don't rely at all on the model score
+ # distance.
+ MIN_MODEL_DISTANCE = 0.01
+
+ VISUAL_HEBREW_NAME = "ISO-8859-8"
+ LOGICAL_HEBREW_NAME = "windows-1255"
+
+ def __init__(self):
+ super(HebrewProber, self).__init__()
+ self._final_char_logical_score = None
+ self._final_char_visual_score = None
+ self._prev = None
+ self._before_prev = None
+ self._logical_prober = None
+ self._visual_prober = None
+ self.reset()
+
+ def reset(self):
+ self._final_char_logical_score = 0
+ self._final_char_visual_score = 0
+ # The last two characters seen in the previous buffer; self._prev and
+ # self._before_prev are initialized to a space in order to simulate
+ # a word delimiter at the beginning of the data
+ self._prev = ' '
+ self._before_prev = ' '
+ # These probers are owned by the group prober.
+
+ def set_model_probers(self, logicalProber, visualProber):
+ self._logical_prober = logicalProber
+ self._visual_prober = visualProber
+
+ def is_final(self, c):
+ return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN,
+ self.FINAL_PE, self.FINAL_TSADI]
+
+ def is_non_final(self, c):
+ # The normal Tsadi is not a good Non-Final letter due to words like
+ # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
+ # apostrophe is converted to a space in FilterWithoutEnglishLetters
+ # causing the Non-Final tsadi to appear at the end of a word even
+ # though this is not the case in the original text.
+ # The letters Pe and Kaf rarely display a related behavior of not being
+ # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
+ # for example legally end with a Non-Final Pe or Kaf. However, the
+ # benefit of these letters as Non-Final letters outweighs the damage
+ # since these words are quite rare.
+ return c in [self.NORMAL_KAF, self.NORMAL_MEM,
+ self.NORMAL_NUN, self.NORMAL_PE]
+
+ def feed(self, byte_str):
+ # Final letter analysis for logical-visual decision.
+ # Look for evidence that the received buffer is either logical Hebrew
+ # or visual Hebrew.
+ # The following cases are checked:
+ # 1) A word longer than 1 letter, ending with a final letter. This is
+ # an indication that the text is laid out "naturally" since the
+ # final letter really appears at the end. +1 for logical score.
+ # 2) A word longer than 1 letter, ending with a Non-Final letter. In
+ # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
+ # should not end with the Non-Final form of that letter. Exceptions
+ # to this rule are mentioned above in isNonFinal(). This is an
+ # indication that the text is laid out backwards. +1 for visual
+ # score
+ # 3) A word longer than 1 letter, starting with a final letter. Final
+ # letters should not appear at the beginning of a word. This is an
+ # indication that the text is laid out backwards. +1 for visual
+ # score.
+ #
+ # The visual score and logical score are accumulated throughout the
+ # text and are finally checked against each other in GetCharSetName().
+ # No checking for final letters in the middle of words is done since
+ # that case is not an indication for either Logical or Visual text.
+ #
+ # We automatically filter out all 7-bit characters (replace them with
+ # spaces) so the word boundary detection works properly. [MAP]
+
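+ # For example (a hedged illustration, not taken from the original
+ # comment): in logical order the word "shalom" ends with a final mem
+ # (0xED above); when the following space is seen, case (1) fires and
+ # the logical score gains one point. In a visually ordered stream the
+ # same word arrives reversed, so the final mem is seen right after a
+ # space and case (3) fires instead, crediting the visual score.
+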
+ if self.state == ProbingState.NOT_ME:
+ # Both model probers say it's not them. No reason to continue.
+ return ProbingState.NOT_ME
+
+ byte_str = self.filter_high_byte_only(byte_str)
+
+ for cur in byte_str:
+ if cur == ' ':
+ # We stand on a space - a word just ended
+ if self._before_prev != ' ':
+ # next-to-last char was not a space, so self._prev is not a
+ # one-letter word
+ if self.is_final(self._prev):
+ # case (1) [-2:not space][-1:final letter][cur:space]
+ self._final_char_logical_score += 1
+ elif self.is_non_final(self._prev):
+ # case (2) [-2:not space][-1:Non-Final letter][cur:space]
+ self._final_char_visual_score += 1
+ else:
+ # Not standing on a space
+ if ((self._before_prev == ' ') and
+ (self.is_final(self._prev)) and (cur != ' ')):
+ # case (3) [-2:space][-1:final letter][cur:not space]
+ self._final_char_visual_score += 1
+ self._before_prev = self._prev
+ self._prev = cur
+
+ # Keep detecting until the end of the data, or until both model
+ # probers return ProbingState.NOT_ME (handled above).
+ return ProbingState.DETECTING
+
+ @property
+ def charset_name(self):
+ # Make the decision: is it Logical or Visual?
+ # If the final letter score distance is dominant enough, rely on it.
+ finalsub = self._final_char_logical_score - self._final_char_visual_score
+ if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
+ return self.LOGICAL_HEBREW_NAME
+ if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
+ return self.VISUAL_HEBREW_NAME
+
+ # It's not dominant enough, try to rely on the model scores instead.
+ modelsub = (self._logical_prober.get_confidence()
+ - self._visual_prober.get_confidence())
+ if modelsub > self.MIN_MODEL_DISTANCE:
+ return self.LOGICAL_HEBREW_NAME
+ if modelsub < -self.MIN_MODEL_DISTANCE:
+ return self.VISUAL_HEBREW_NAME
+
+ # Still no good, back to final letter distance, maybe it'll save the
+ # day.
+ if finalsub < 0.0:
+ return self.VISUAL_HEBREW_NAME
+
+ # Either finalsub > 0 (Logical), or we simply don't know; default to
+ # Logical.
+ return self.LOGICAL_HEBREW_NAME
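+
+ # Worked example with hypothetical scores: logical=7, visual=1 gives
+ # finalsub=6 >= MIN_FINAL_CHAR_DISTANCE, so "windows-1255" would be
+ # returned without the model probers being consulted at all.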
+
+ @property
+ def language(self):
+ return 'Hebrew'
+
+ @property
+ def state(self):
+ # Remain active as long as any of the model probers are active.
+ if (self._logical_prober.state == ProbingState.NOT_ME) and \
+ (self._visual_prober.state == ProbingState.NOT_ME):
+ return ProbingState.NOT_ME
+ return ProbingState.DETECTING
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py
new file mode 100644
index 00000000..83fc082b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py
@@ -0,0 +1,325 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# Sampled from about 20M of text material, including literature and computer
+# technology.
+#
+# Japanese frequency table, applied to both S-JIS and EUC-JP.
+# Characters are listed in order of frequency.
+
+# 128 --> 0.77094
+# 256 --> 0.85710
+# 512 --> 0.92635
+# 1024 --> 0.97130
+# 2048 --> 0.99431
+#
+# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
+# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
+#
+# Typical Distribution Ratio, 25% of IDR
+
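+# (25% of the ideal ratio above: 12.58 * 0.25 ~= 3.15, rounded to the 3.0
+# used below.)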
+JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
+
+# Char to FreqOrder table
+JIS_TABLE_SIZE = 4368
+
+JIS_CHAR_TO_FREQ_ORDER = (
+ 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
+3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
+1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
+2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
+2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
+5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
+1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
+5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
+5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
+5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
+5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
+5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
+5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
+1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
+1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
+1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
+2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
+3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
+3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
+ 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
+ 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
+1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
+ 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
+5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
+ 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
+ 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
+ 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
+ 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
+ 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
+5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
+5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
+5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
+4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
+5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
+5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
+5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
+5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
+5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
+5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
+5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
+5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
+5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
+3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
+5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
+5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
+5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
+5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
+5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
+5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
+5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
+5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
+5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
+5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
+5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
+5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
+5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
+5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
+5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
+5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
+5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
+5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
+5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
+5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
+5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
+5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
+5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
+5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
+5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
+5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
+5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
+5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
+5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
+5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
+5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
+5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
+5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
+5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
+5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
+5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
+5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
+5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
+6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
+6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
+6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
+6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
+6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
+6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
+6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
+6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
+4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
+ 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
+ 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
+1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
+1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
+ 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
+3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
+3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
+ 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
+3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
+3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
+ 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
+2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
+ 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
+3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
+1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
+ 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
+1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
+ 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
+2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
+2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
+2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
+2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
+1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
+1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
+1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
+1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
+2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
+1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
+2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
+1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
+1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
+1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
+1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
+1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
+1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
+ 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
+ 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
+1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
+2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
+2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
+2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
+3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
+3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
+ 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
+3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
+1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
+ 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
+2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
+1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
+ 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
+3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
+4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
+2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
+1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
+2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
+1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
+ 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
+ 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
+1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
+2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
+2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
+2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
+3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
+1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
+2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
+ 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
+ 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
+ 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
+1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
+2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
+ 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
+1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
+1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
+ 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
+1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
+1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
+1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
+ 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
+2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
+ 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
+2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
+3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
+2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
+1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
+6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
+1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
+2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
+1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
+ 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
+ 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
+3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
+3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
+1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
+1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
+1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
+1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
+ 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
+ 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
+2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
+ 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
+3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
+2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
+ 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
+1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
+2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
+ 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
+1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
+ 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
+4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
+2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
+1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
+ 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
+1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
+2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
+ 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
+6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
+1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
+1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
+2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
+3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
+ 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
+3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
+1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
+ 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
+1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
+ 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
+3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
+ 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
+2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
+ 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
+4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
+2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
+1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
+1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
+1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
+ 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
+1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
+3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
+1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
+3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
+ 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
+ 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
+ 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
+2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
+1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
+ 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
+1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
+ 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
+1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
+ 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
+ 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
+ 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
+1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
+1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
+2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
+4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
+ 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
+1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
+ 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
+1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
+3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
+1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
+2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
+2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
+1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
+1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
+2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
+ 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
+2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
+1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
+1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
+1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
+1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
+3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
+2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
+2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
+ 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
+3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
+3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
+1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
+2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
+1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
+2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
+)
+
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py
new file mode 100644
index 00000000..20044e4b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py
@@ -0,0 +1,233 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+
+# This is the hiragana 2-character sequence table; the number in each cell
+# represents the frequency category of that sequence.
+jp2CharContext = (
+(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
+(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
+(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
+(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
+(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
+(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
+(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
+(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
+(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
+(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
+(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
+(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
+(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
+(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
+(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
+(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
+(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
+(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
+(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
+(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
+(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
+(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
+(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
+(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
+(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
+(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
+(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
+(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
+(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
+(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
+(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
+(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
+(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
+(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
+(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
+(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
+(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
+(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
+(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
+(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
+(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
+(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
+(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
+(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
+(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
+(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
+(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
+(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
+(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
+(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
+(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
+(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
+(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
+(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
+(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
+(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
+(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
+(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
+(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
+(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
+(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
+(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
+(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
+(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
+(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
+(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
+(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
+(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
+(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
+(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
+(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
+(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
+(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
+(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
+(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
+(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
+)
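+
+# For example, jp2CharContext[a][b] is the frequency category (0-5) of the
+# two-hiragana sequence whose first character has order a and whose second
+# character has order b, where the orders come from get_order() below.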
+
+class JapaneseContextAnalysis(object):
+ NUM_OF_CATEGORY = 6
+ DONT_KNOW = -1
+ ENOUGH_REL_THRESHOLD = 100
+ MAX_REL_THRESHOLD = 1000
+ MINIMUM_DATA_THRESHOLD = 4
+
+ def __init__(self):
+ self._total_rel = None
+ self._rel_sample = None
+ self._need_to_skip_char_num = None
+ self._last_char_order = None
+ self._done = None
+ self.reset()
+
+ def reset(self):
+ self._total_rel = 0 # total sequences received
+ # category counters; each integer counts sequences in its category
+ self._rel_sample = [0] * self.NUM_OF_CATEGORY
+ # if last byte in current buffer is not the last byte of a character,
+ # we need to know how many bytes to skip in next buffer
+ self._need_to_skip_char_num = 0
+ self._last_char_order = -1 # The order of previous char
+ # If this flag is set to True, detection is done and conclusion has
+ # been made
+ self._done = False
+
+ def feed(self, byte_str, num_bytes):
+ if self._done:
+ return
+
+ # The buffer we get is byte-oriented, and a character may span more
+ # than one buffer. In case the last one or two bytes of the previous
+ # buffer did not complete a character, we recorded how many bytes are
+ # needed to complete it, and we skip those bytes here. We could instead
+ # record those bytes and analyse the character once it is complete, but
+ # since one character will not make much difference, simply skipping it
+ # simplifies our logic and improves performance.
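+ # A hedged example: if a buffer ends one byte into a two-byte
+ # character, the loop below leaves i at num_bytes + 1, so
+ # _need_to_skip_char_num becomes 1 and the first byte of the next
+ # buffer is skipped.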
+ i = self._need_to_skip_char_num
+ while i < num_bytes:
+ order, char_len = self.get_order(byte_str[i:i + 2])
+ i += char_len
+ if i > num_bytes:
+ self._need_to_skip_char_num = i - num_bytes
+ self._last_char_order = -1
+ else:
+ if (order != -1) and (self._last_char_order != -1):
+ self._total_rel += 1
+ if self._total_rel > self.MAX_REL_THRESHOLD:
+ self._done = True
+ break
+ self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1
+ self._last_char_order = order
+
+ def got_enough_data(self):
+ return self._total_rel > self.ENOUGH_REL_THRESHOLD
+
+ def get_confidence(self):
+ # This is just one way to calculate confidence. It works well for me.
+ if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
+ return (self._total_rel - self._rel_sample[0]) / self._total_rel
+ else:
+ return self.DONT_KNOW
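+
+ # Worked example (hypothetical counts): with _total_rel == 100 and 18
+ # of those pairs falling in category 0, get_confidence() returns
+ # (100 - 18) / 100 == 0.82.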
+
+ def get_order(self, byte_str):
+ # Base-class stub: subclasses override this to return a (hiragana
+ # order, character length) pair; the default means "not hiragana,
+ # one byte".
+ return -1, 1
+
+class SJISContextAnalysis(JapaneseContextAnalysis):
+ def __init__(self):
+ super(SJISContextAnalysis, self).__init__()
+ self._charset_name = "SHIFT_JIS"
+
+ @property
+ def charset_name(self):
+ return self._charset_name
+
+ def get_order(self, byte_str):
+ if not byte_str:
+ return -1, 1
+ # find out current char's byte length
+ first_char = byte_str[0]
+ if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
+ char_len = 2
+ if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
+ self._charset_name = "CP932"
+ else:
+ char_len = 1
+
+ # return its order if it is hiragana
+ if len(byte_str) > 1:
+ second_char = byte_str[1]
+ if (first_char == 202) and (0x9F <= second_char <= 0xF1):
+ return second_char - 0x9F, char_len
+
+ return -1, char_len
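+
+ # For example, a lead byte of 0x87 (or 0xFA-0xFC) falls in a CP932-only
+ # extension range, so seeing one upgrades the reported charset name
+ # from "SHIFT_JIS" to "CP932".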
+
+class EUCJPContextAnalysis(JapaneseContextAnalysis):
+ def get_order(self, byte_str):
+ if not byte_str:
+ return -1, 1
+ # find out current char's byte length
+ first_char = byte_str[0]
+ if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
+ char_len = 2
+ elif first_char == 0x8F:
+ char_len = 3
+ else:
+ char_len = 1
+
+ # return its order if it is hiragana
+ if len(byte_str) > 1:
+ second_char = byte_str[1]
+ if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
+ return second_char - 0xA1, char_len
+
+ return -1, char_len
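+
+ # For example, the EUC-JP hiragana "a" is the byte pair (0xA4, 0xA2),
+ # so get_order() returns (0xA2 - 0xA1, 2) == (1, 2): hiragana order 1
+ # with a two-byte character length.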
+
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
new file mode 100644
index 00000000..2aa4fb2e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
@@ -0,0 +1,228 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to a word
+# 252: 0 - 9
+
+# Character Mapping Table:
+# This table is modified based on win1251BulgarianCharToOrderMap, so
+# only numbers below 64 are known to be valid.
+
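+# For example, in both maps below byte 0x30 ('0') maps to the digit class
+# 252, and in the 00-10 rows only the CR/LF positions carry 254 while the
+# rest are 255; only the high (0x80-0xFF) rows differ between the two
+# encodings.
+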
+Latin5_BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
+194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
+210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
+ 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
+ 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
+ 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
+ 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
+)
+
+win1251BulgarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
+110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
+253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
+116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
+206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
+221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
+ 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
+ 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
+ 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
+ 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
+ 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
+ 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 96.9392%
+# first 1024 sequences: 3.0618%
+# rest sequences: 0.2992%
+# negative sequences: 0.0020%
+BulgarianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
+3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
+0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
+0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
+0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
+0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
+0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
+2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
+3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
+3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
+1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
+3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
+1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
+2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
+2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
+3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
+1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
+2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
+2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
+1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
+2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
+2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
+2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
+1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
+2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
+1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
+3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
+1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
+3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
+1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
+2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
+1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
+2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
+1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
+2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
+1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
+2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
+1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
+2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
+1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
+0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
+1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
+1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
+1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
+0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
+0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
+1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
+1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
+1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
+1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+)
+
+Latin5BulgarianModel = {
+ 'char_to_order_map': Latin5_BulgarianCharToOrderMap,
+ 'precedence_matrix': BulgarianLangModel,
+ 'typical_positive_ratio': 0.969392,
+ 'keep_english_letter': False,
+ 'charset_name': "ISO-8859-5",
+ 'language': 'Bulgarian',
+}
+
+Win1251BulgarianModel = {
+ 'char_to_order_map': win1251BulgarianCharToOrderMap,
+ 'precedence_matrix': BulgarianLangModel,
+ 'typical_positive_ratio': 0.969392,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1251",
+ 'language': 'Bulgarian',
+}
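
Each of these model dicts pairs a 256-entry char_to_order_map with a 64x64
precedence_matrix flattened row-major (the 128 rows of 32 values above give
the 4096 entries). A byte is first translated to a frequency order; orders
below 64 mark the frequent letters, and a pair of such orders indexes the
matrix, whose values 0-3 grade how typical that two-byte sequence is. A
minimal sketch of that lookup, assuming this layout (the helper name and
scoring rule are illustrative, not chardet's actual prober logic):

def pair_score(data, model):
    """Fraction of adjacent letter pairs graded 3 ('very likely').

    Illustrative sketch only: chardet's real prober tracks more state
    (per-category counters, frequent-character ratios, and so on).
    """
    positive = total = 0
    prev = 255  # start in the control-character bucket
    for byte in data:  # iterating bytes in Python 3 yields ints 0..255
        order = model['char_to_order_map'][byte]
        if order < 64 and prev < 64:  # both bytes are frequent letters
            total += 1
            if model['precedence_matrix'][prev * 64 + order] == 3:
                positive += 1
        prev = order
    return positive / total if total else 0.0

For instance, pair_score(text.encode('iso-8859-5'), Latin5BulgarianModel)
would score a short ISO-8859-5 byte string against these Bulgarian statistics.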
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py
new file mode 100644
index 00000000..e5f9a1fd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py
@@ -0,0 +1,333 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# KOI8-R language model
+# Character Mapping Table:
+KOI8R_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90
+223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0
+238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0
+ 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0
+ 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0
+ 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0
+ 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0
+)
+
+win1251_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+)
+
+latin5_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
+)
+
+macCyrillic_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
+)
+
+IBM855_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
+206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
+ 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,
+220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,
+230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
+ 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,
+ 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
+250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
+)
+
+IBM866_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40
+155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50
+253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60
+ 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70
+ 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
+ 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
+ 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,
+191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
+207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
+223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
+ 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
+239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 97.6601%
+# first 1024 sequences: 2.3389%
+# rest sequences: 0.1237%
+# negative sequences: 0.0009%
+RussianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
+3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
+0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
+0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
+0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
+1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
+1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
+2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
+1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
+3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
+1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
+2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
+1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
+1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
+1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
+2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
+1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
+3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
+1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
+2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
+1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
+2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
+0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
+1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
+1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
+1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
+3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
+3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
+1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
+1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
+0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
+1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
+1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
+0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
+1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
+2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
+1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
+1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
+2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
+1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
+0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
+2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
+1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
+1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
+0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
+0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
+0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
+1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
+0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
+0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
+0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
+2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
+0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
+)
+
+Koi8rModel = {
+ 'char_to_order_map': KOI8R_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "KOI8-R",
+ 'language': 'Russian',
+}
+
+Win1251CyrillicModel = {
+ 'char_to_order_map': win1251_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1251",
+ 'language': 'Russian',
+}
+
+Latin5CyrillicModel = {
+ 'char_to_order_map': latin5_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "ISO-8859-5",
+ 'language': 'Russian',
+}
+
+MacCyrillicModel = {
+ 'char_to_order_map': macCyrillic_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "MacCyrillic",
+ 'language': 'Russian',
+}
+
+Ibm866Model = {
+ 'char_to_order_map': IBM866_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "IBM866",
+ 'language': 'Russian',
+}
+
+Ibm855Model = {
+ 'char_to_order_map': IBM855_char_to_order_map,
+ 'precedence_matrix': RussianLangModel,
+ 'typical_positive_ratio': 0.976601,
+ 'keep_english_letter': False,
+ 'charset_name': "IBM855",
+ 'language': 'Russian',
+}
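
All six of these dicts share the single RussianLangModel matrix; only the
byte-to-order translation differs per encoding. That is the whole trick: one
set of Russian sequence statistics arbitrates between KOI8-R, windows-1251,
ISO-8859-5, MacCyrillic, IBM866, and IBM855. Reusing the illustrative
pair_score helper sketched above (again, this is not chardet's actual
group-prober loop):

RUSSIAN_MODELS = (Koi8rModel, Win1251CyrillicModel, Latin5CyrillicModel,
                  MacCyrillicModel, Ibm866Model, Ibm855Model)

def guess_russian_charset(data):
    """Return the charset name whose model scores the bytes highest."""
    best = max(RUSSIAN_MODELS, key=lambda model: pair_score(data, model))
    return best['charset_name']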
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py
new file mode 100644
index 00000000..53322216
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py
@@ -0,0 +1,225 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin7_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
+ 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
+253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
+ 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
+253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
+253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0
+110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
+ 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
+124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
+ 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
+)
+
+win1253_char_to_order_map = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
+ 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
+253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
+ 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
+253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
+253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0
+110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
+ 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
+124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
+ 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 98.2851%
+# first 1024 sequences: 1.7001%
+# rest sequences: 0.0359%
+# negative sequences: 0.0148%
+GreekLangModel = (
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
+3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
+2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
+0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
+2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
+2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
+0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
+2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
+0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
+3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
+3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
+2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
+2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
+0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
+0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
+0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
+0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
+0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
+0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
+0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
+0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
+0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
+0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
+0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
+0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
+0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
+0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
+0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
+0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
+0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
+0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
+0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
+0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
+0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
+0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
+0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
+0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
+0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
+0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
+0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
+0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
+0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
+0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin7GreekModel = {
+ 'char_to_order_map': Latin7_char_to_order_map,
+ 'precedence_matrix': GreekLangModel,
+ 'typical_positive_ratio': 0.982851,
+ 'keep_english_letter': False,
+ 'charset_name': "ISO-8859-7",
+ 'language': 'Greek',
+}
+
+Win1253GreekModel = {
+ 'char_to_order_map': win1253_char_to_order_map,
+ 'precedence_matrix': GreekLangModel,
+ 'typical_positive_ratio': 0.982851,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1253",
+ 'language': 'Greek',
+}
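
Note that typical_positive_ratio is just the "first 512 sequences" figure
from each Model Table comment (98.2851% -> 0.982851 here): the fraction of
sequences in representative text that land in the most frequent bucket. An
observed ratio can then be normalized against it to yield a confidence, as
in this sketch (the 0.99 cap is an assumption for illustration, not a value
taken from these files):

def normalized_confidence(observed_ratio, model):
    """Scale an observed positive-sequence ratio by the model's typical
    ratio; cap at 0.99 so a lucky sample never claims certainty."""
    return min(observed_ratio / model['typical_positive_ratio'], 0.99)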
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py
new file mode 100644
index 00000000..58f4c875
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py
@@ -0,0 +1,200 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Simon Montagu
+# Portions created by the Initial Developer are Copyright (C) 2005
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Shoshannah Forbes - original C code (?)
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# Windows-1255 language model
+# Character Mapping Table:
+WIN1255_CHAR_TO_ORDER_MAP = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
+ 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
+253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
+ 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
+124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
+215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
+ 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
+106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
+ 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
+238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
+ 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
+ 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 98.4004%
+# first 1024 sequences: 1.5981%
+# rest sequences: 0.087%
+# negative sequences: 0.0015%
+HEBREW_LANG_MODEL = (
+0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
+3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
+1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
+1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
+1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
+1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
+1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
+0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
+0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
+1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
+0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
+0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
+0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
+0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
+0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
+0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
+0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
+0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
+0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
+3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
+0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
+0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
+0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
+1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
+0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
+3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
+0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
+0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
+0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
+0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
+2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
+0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
+0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
+0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
+0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
+1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
+0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
+2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
+1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
+2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
+2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
+0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
+0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
+)
+
+Win1255HebrewModel = {
+ 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP,
+ 'precedence_matrix': HEBREW_LANG_MODEL,
+ 'typical_positive_ratio': 0.984004,
+ 'keep_english_letter': False,
+ 'charset_name': "windows-1255",
+ 'language': 'Hebrew',
+}
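
Elsewhere in this vendored chardet, dicts like Win1255HebrewModel are fed to
SingleByteCharSetProber (see sbcsgroupprober.py in the same package). Hebrew
is the one model wired up twice, through a HebrewProber that separates
logical from visual (reversed) byte order; roughly as follows, though treat
the exact argument order as a sketch from memory rather than a verified
signature:

from pip._vendor.chardet.hebrewprober import HebrewProber
from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
from pip._vendor.chardet.langhebrewmodel import Win1255HebrewModel

hebrew_prober = HebrewProber()
# reversed=True models visual (right-to-left storage) Hebrew; the shared
# HebrewProber decides which direction the stream actually uses.
logical = SingleByteCharSetProber(Win1255HebrewModel, False, hebrew_prober)
visual = SingleByteCharSetProber(Win1255HebrewModel, True, hebrew_prober)
hebrew_prober.set_model_probers(logical, visual)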
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py
new file mode 100644
index 00000000..bb7c095e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py
@@ -0,0 +1,225 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbols (punctuation) that do not belong to words
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin2_HungarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
+ 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
+253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
+ 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
+159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
+175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
+191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
+ 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
+221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
+232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
+ 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
+245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
+)
+
+win1250HungarianCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
+ 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
+253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
+ 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
+161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
+177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
+191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
+ 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
+221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
+232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
+ 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
+245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 94.7368%
+# first 1024 sequences: 5.2623%
+# rest sequences: 0.8894%
+# negative sequences: 0.0009%
+HungarianLangModel = (
+0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
+3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
+3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
+3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
+0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
+0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
+3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
+0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
+0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
+3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
+1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
+1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
+1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
+3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
+2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
+2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
+2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
+2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
+2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
+3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
+2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
+2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
+2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
+1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
+1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
+3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
+1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
+1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
+2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
+2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
+2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
+3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
+2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
+1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
+1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
+2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
+2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
+1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
+1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
+2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
+1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
+1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
+2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
+2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
+2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
+1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
+1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
+1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
+0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
+2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
+2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
+1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
+2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
+1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
+1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
+2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
+2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
+2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
+1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
+2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
+0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
+0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
+2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin2HungarianModel = {
+ 'char_to_order_map': Latin2_HungarianCharToOrderMap,
+ 'precedence_matrix': HungarianLangModel,
+ 'typical_positive_ratio': 0.947368,
+ 'keep_english_letter': True,
+ 'charset_name': "ISO-8859-2",
+ 'language': 'Hungarian',
+}
+
+Win1250HungarianModel = {
+ 'char_to_order_map': win1250HungarianCharToOrderMap,
+ 'precedence_matrix': HungarianLangModel,
+ 'typical_positive_ratio': 0.947368,
+ 'keep_english_letter': True,
+ 'charset_name': "windows-1250",
+ 'language': 'Hungarian',
+}
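The two model dicts above are the hand-off point to the rest of the package:
sbcsgroupprober.py in this same vendored chardet passes each one to a
SingleByteCharSetProber. A minimal usage sketch, assuming the vendored import
paths shown below:

    from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
    from pip._vendor.chardet.langhungarianmodel import (Latin2HungarianModel,
                                                        Win1250HungarianModel)

    probers = [SingleByteCharSetProber(Latin2HungarianModel),
               SingleByteCharSetProber(Win1250HungarianModel)]
    for prober in probers:
        prober.feed(b'valamilyen magyar sz\xf6veg')  # Latin-2 sample bytes
        print(prober.charset_name, prober.get_confidence())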
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py
new file mode 100644
index 00000000..15f94c2d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py
@@ -0,0 +1,199 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbol (punctuation) that does not belong to a word
+# 252: 0 - 9
+
+# The following results for Thai were collected from a limited sample (1M).
+
+# Character Mapping Table:
+TIS620CharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
+253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
+252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
+253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
+188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
+253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
+ 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
+209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
+223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
+236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
+ 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
+ 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
+ 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
+ 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
+ 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
+)
+
+# Model Table:
+# total sequences: 100%
+# first 512 sequences: 92.6386%
+# first 1024 sequences: 7.3177%
+# rest sequences: 1.0230%
+# negative sequences: 0.0436%
+ThaiLangModel = (
+0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
+0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
+3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
+0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
+3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
+3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
+3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
+3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
+3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
+3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
+2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
+3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
+0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
+3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
+0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
+3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
+1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
+3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
+3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
+1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
+0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
+0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
+3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
+2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
+3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
+0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
+3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
+3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
+2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
+3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
+2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
+3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
+3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
+3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
+3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
+3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
+1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
+0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
+0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
+3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
+3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
+1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
+3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
+3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
+0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
+0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
+1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
+1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
+3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
+0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
+0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
+3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
+0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
+0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
+0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
+0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
+0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
+0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
+0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
+3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
+0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
+0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
+3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
+2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
+0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
+3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
+1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
+1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
+1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
+1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+TIS620ThaiModel = {
+ 'char_to_order_map': TIS620CharToOrderMap,
+ 'precedence_matrix': ThaiLangModel,
+ 'typical_positive_ratio': 0.926386,
+ 'keep_english_letter': False,
+ 'charset_name': "TIS-620",
+ 'language': 'Thai',
+}
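Each logical row of the language model above spans two printed lines of 32
values, so the tuple is a flattened 64x64 matrix. A simplified sketch of how
sbcharsetprober.py indexes it with pairs of consecutive character orders
(SAMPLE_SIZE mirrors the constant defined there):

    SAMPLE_SIZE = 64

    def sequence_category(prev_order, cur_order, matrix=ThaiLangModel):
        """Likelihood class (0-3) of two consecutive frequent characters."""
        if prev_order < SAMPLE_SIZE and cur_order < SAMPLE_SIZE:
            return matrix[prev_order * SAMPLE_SIZE + cur_order]
        return None  # infrequent characters do not enter the statistics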
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py
new file mode 100644
index 00000000..a427a457
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py
@@ -0,0 +1,193 @@
+# -*- coding: utf-8 -*-
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Communicator client code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Özgür Baskın - Turkish Language Model
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+# 255: Control characters that usually do not exist in any text
+# 254: Carriage Return / Line Feed
+# 253: symbol (punctuation) that does not belong to a word
+# 252: 0 - 9
+
+# Character Mapping Table:
+Latin5_TurkishCharToOrderMap = (
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
+255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42,
+ 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255,
+255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15,
+ 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255,
+180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,
+164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106,
+150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136,
+ 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125,
+124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119,
+ 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86,
+ 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96,
+ 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107,
+)
+
+TurkishLangModel = (
+3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3,
+3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
+3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3,
+3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1,
+3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3,
+3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1,
+3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2,
+2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1,
+3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2,
+2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,
+1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1,
+2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2,
+3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1,
+3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2,
+2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1,
+3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2,
+2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,
+3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2,
+3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
+3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3,
+0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,
+3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1,
+0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,
+3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1,
+1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1,
+3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3,
+2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3,
+2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
+3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0,
+0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0,
+1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2,
+3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1,
+1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,
+3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2,
+2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0,
+0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0,
+3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1,
+0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1,
+1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,
+1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3,
+2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1,
+2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,
+0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1,
+2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
+3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0,
+2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0,
+0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,
+3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1,
+1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
+1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2,
+0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1,
+3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1,
+0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0,
+3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2,
+1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,
+3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,
+1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2,
+2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1,
+0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0,
+3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0,
+0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0,
+3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0,
+0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0,
+3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0,
+0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0,
+0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0,
+3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0,
+0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1,
+3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,
+0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1,
+0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,
+3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
+0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0,
+3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0,
+0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0,
+3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0,
+0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0,
+0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0,
+3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0,
+0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
+3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,
+0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
+3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
+0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0,
+0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0,
+0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
+2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0,
+1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0,
+0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1,
+0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0,
+3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0,
+0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,
+2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,
+2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0,
+0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
+1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,
+0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
+2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
+0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
+)
+
+Latin5TurkishModel = {
+ 'char_to_order_map': Latin5_TurkishCharToOrderMap,
+ 'precedence_matrix': TurkishLangModel,
+ 'typical_positive_ratio': 0.970290,
+ 'keep_english_letter': True,
+ 'charset_name': "ISO-8859-9",
+ 'language': 'Turkish',
+}
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py
new file mode 100644
index 00000000..7d1e8c20
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py
@@ -0,0 +1,145 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState
+
+FREQ_CAT_NUM = 4
+
+UDF = 0 # undefined
+OTH = 1 # other
+ASC = 2 # ascii capital letter
+ASS = 3 # ascii small letter
+ACV = 4 # accent capital vowel
+ACO = 5 # accent capital other
+ASV = 6 # accent small vowel
+ASO = 7 # accent small other
+CLASS_NUM = 8 # total classes
+
+Latin1_CharToClass = (
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
+ OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
+ ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
+ ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
+ ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
+ OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
+ ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
+ ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
+ ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
+ OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
+ OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
+ UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
+ OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
+ OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
+ ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
+ ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
+ ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
+ ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
+ ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
+ ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
+ ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
+ ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
+)
+
+# 0 : illegal
+# 1 : very unlikely
+# 2 : normal
+# 3 : very likely
+Latin1ClassModel = (
+# UDF OTH ASC ASS ACV ACO ASV ASO
+ 0, 0, 0, 0, 0, 0, 0, 0, # UDF
+ 0, 3, 3, 3, 3, 3, 3, 3, # OTH
+ 0, 3, 3, 3, 3, 3, 3, 3, # ASC
+ 0, 3, 3, 3, 1, 1, 3, 3, # ASS
+ 0, 3, 3, 3, 1, 2, 1, 2, # ACV
+ 0, 3, 3, 3, 3, 3, 3, 3, # ACO
+ 0, 3, 1, 3, 1, 1, 1, 3, # ASV
+ 0, 3, 1, 3, 1, 1, 3, 3, # ASO
+)
+
+
+class Latin1Prober(CharSetProber):
+ def __init__(self):
+ super(Latin1Prober, self).__init__()
+ self._last_char_class = None
+ self._freq_counter = None
+ self.reset()
+
+ def reset(self):
+ self._last_char_class = OTH
+ self._freq_counter = [0] * FREQ_CAT_NUM
+ CharSetProber.reset(self)
+
+ @property
+ def charset_name(self):
+ return "ISO-8859-1"
+
+ @property
+ def language(self):
+ return ""
+
+ def feed(self, byte_str):
+ byte_str = self.filter_with_english_letters(byte_str)
+ for c in byte_str:
+ char_class = Latin1_CharToClass[c]
+ freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM)
+ + char_class]
+ if freq == 0:
+ self._state = ProbingState.NOT_ME
+ break
+ self._freq_counter[freq] += 1
+ self._last_char_class = char_class
+
+ return self.state
+
+ def get_confidence(self):
+ if self.state == ProbingState.NOT_ME:
+ return 0.01
+
+ total = sum(self._freq_counter)
+ if total < 0.01:
+ confidence = 0.0
+ else:
+ confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0)
+ / total)
+ if confidence < 0.0:
+ confidence = 0.0
+        # lower the confidence of latin1 so that other, more accurate
+        # detectors can take priority.
+ confidence = confidence * 0.73
+ return confidence
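None of these probers is normally driven directly; the vendored package's
public entry point runs the whole group of them. A hedged usage sketch:

    from pip._vendor import chardet

    result = chardet.detect(b'caf\xe9 au lait')  # 0xe9: Latin-1 e-acute
    print(result['encoding'], result['confidence'])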
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py
new file mode 100644
index 00000000..6256ecfd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py
@@ -0,0 +1,91 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState, MachineState
+
+
+class MultiByteCharSetProber(CharSetProber):
+ """
+ MultiByteCharSetProber
+ """
+
+ def __init__(self, lang_filter=None):
+ super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
+ self.distribution_analyzer = None
+ self.coding_sm = None
+ self._last_char = [0, 0]
+
+ def reset(self):
+ super(MultiByteCharSetProber, self).reset()
+ if self.coding_sm:
+ self.coding_sm.reset()
+ if self.distribution_analyzer:
+ self.distribution_analyzer.reset()
+ self._last_char = [0, 0]
+
+ @property
+ def charset_name(self):
+ raise NotImplementedError
+
+ @property
+ def language(self):
+ raise NotImplementedError
+
+ def feed(self, byte_str):
+ for i in range(len(byte_str)):
+ coding_state = self.coding_sm.next_state(byte_str[i])
+ if coding_state == MachineState.ERROR:
+ self.logger.debug('%s %s prober hit error at byte %s',
+ self.charset_name, self.language, i)
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ char_len = self.coding_sm.get_current_charlen()
+ if i == 0:
+ self._last_char[1] = byte_str[0]
+ self.distribution_analyzer.feed(self._last_char, char_len)
+ else:
+ self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+
+ self._last_char[0] = byte_str[-1]
+
+ if self.state == ProbingState.DETECTING:
+ if (self.distribution_analyzer.got_enough_data() and
+ (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
+
+ def get_confidence(self):
+ return self.distribution_analyzer.get_confidence()
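MultiByteCharSetProber leaves coding_sm and distribution_analyzer for its
subclasses to supply; big5prober.py in this same vendored package wires them
up roughly as follows:

    from pip._vendor.chardet.mbcharsetprober import MultiByteCharSetProber
    from pip._vendor.chardet.codingstatemachine import CodingStateMachine
    from pip._vendor.chardet.chardistribution import Big5DistributionAnalysis
    from pip._vendor.chardet.mbcssm import BIG5_SM_MODEL

    class Big5Prober(MultiByteCharSetProber):
        def __init__(self):
            super(Big5Prober, self).__init__()
            self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)       # validity
            self.distribution_analyzer = Big5DistributionAnalysis()  # frequency
            self.reset()

        @property
        def charset_name(self):
            return "Big5"

        @property
        def language(self):
            return "Chinese"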
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py
new file mode 100644
index 00000000..530abe75
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py
@@ -0,0 +1,54 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+# Proofpoint, Inc.
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .utf8prober import UTF8Prober
+from .sjisprober import SJISProber
+from .eucjpprober import EUCJPProber
+from .gb2312prober import GB2312Prober
+from .euckrprober import EUCKRProber
+from .cp949prober import CP949Prober
+from .big5prober import Big5Prober
+from .euctwprober import EUCTWProber
+
+
+class MBCSGroupProber(CharSetGroupProber):
+ def __init__(self, lang_filter=None):
+ super(MBCSGroupProber, self).__init__(lang_filter=lang_filter)
+ self.probers = [
+ UTF8Prober(),
+ SJISProber(),
+ EUCJPProber(),
+ GB2312Prober(),
+ EUCKRProber(),
+ CP949Prober(),
+ Big5Prober(),
+ EUCTWProber()
+ ]
+ self.reset()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py
new file mode 100644
index 00000000..8360d0f2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py
@@ -0,0 +1,572 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .enums import MachineState
+
+# BIG5
+
+BIG5_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 4,4,4,4,4,4,4,4, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 4,3,3,3,3,3,3,3, # a0 - a7
+ 3,3,3,3,3,3,3,3, # a8 - af
+ 3,3,3,3,3,3,3,3, # b0 - b7
+ 3,3,3,3,3,3,3,3, # b8 - bf
+ 3,3,3,3,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+BIG5_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17
+)
+
+BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0)
+
+BIG5_SM_MODEL = {'class_table': BIG5_CLS,
+ 'class_factor': 5,
+ 'state_table': BIG5_ST,
+ 'char_len_table': BIG5_CHAR_LEN_TABLE,
+ 'name': 'Big5'}
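+
+# Illustrative sketch (not upstream code): codingstatemachine.py consumes an
+# SM_MODEL dict by mapping each byte to a class via 'class_table' and then
+# looking up 'state_table'[current_state * 'class_factor' + byte_class].
+def _demo_big5_state_machine():
+    from .codingstatemachine import CodingStateMachine
+    sm = CodingStateMachine(BIG5_SM_MODEL)
+    for byte in b'\xa4\x40':  # a legal two-byte Big5 sequence
+        state = sm.next_state(byte)
+    # back at START once a complete, legal character has been consumed
+    assert state == MachineState.START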
+
+# CP949
+
+CP949_CLS = (
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
+ 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
+ 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
+ 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
+ 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
+ 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
+ 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
+ 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
+ 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
+ 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
+ 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
+)
+
+CP949_ST = (
+#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
+ MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6
+)
+
+CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
+
+CP949_SM_MODEL = {'class_table': CP949_CLS,
+ 'class_factor': 10,
+ 'state_table': CP949_ST,
+ 'char_len_table': CP949_CHAR_LEN_TABLE,
+ 'name': 'CP949'}
+
+# EUC-JP
+
+EUCJP_CLS = (
+ 4,4,4,4,4,4,4,4, # 00 - 07
+ 4,4,4,4,4,4,5,5, # 08 - 0f
+ 4,4,4,4,4,4,4,4, # 10 - 17
+ 4,4,4,5,4,4,4,4, # 18 - 1f
+ 4,4,4,4,4,4,4,4, # 20 - 27
+ 4,4,4,4,4,4,4,4, # 28 - 2f
+ 4,4,4,4,4,4,4,4, # 30 - 37
+ 4,4,4,4,4,4,4,4, # 38 - 3f
+ 4,4,4,4,4,4,4,4, # 40 - 47
+ 4,4,4,4,4,4,4,4, # 48 - 4f
+ 4,4,4,4,4,4,4,4, # 50 - 57
+ 4,4,4,4,4,4,4,4, # 58 - 5f
+ 4,4,4,4,4,4,4,4, # 60 - 67
+ 4,4,4,4,4,4,4,4, # 68 - 6f
+ 4,4,4,4,4,4,4,4, # 70 - 77
+ 4,4,4,4,4,4,4,4, # 78 - 7f
+ 5,5,5,5,5,5,5,5, # 80 - 87
+ 5,5,5,5,5,5,1,3, # 88 - 8f
+ 5,5,5,5,5,5,5,5, # 90 - 97
+ 5,5,5,5,5,5,5,5, # 98 - 9f
+ 5,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,0,5 # f8 - ff
+)
+
+EUCJP_ST = (
+ 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f
+ 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27
+)
+
+EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0)
+
+EUCJP_SM_MODEL = {'class_table': EUCJP_CLS,
+ 'class_factor': 6,
+ 'state_table': EUCJP_ST,
+ 'char_len_table': EUCJP_CHAR_LEN_TABLE,
+ 'name': 'EUC-JP'}
+
+# EUC-KR
+
+EUCKR_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,3,3,3, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,3,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 2,2,2,2,2,2,2,2, # e0 - e7
+ 2,2,2,2,2,2,2,2, # e8 - ef
+ 2,2,2,2,2,2,2,2, # f0 - f7
+ 2,2,2,2,2,2,2,0 # f8 - ff
+)
+
+EUCKR_ST = (
+ MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f
+)
+
+EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0)
+
+EUCKR_SM_MODEL = {'class_table': EUCKR_CLS,
+ 'class_factor': 4,
+ 'state_table': EUCKR_ST,
+ 'char_len_table': EUCKR_CHAR_LEN_TABLE,
+ 'name': 'EUC-KR'}
+
+# EUC-TW
+
+EUCTW_CLS = (
+ 2,2,2,2,2,2,2,2, # 00 - 07
+ 2,2,2,2,2,2,0,0, # 08 - 0f
+ 2,2,2,2,2,2,2,2, # 10 - 17
+ 2,2,2,0,2,2,2,2, # 18 - 1f
+ 2,2,2,2,2,2,2,2, # 20 - 27
+ 2,2,2,2,2,2,2,2, # 28 - 2f
+ 2,2,2,2,2,2,2,2, # 30 - 37
+ 2,2,2,2,2,2,2,2, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,2, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,6,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,3,4,4,4,4,4,4, # a0 - a7
+ 5,5,1,1,1,1,1,1, # a8 - af
+ 1,1,1,1,1,1,1,1, # b0 - b7
+ 1,1,1,1,1,1,1,1, # b8 - bf
+ 1,1,3,1,3,3,3,3, # c0 - c7
+ 3,3,3,3,3,3,3,3, # c8 - cf
+ 3,3,3,3,3,3,3,3, # d0 - d7
+ 3,3,3,3,3,3,3,3, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,3,3,3, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,3,3,0 # f8 - ff
+)
+
+EUCTW_ST = (
+ MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17
+ MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+ 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27
+ MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
+)
+
+EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3)
+
+EUCTW_SM_MODEL = {'class_table': EUCTW_CLS,
+ 'class_factor': 7,
+ 'state_table': EUCTW_ST,
+ 'char_len_table': EUCTW_CHAR_LEN_TABLE,
+ 'name': 'x-euc-tw'}
+
+# GB2312
+
+GB2312_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 3,3,3,3,3,3,3,3, # 30 - 37
+ 3,3,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,4, # 78 - 7f
+ 5,6,6,6,6,6,6,6, # 80 - 87
+ 6,6,6,6,6,6,6,6, # 88 - 8f
+ 6,6,6,6,6,6,6,6, # 90 - 97
+ 6,6,6,6,6,6,6,6, # 98 - 9f
+ 6,6,6,6,6,6,6,6, # a0 - a7
+ 6,6,6,6,6,6,6,6, # a8 - af
+ 6,6,6,6,6,6,6,6, # b0 - b7
+ 6,6,6,6,6,6,6,6, # b8 - bf
+ 6,6,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 6,6,6,6,6,6,6,6, # e0 - e7
+ 6,6,6,6,6,6,6,6, # e8 - ef
+ 6,6,6,6,6,6,6,6, # f0 - f7
+ 6,6,6,6,6,6,6,0 # f8 - ff
+)
+
+GB2312_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17
+ 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+ MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
+)
+
+# To be accurate, the length of class 6 can be either 2 or 4.
+# But it is not necessary to discriminate between the two since
+# it is used for frequency analysis only, and we are validating
+# each code range there as well. So it is safe to set it to
+# 2 here.
+GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2)
+
+GB2312_SM_MODEL = {'class_table': GB2312_CLS,
+ 'class_factor': 7,
+ 'state_table': GB2312_ST,
+ 'char_len_table': GB2312_CHAR_LEN_TABLE,
+ 'name': 'GB2312'}
+
+# Shift_JIS
+
+SJIS_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 2,2,2,2,2,2,2,2, # 40 - 47
+ 2,2,2,2,2,2,2,2, # 48 - 4f
+ 2,2,2,2,2,2,2,2, # 50 - 57
+ 2,2,2,2,2,2,2,2, # 58 - 5f
+ 2,2,2,2,2,2,2,2, # 60 - 67
+ 2,2,2,2,2,2,2,2, # 68 - 6f
+ 2,2,2,2,2,2,2,2, # 70 - 77
+ 2,2,2,2,2,2,2,1, # 78 - 7f
+ 3,3,3,3,3,2,2,3, # 80 - 87
+ 3,3,3,3,3,3,3,3, # 88 - 8f
+ 3,3,3,3,3,3,3,3, # 90 - 97
+ 3,3,3,3,3,3,3,3, # 98 - 9f
+    # 0xa0 is illegal in Shift_JIS encoding, but some pages do
+    # contain such bytes, so we need to be more forgiving of errors.
+ 2,2,2,2,2,2,2,2, # a0 - a7
+ 2,2,2,2,2,2,2,2, # a8 - af
+ 2,2,2,2,2,2,2,2, # b0 - b7
+ 2,2,2,2,2,2,2,2, # b8 - bf
+ 2,2,2,2,2,2,2,2, # c0 - c7
+ 2,2,2,2,2,2,2,2, # c8 - cf
+ 2,2,2,2,2,2,2,2, # d0 - d7
+ 2,2,2,2,2,2,2,2, # d8 - df
+ 3,3,3,3,3,3,3,3, # e0 - e7
+ 3,3,3,3,3,4,4,4, # e8 - ef
+ 3,3,3,3,3,3,3,3, # f0 - f7
+    3,3,3,3,3,0,0,0   # f8 - ff
+)
+
+SJIS_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17
+)
+
+SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0)
+
+SJIS_SM_MODEL = {'class_table': SJIS_CLS,
+ 'class_factor': 6,
+ 'state_table': SJIS_ST,
+ 'char_len_table': SJIS_CHAR_LEN_TABLE,
+ 'name': 'Shift_JIS'}
+
+# UCS2-BE
+
+UCS2BE_CLS = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2BE_ST = (
+ 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17
+ 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f
+ 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27
+ 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f
+ 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37
+)
+
+UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2)
+
+UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS,
+ 'class_factor': 6,
+ 'state_table': UCS2BE_ST,
+ 'char_len_table': UCS2BE_CHAR_LEN_TABLE,
+ 'name': 'UTF-16BE'}
+
+# UCS2-LE
+
+UCS2LE_CLS = (
+ 0,0,0,0,0,0,0,0, # 00 - 07
+ 0,0,1,0,0,2,0,0, # 08 - 0f
+ 0,0,0,0,0,0,0,0, # 10 - 17
+ 0,0,0,3,0,0,0,0, # 18 - 1f
+ 0,0,0,0,0,0,0,0, # 20 - 27
+ 0,3,3,3,3,3,0,0, # 28 - 2f
+ 0,0,0,0,0,0,0,0, # 30 - 37
+ 0,0,0,0,0,0,0,0, # 38 - 3f
+ 0,0,0,0,0,0,0,0, # 40 - 47
+ 0,0,0,0,0,0,0,0, # 48 - 4f
+ 0,0,0,0,0,0,0,0, # 50 - 57
+ 0,0,0,0,0,0,0,0, # 58 - 5f
+ 0,0,0,0,0,0,0,0, # 60 - 67
+ 0,0,0,0,0,0,0,0, # 68 - 6f
+ 0,0,0,0,0,0,0,0, # 70 - 77
+ 0,0,0,0,0,0,0,0, # 78 - 7f
+ 0,0,0,0,0,0,0,0, # 80 - 87
+ 0,0,0,0,0,0,0,0, # 88 - 8f
+ 0,0,0,0,0,0,0,0, # 90 - 97
+ 0,0,0,0,0,0,0,0, # 98 - 9f
+ 0,0,0,0,0,0,0,0, # a0 - a7
+ 0,0,0,0,0,0,0,0, # a8 - af
+ 0,0,0,0,0,0,0,0, # b0 - b7
+ 0,0,0,0,0,0,0,0, # b8 - bf
+ 0,0,0,0,0,0,0,0, # c0 - c7
+ 0,0,0,0,0,0,0,0, # c8 - cf
+ 0,0,0,0,0,0,0,0, # d0 - d7
+ 0,0,0,0,0,0,0,0, # d8 - df
+ 0,0,0,0,0,0,0,0, # e0 - e7
+ 0,0,0,0,0,0,0,0, # e8 - ef
+ 0,0,0,0,0,0,0,0, # f0 - f7
+ 0,0,0,0,0,0,4,5 # f8 - ff
+)
+
+UCS2LE_ST = (
+ 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
+ MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17
+ 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f
+ 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27
+ 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f
+ 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37
+)
+
+UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2)
+
+UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS,
+ 'class_factor': 6,
+ 'state_table': UCS2LE_ST,
+ 'char_len_table': UCS2LE_CHAR_LEN_TABLE,
+ 'name': 'UTF-16LE'}
+
+# UTF-8
+
+UTF8_CLS = (
+ 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
+ 1,1,1,1,1,1,0,0, # 08 - 0f
+ 1,1,1,1,1,1,1,1, # 10 - 17
+ 1,1,1,0,1,1,1,1, # 18 - 1f
+ 1,1,1,1,1,1,1,1, # 20 - 27
+ 1,1,1,1,1,1,1,1, # 28 - 2f
+ 1,1,1,1,1,1,1,1, # 30 - 37
+ 1,1,1,1,1,1,1,1, # 38 - 3f
+ 1,1,1,1,1,1,1,1, # 40 - 47
+ 1,1,1,1,1,1,1,1, # 48 - 4f
+ 1,1,1,1,1,1,1,1, # 50 - 57
+ 1,1,1,1,1,1,1,1, # 58 - 5f
+ 1,1,1,1,1,1,1,1, # 60 - 67
+ 1,1,1,1,1,1,1,1, # 68 - 6f
+ 1,1,1,1,1,1,1,1, # 70 - 77
+ 1,1,1,1,1,1,1,1, # 78 - 7f
+ 2,2,2,2,3,3,3,3, # 80 - 87
+ 4,4,4,4,4,4,4,4, # 88 - 8f
+ 4,4,4,4,4,4,4,4, # 90 - 97
+ 4,4,4,4,4,4,4,4, # 98 - 9f
+ 5,5,5,5,5,5,5,5, # a0 - a7
+ 5,5,5,5,5,5,5,5, # a8 - af
+ 5,5,5,5,5,5,5,5, # b0 - b7
+ 5,5,5,5,5,5,5,5, # b8 - bf
+ 0,0,6,6,6,6,6,6, # c0 - c7
+ 6,6,6,6,6,6,6,6, # c8 - cf
+ 6,6,6,6,6,6,6,6, # d0 - d7
+ 6,6,6,6,6,6,6,6, # d8 - df
+ 7,8,8,8,8,8,8,8, # e0 - e7
+ 8,8,8,8,8,9,8,8, # e8 - ef
+ 10,11,11,11,11,11,11,11, # f0 - f7
+ 12,13,13,13,14,15,0,0 # f8 - ff
+)
+
+UTF8_ST = (
+ MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07
+ 9, 11, 8, 7, 6, 5, 4, 3,#08-0f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27
+ MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f
+ MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f
+ MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f
+ MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f
+ MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af
+ MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf
+ MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7
+ MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf
+)
+
+UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
+
+UTF8_SM_MODEL = {'class_table': UTF8_CLS,
+ 'class_factor': 16,
+ 'state_table': UTF8_ST,
+ 'char_len_table': UTF8_CHAR_LEN_TABLE,
+ 'name': 'UTF-8'}
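
These *_SM_MODEL dicts are consumed by the vendored codingstatemachine.py: next_state() maps each input byte to a character class through class_table, indexes state_table at (current_state * class_factor + class), and uses char_len_table to record the length of the character being assembled. A minimal sketch of driving the UTF-8 machine by hand, assuming the vendored package layout of this tree:

    from pip._vendor.chardet.codingstatemachine import CodingStateMachine
    from pip._vendor.chardet.enums import MachineState
    from pip._vendor.chardet.mbcssm import UTF8_SM_MODEL

    sm = CodingStateMachine(UTF8_SM_MODEL)
    for byte in '日'.encode('utf-8'):         # 0xe6 0x97 0xa5, a 3-byte sequence
        state = sm.next_state(byte)
        if state == MachineState.ERROR:       # byte stream is not legal UTF-8
            print('rejected')
            break
        if state == MachineState.START:       # one complete character consumed
            print('character length:', sm.get_current_charlen())   # -> 3
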
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py
new file mode 100644
index 00000000..0adb51de
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py
@@ -0,0 +1,132 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import CharacterCategory, ProbingState, SequenceLikelihood
+
+
+class SingleByteCharSetProber(CharSetProber):
+ SAMPLE_SIZE = 64
+ SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2
+ POSITIVE_SHORTCUT_THRESHOLD = 0.95
+ NEGATIVE_SHORTCUT_THRESHOLD = 0.05
+
+ def __init__(self, model, reversed=False, name_prober=None):
+ super(SingleByteCharSetProber, self).__init__()
+ self._model = model
+ # TRUE if we need to reverse every pair in the model lookup
+ self._reversed = reversed
+ # Optional auxiliary prober for name decision
+ self._name_prober = name_prober
+ self._last_order = None
+ self._seq_counters = None
+ self._total_seqs = None
+ self._total_char = None
+ self._freq_char = None
+ self.reset()
+
+ def reset(self):
+ super(SingleByteCharSetProber, self).reset()
+ # char order of last character
+ self._last_order = 255
+ self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
+ self._total_seqs = 0
+ self._total_char = 0
+ # characters that fall in our sampling range
+ self._freq_char = 0
+
+ @property
+ def charset_name(self):
+ if self._name_prober:
+ return self._name_prober.charset_name
+ else:
+ return self._model['charset_name']
+
+ @property
+ def language(self):
+ if self._name_prober:
+ return self._name_prober.language
+ else:
+ return self._model.get('language')
+
+ def feed(self, byte_str):
+ if not self._model['keep_english_letter']:
+ byte_str = self.filter_international_words(byte_str)
+ if not byte_str:
+ return self.state
+ char_to_order_map = self._model['char_to_order_map']
+ for i, c in enumerate(byte_str):
+ # XXX: Order is in range 1-64, so one would think we want 0-63 here,
+ # but that leads to 27 more test failures than before.
+ order = char_to_order_map[c]
+ # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
+ # CharacterCategory.SYMBOL is actually 253, so we use CONTROL
+ # to make it closer to the original intent. The only difference
+ # is whether or not we count digits and control characters for
+ # _total_char purposes.
+ if order < CharacterCategory.CONTROL:
+ self._total_char += 1
+ if order < self.SAMPLE_SIZE:
+ self._freq_char += 1
+ if self._last_order < self.SAMPLE_SIZE:
+ self._total_seqs += 1
+ if not self._reversed:
+ i = (self._last_order * self.SAMPLE_SIZE) + order
+ model = self._model['precedence_matrix'][i]
+ else: # reverse the order of the letters in the lookup
+ i = (order * self.SAMPLE_SIZE) + self._last_order
+ model = self._model['precedence_matrix'][i]
+ self._seq_counters[model] += 1
+ self._last_order = order
+
+ charset_name = self._model['charset_name']
+ if self.state == ProbingState.DETECTING:
+ if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
+ confidence = self.get_confidence()
+ if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
+ self.logger.debug('%s confidence = %s, we have a winner',
+ charset_name, confidence)
+ self._state = ProbingState.FOUND_IT
+ elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
+ self.logger.debug('%s confidence = %s, below negative '
+                                      'shortcut threshold %s', charset_name,
+ confidence,
+ self.NEGATIVE_SHORTCUT_THRESHOLD)
+ self._state = ProbingState.NOT_ME
+
+ return self.state
+
+ def get_confidence(self):
+ r = 0.01
+ if self._total_seqs > 0:
+ r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
+ self._total_seqs / self._model['typical_positive_ratio'])
+ r = r * self._freq_char / self._total_char
+ if r >= 1.0:
+ r = 0.99
+ return r
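
The confidence above is the share of POSITIVE bigrams, normalized by the model's typical_positive_ratio and scaled by the fraction of characters that fell inside the 64-character sampling range. A worked example with assumed counts (the numbers are hypothetical, for illustration only):

    positive_seqs, total_seqs = 600, 1200   # half the observed bigrams are positive
    typical_positive_ratio = 0.9            # hypothetical per-language model constant
    freq_char, total_char = 800, 1000       # 80% of characters in the sampling range

    r = (1.0 * positive_seqs) / total_seqs / typical_positive_ratio
    r = r * freq_char / total_char          # 0.5 / 0.9 * 0.8 = 0.444...
    if r >= 1.0:
        r = 0.99                            # never report full certainty
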
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py
new file mode 100644
index 00000000..98e95dc1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py
@@ -0,0 +1,73 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetgroupprober import CharSetGroupProber
+from .sbcharsetprober import SingleByteCharSetProber
+from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
+ Latin5CyrillicModel, MacCyrillicModel,
+ Ibm866Model, Ibm855Model)
+from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
+from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
+# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
+from .langthaimodel import TIS620ThaiModel
+from .langhebrewmodel import Win1255HebrewModel
+from .hebrewprober import HebrewProber
+from .langturkishmodel import Latin5TurkishModel
+
+
+class SBCSGroupProber(CharSetGroupProber):
+ def __init__(self):
+ super(SBCSGroupProber, self).__init__()
+ self.probers = [
+ SingleByteCharSetProber(Win1251CyrillicModel),
+ SingleByteCharSetProber(Koi8rModel),
+ SingleByteCharSetProber(Latin5CyrillicModel),
+ SingleByteCharSetProber(MacCyrillicModel),
+ SingleByteCharSetProber(Ibm866Model),
+ SingleByteCharSetProber(Ibm855Model),
+ SingleByteCharSetProber(Latin7GreekModel),
+ SingleByteCharSetProber(Win1253GreekModel),
+ SingleByteCharSetProber(Latin5BulgarianModel),
+ SingleByteCharSetProber(Win1251BulgarianModel),
+ # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
+ # after we retrain model.
+ # SingleByteCharSetProber(Latin2HungarianModel),
+ # SingleByteCharSetProber(Win1250HungarianModel),
+ SingleByteCharSetProber(TIS620ThaiModel),
+ SingleByteCharSetProber(Latin5TurkishModel),
+ ]
+ hebrew_prober = HebrewProber()
+ logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel,
+ False, hebrew_prober)
+ visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True,
+ hebrew_prober)
+ hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober)
+ self.probers.extend([hebrew_prober, logical_hebrew_prober,
+ visual_hebrew_prober])
+
+ self.reset()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py
new file mode 100644
index 00000000..9e29623b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py
@@ -0,0 +1,92 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import SJISDistributionAnalysis
+from .jpcntx import SJISContextAnalysis
+from .mbcssm import SJIS_SM_MODEL
+from .enums import ProbingState, MachineState
+
+
+class SJISProber(MultiByteCharSetProber):
+ def __init__(self):
+ super(SJISProber, self).__init__()
+ self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
+ self.distribution_analyzer = SJISDistributionAnalysis()
+ self.context_analyzer = SJISContextAnalysis()
+ self.reset()
+
+ def reset(self):
+ super(SJISProber, self).reset()
+ self.context_analyzer.reset()
+
+ @property
+ def charset_name(self):
+ return self.context_analyzer.charset_name
+
+ @property
+ def language(self):
+ return "Japanese"
+
+ def feed(self, byte_str):
+ for i in range(len(byte_str)):
+ coding_state = self.coding_sm.next_state(byte_str[i])
+ if coding_state == MachineState.ERROR:
+ self.logger.debug('%s %s prober hit error at byte %s',
+ self.charset_name, self.language, i)
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ char_len = self.coding_sm.get_current_charlen()
+ if i == 0:
+ self._last_char[1] = byte_str[0]
+ self.context_analyzer.feed(self._last_char[2 - char_len:],
+ char_len)
+ self.distribution_analyzer.feed(self._last_char, char_len)
+ else:
+ self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3
+ - char_len], char_len)
+ self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
+ char_len)
+
+ self._last_char[0] = byte_str[-1]
+
+ if self.state == ProbingState.DETECTING:
+ if (self.context_analyzer.got_enough_data() and
+ (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
+
+ def get_confidence(self):
+ context_conf = self.context_analyzer.get_confidence()
+ distrib_conf = self.distribution_analyzer.get_confidence()
+ return max(context_conf, distrib_conf)
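
A prober like this can also be exercised on its own, outside UniversalDetector. A quick sketch, using CPython's standard shift_jis codec to manufacture sample bytes:

    from pip._vendor.chardet.sjisprober import SJISProber
    from pip._vendor.chardet.enums import ProbingState

    prober = SJISProber()
    sample = '日本語のテキストです。'.encode('shift_jis') * 10   # enough data to score
    if prober.feed(sample) != ProbingState.NOT_ME:
        print(prober.charset_name, prober.get_confidence())
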
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py
new file mode 100644
index 00000000..7b4e92d6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py
@@ -0,0 +1,286 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is Mozilla Universal charset detector code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2001
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+# Shy Shalom - original C code
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+"""
+Module containing the UniversalDetector detector class, which is the primary
+class a user of ``chardet`` should use.
+
+:author: Mark Pilgrim (initial port to Python)
+:author: Shy Shalom (original C code)
+:author: Dan Blanchard (major refactoring for 3.0)
+:author: Ian Cordasco
+"""
+
+
+import codecs
+import logging
+import re
+
+from .charsetgroupprober import CharSetGroupProber
+from .enums import InputState, LanguageFilter, ProbingState
+from .escprober import EscCharSetProber
+from .latin1prober import Latin1Prober
+from .mbcsgroupprober import MBCSGroupProber
+from .sbcsgroupprober import SBCSGroupProber
+
+
+class UniversalDetector(object):
+ """
+ The ``UniversalDetector`` class underlies the ``chardet.detect`` function
+ and coordinates all of the different charset probers.
+
+ To get a ``dict`` containing an encoding and its confidence, you can simply
+ run:
+
+ .. code::
+
+ u = UniversalDetector()
+ u.feed(some_bytes)
+ u.close()
+ detected = u.result
+
+ """
+
+ MINIMUM_THRESHOLD = 0.20
+ HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]')
+ ESC_DETECTOR = re.compile(b'(\033|~{)')
+ WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]')
+ ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252',
+ 'iso-8859-2': 'Windows-1250',
+ 'iso-8859-5': 'Windows-1251',
+ 'iso-8859-6': 'Windows-1256',
+ 'iso-8859-7': 'Windows-1253',
+ 'iso-8859-8': 'Windows-1255',
+ 'iso-8859-9': 'Windows-1254',
+ 'iso-8859-13': 'Windows-1257'}
+
+ def __init__(self, lang_filter=LanguageFilter.ALL):
+ self._esc_charset_prober = None
+ self._charset_probers = []
+ self.result = None
+ self.done = None
+ self._got_data = None
+ self._input_state = None
+ self._last_char = None
+ self.lang_filter = lang_filter
+ self.logger = logging.getLogger(__name__)
+ self._has_win_bytes = None
+ self.reset()
+
+ def reset(self):
+ """
+ Reset the UniversalDetector and all of its probers back to their
+ initial states. This is called by ``__init__``, so you only need to
+ call this directly in between analyses of different documents.
+ """
+ self.result = {'encoding': None, 'confidence': 0.0, 'language': None}
+ self.done = False
+ self._got_data = False
+ self._has_win_bytes = False
+ self._input_state = InputState.PURE_ASCII
+ self._last_char = b''
+ if self._esc_charset_prober:
+ self._esc_charset_prober.reset()
+ for prober in self._charset_probers:
+ prober.reset()
+
+ def feed(self, byte_str):
+ """
+ Takes a chunk of a document and feeds it through all of the relevant
+ charset probers.
+
+ After calling ``feed``, you can check the value of the ``done``
+ attribute to see if you need to continue feeding the
+ ``UniversalDetector`` more data, or if it has made a prediction
+ (in the ``result`` attribute).
+
+ .. note::
+ You should always call ``close`` when you're done feeding in your
+ document if ``done`` is not already ``True``.
+ """
+ if self.done:
+ return
+
+ if not len(byte_str):
+ return
+
+ if not isinstance(byte_str, bytearray):
+ byte_str = bytearray(byte_str)
+
+ # First check for known BOMs, since these are guaranteed to be correct
+ if not self._got_data:
+ # If the data starts with BOM, we know it is UTF
+ if byte_str.startswith(codecs.BOM_UTF8):
+ # EF BB BF UTF-8 with BOM
+ self.result = {'encoding': "UTF-8-SIG",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith((codecs.BOM_UTF32_LE,
+ codecs.BOM_UTF32_BE)):
+ # FF FE 00 00 UTF-32, little-endian BOM
+ # 00 00 FE FF UTF-32, big-endian BOM
+ self.result = {'encoding': "UTF-32",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith(b'\xFE\xFF\x00\x00'):
+ # FE FF 00 00 UCS-4, unusual octet order BOM (3412)
+ self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith(b'\x00\x00\xFF\xFE'):
+ # 00 00 FF FE UCS-4, unusual octet order BOM (2143)
+ self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
+ 'confidence': 1.0,
+ 'language': ''}
+ elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
+ # FF FE UTF-16, little endian BOM
+ # FE FF UTF-16, big endian BOM
+ self.result = {'encoding': "UTF-16",
+ 'confidence': 1.0,
+ 'language': ''}
+
+ self._got_data = True
+ if self.result['encoding'] is not None:
+ self.done = True
+ return
+
+        # If none of those matched and we've only seen ASCII so far, check
+ # for high bytes and escape sequences
+ if self._input_state == InputState.PURE_ASCII:
+ if self.HIGH_BYTE_DETECTOR.search(byte_str):
+ self._input_state = InputState.HIGH_BYTE
+ elif self._input_state == InputState.PURE_ASCII and \
+ self.ESC_DETECTOR.search(self._last_char + byte_str):
+ self._input_state = InputState.ESC_ASCII
+
+ self._last_char = byte_str[-1:]
+
+ # If we've seen escape sequences, use the EscCharSetProber, which
+ # uses a simple state machine to check for known escape sequences in
+ # HZ and ISO-2022 encodings, since those are the only encodings that
+ # use such sequences.
+ if self._input_state == InputState.ESC_ASCII:
+ if not self._esc_charset_prober:
+ self._esc_charset_prober = EscCharSetProber(self.lang_filter)
+ if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
+ self.result = {'encoding':
+ self._esc_charset_prober.charset_name,
+ 'confidence':
+ self._esc_charset_prober.get_confidence(),
+ 'language':
+ self._esc_charset_prober.language}
+ self.done = True
+ # If we've seen high bytes (i.e., those with values greater than 127),
+ # we need to do more complicated checks using all our multi-byte and
+ # single-byte probers that are left. The single-byte probers
+ # use character bigram distributions to determine the encoding, whereas
+ # the multi-byte probers use a combination of character unigram and
+ # bigram distributions.
+ elif self._input_state == InputState.HIGH_BYTE:
+ if not self._charset_probers:
+ self._charset_probers = [MBCSGroupProber(self.lang_filter)]
+ # If we're checking non-CJK encodings, use single-byte prober
+ if self.lang_filter & LanguageFilter.NON_CJK:
+ self._charset_probers.append(SBCSGroupProber())
+ self._charset_probers.append(Latin1Prober())
+ for prober in self._charset_probers:
+ if prober.feed(byte_str) == ProbingState.FOUND_IT:
+ self.result = {'encoding': prober.charset_name,
+ 'confidence': prober.get_confidence(),
+ 'language': prober.language}
+ self.done = True
+ break
+ if self.WIN_BYTE_DETECTOR.search(byte_str):
+ self._has_win_bytes = True
+
+ def close(self):
+ """
+ Stop analyzing the current document and come up with a final
+ prediction.
+
+ :returns: The ``result`` attribute, a ``dict`` with the keys
+ `encoding`, `confidence`, and `language`.
+ """
+ # Don't bother with checks if we're already done
+ if self.done:
+ return self.result
+ self.done = True
+
+ if not self._got_data:
+ self.logger.debug('no data received!')
+
+ # Default to ASCII if it is all we've seen so far
+ elif self._input_state == InputState.PURE_ASCII:
+ self.result = {'encoding': 'ascii',
+ 'confidence': 1.0,
+ 'language': ''}
+
+ # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
+ elif self._input_state == InputState.HIGH_BYTE:
+ prober_confidence = None
+ max_prober_confidence = 0.0
+ max_prober = None
+ for prober in self._charset_probers:
+ if not prober:
+ continue
+ prober_confidence = prober.get_confidence()
+ if prober_confidence > max_prober_confidence:
+ max_prober_confidence = prober_confidence
+ max_prober = prober
+ if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
+ charset_name = max_prober.charset_name
+ lower_charset_name = max_prober.charset_name.lower()
+ confidence = max_prober.get_confidence()
+ # Use Windows encoding name instead of ISO-8859 if we saw any
+ # extra Windows-specific bytes
+ if lower_charset_name.startswith('iso-8859'):
+ if self._has_win_bytes:
+ charset_name = self.ISO_WIN_MAP.get(lower_charset_name,
+ charset_name)
+ self.result = {'encoding': charset_name,
+ 'confidence': confidence,
+ 'language': max_prober.language}
+
+ # Log all prober confidences if none met MINIMUM_THRESHOLD
+ if self.logger.getEffectiveLevel() == logging.DEBUG:
+ if self.result['encoding'] is None:
+ self.logger.debug('no probers hit minimum threshold')
+ for group_prober in self._charset_probers:
+ if not group_prober:
+ continue
+ if isinstance(group_prober, CharSetGroupProber):
+ for prober in group_prober.probers:
+ self.logger.debug('%s %s confidence = %s',
+ prober.charset_name,
+ prober.language,
+ prober.get_confidence())
+                    else:
+                        self.logger.debug('%s %s confidence = %s',
+                                          group_prober.charset_name,
+                                          group_prober.language,
+                                          group_prober.get_confidence())
+ return self.result
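
Together, feed() and close() support incremental detection, so a large file can be scanned chunk by chunk and abandoned early once a prober is certain. A sketch (the path is a placeholder):

    from pip._vendor.chardet.universaldetector import UniversalDetector

    detector = UniversalDetector()
    with open('document.bin', 'rb') as f:     # placeholder path
        for chunk in iter(lambda: f.read(4096), b''):
            detector.feed(chunk)
            if detector.done:                 # a prober already hit FOUND_IT
                break
    result = detector.close()                 # dict with encoding/confidence/language
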
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py
new file mode 100644
index 00000000..6c3196cc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py
@@ -0,0 +1,82 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .charsetprober import CharSetProber
+from .enums import ProbingState, MachineState
+from .codingstatemachine import CodingStateMachine
+from .mbcssm import UTF8_SM_MODEL
+
+
+
+class UTF8Prober(CharSetProber):
+ ONE_CHAR_PROB = 0.5
+
+ def __init__(self):
+ super(UTF8Prober, self).__init__()
+ self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
+ self._num_mb_chars = None
+ self.reset()
+
+ def reset(self):
+ super(UTF8Prober, self).reset()
+ self.coding_sm.reset()
+ self._num_mb_chars = 0
+
+ @property
+ def charset_name(self):
+ return "utf-8"
+
+ @property
+ def language(self):
+ return ""
+
+ def feed(self, byte_str):
+ for c in byte_str:
+ coding_state = self.coding_sm.next_state(c)
+ if coding_state == MachineState.ERROR:
+ self._state = ProbingState.NOT_ME
+ break
+ elif coding_state == MachineState.ITS_ME:
+ self._state = ProbingState.FOUND_IT
+ break
+ elif coding_state == MachineState.START:
+ if self.coding_sm.get_current_charlen() >= 2:
+ self._num_mb_chars += 1
+
+ if self.state == ProbingState.DETECTING:
+ if self.get_confidence() > self.SHORTCUT_THRESHOLD:
+ self._state = ProbingState.FOUND_IT
+
+ return self.state
+
+ def get_confidence(self):
+ unlike = 0.99
+ if self._num_mb_chars < 6:
+ unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars
+ return 1.0 - unlike
+ else:
+ return unlike
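
With ONE_CHAR_PROB = 0.5, the confidence above starts near zero and converges toward certainty: each multi-byte character halves the remaining "unlikely" mass, and from six such characters on the prober reports a flat 0.99. Tabulating the same formula:

    ONE_CHAR_PROB = 0.5
    for n in range(8):                        # n = multi-byte characters seen
        if n < 6:
            conf = 1.0 - 0.99 * ONE_CHAR_PROB ** n
        else:
            conf = 0.99
        print(n, round(conf, 5))              # 0 -> 0.01, 3 -> 0.87625, 6+ -> 0.99
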
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/version.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/version.py
new file mode 100644
index 00000000..bb2a34a7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/chardet/version.py
@@ -0,0 +1,9 @@
+"""
+This module exists only to simplify retrieving the version number of chardet
+from within setup.py and from chardet subpackages.
+
+:author: Dan Blanchard (dan.blanchard@gmail.com)
+"""
+
+__version__ = "3.0.4"
+VERSION = __version__.split('.')
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py
new file mode 100644
index 00000000..2a3bf471
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py
@@ -0,0 +1,6 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from .initialise import init, deinit, reinit, colorama_text
+from .ansi import Fore, Back, Style, Cursor
+from .ansitowin32 import AnsiToWin32
+
+__version__ = '0.4.1'
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..4267f01c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc
new file mode 100644
index 00000000..1fd3491c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc
new file mode 100644
index 00000000..b1360b54
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc
new file mode 100644
index 00000000..7c1b7b63
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc
new file mode 100644
index 00000000..6b854e0e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc
new file mode 100644
index 00000000..4c2644a2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py
new file mode 100644
index 00000000..78776588
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py
@@ -0,0 +1,102 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+'''
+This module generates ANSI character codes for printing colors to terminals.
+See: http://en.wikipedia.org/wiki/ANSI_escape_code
+'''
+
+CSI = '\033['
+OSC = '\033]'
+BEL = '\007'
+
+
+def code_to_chars(code):
+ return CSI + str(code) + 'm'
+
+def set_title(title):
+ return OSC + '2;' + title + BEL
+
+def clear_screen(mode=2):
+ return CSI + str(mode) + 'J'
+
+def clear_line(mode=2):
+ return CSI + str(mode) + 'K'
+
+
+class AnsiCodes(object):
+ def __init__(self):
+        # The subclasses declare class attributes which are numbers.
+        # Upon instantiation we define instance attributes, which are the same
+        # as the class attributes but wrapped in the ANSI escape sequence.
+ for name in dir(self):
+ if not name.startswith('_'):
+ value = getattr(self, name)
+ setattr(self, name, code_to_chars(value))
+
+
+class AnsiCursor(object):
+ def UP(self, n=1):
+ return CSI + str(n) + 'A'
+ def DOWN(self, n=1):
+ return CSI + str(n) + 'B'
+ def FORWARD(self, n=1):
+ return CSI + str(n) + 'C'
+ def BACK(self, n=1):
+ return CSI + str(n) + 'D'
+ def POS(self, x=1, y=1):
+ return CSI + str(y) + ';' + str(x) + 'H'
+
+
+class AnsiFore(AnsiCodes):
+ BLACK = 30
+ RED = 31
+ GREEN = 32
+ YELLOW = 33
+ BLUE = 34
+ MAGENTA = 35
+ CYAN = 36
+ WHITE = 37
+ RESET = 39
+
+ # These are fairly well supported, but not part of the standard.
+ LIGHTBLACK_EX = 90
+ LIGHTRED_EX = 91
+ LIGHTGREEN_EX = 92
+ LIGHTYELLOW_EX = 93
+ LIGHTBLUE_EX = 94
+ LIGHTMAGENTA_EX = 95
+ LIGHTCYAN_EX = 96
+ LIGHTWHITE_EX = 97
+
+
+class AnsiBack(AnsiCodes):
+ BLACK = 40
+ RED = 41
+ GREEN = 42
+ YELLOW = 43
+ BLUE = 44
+ MAGENTA = 45
+ CYAN = 46
+ WHITE = 47
+ RESET = 49
+
+ # These are fairly well supported, but not part of the standard.
+ LIGHTBLACK_EX = 100
+ LIGHTRED_EX = 101
+ LIGHTGREEN_EX = 102
+ LIGHTYELLOW_EX = 103
+ LIGHTBLUE_EX = 104
+ LIGHTMAGENTA_EX = 105
+ LIGHTCYAN_EX = 106
+ LIGHTWHITE_EX = 107
+
+
+class AnsiStyle(AnsiCodes):
+ BRIGHT = 1
+ DIM = 2
+ NORMAL = 22
+ RESET_ALL = 0
+
+Fore = AnsiFore()
+Back = AnsiBack()
+Style = AnsiStyle()
+Cursor = AnsiCursor()
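
Because Fore, Back, Style and Cursor are instances whose attributes are pre-rendered escape strings (see AnsiCodes.__init__ above), coloring output is plain string concatenation. A small usage sketch; note that on a bare Windows console these escapes only take effect once init() from initialise.py (further below) has wrapped stdout:

    from pip._vendor.colorama import Fore, Back, Style

    print(Fore.RED + 'error:' + Style.RESET_ALL + ' something failed')
    print(Back.YELLOW + Fore.BLACK + 'highlighted' + Style.RESET_ALL)
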
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py
new file mode 100644
index 00000000..359c92be
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py
@@ -0,0 +1,257 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import re
+import sys
+import os
+
+from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
+from .winterm import WinTerm, WinColor, WinStyle
+from .win32 import windll, winapi_test
+
+
+winterm = None
+if windll is not None:
+ winterm = WinTerm()
+
+
+class StreamWrapper(object):
+ '''
+ Wraps a stream (such as stdout), acting as a transparent proxy for all
+ attribute access apart from method 'write()', which is delegated to our
+ Converter instance.
+ '''
+ def __init__(self, wrapped, converter):
+ # double-underscore everything to prevent clashes with names of
+ # attributes on the wrapped stream object.
+ self.__wrapped = wrapped
+ self.__convertor = converter
+
+ def __getattr__(self, name):
+ return getattr(self.__wrapped, name)
+
+ def __enter__(self, *args, **kwargs):
+ # special method lookup bypasses __getattr__/__getattribute__, see
+ # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
+ # thus, contextlib magic methods are not proxied via __getattr__
+ return self.__wrapped.__enter__(*args, **kwargs)
+
+ def __exit__(self, *args, **kwargs):
+ return self.__wrapped.__exit__(*args, **kwargs)
+
+ def write(self, text):
+ self.__convertor.write(text)
+
+ def isatty(self):
+ stream = self.__wrapped
+ if 'PYCHARM_HOSTED' in os.environ:
+ if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
+ return True
+ try:
+ stream_isatty = stream.isatty
+ except AttributeError:
+ return False
+ else:
+ return stream_isatty()
+
+ @property
+ def closed(self):
+ stream = self.__wrapped
+ try:
+ return stream.closed
+ except AttributeError:
+ return True
+
+
+class AnsiToWin32(object):
+ '''
+ Implements a 'write()' method which, on Windows, will strip ANSI character
+ sequences from the text, and if outputting to a tty, will convert them into
+ win32 function calls.
+ '''
+ ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
+ ANSI_OSC_RE = re.compile('\001?\033\\]((?:.|;)*?)(\x07)\002?') # Operating System Command
+
+ def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
+ # The wrapped stream (normally sys.stdout or sys.stderr)
+ self.wrapped = wrapped
+
+        # should we reset colors to defaults after every .write()?
+ self.autoreset = autoreset
+
+ # create the proxy wrapping our output stream
+ self.stream = StreamWrapper(wrapped, self)
+
+ on_windows = os.name == 'nt'
+ # We test if the WinAPI works, because even if we are on Windows
+ # we may be using a terminal that doesn't support the WinAPI
+ # (e.g. Cygwin Terminal). In this case it's up to the terminal
+ # to support the ANSI codes.
+ conversion_supported = on_windows and winapi_test()
+
+ # should we strip ANSI sequences from our output?
+ if strip is None:
+ strip = conversion_supported or (not self.stream.closed and not self.stream.isatty())
+ self.strip = strip
+
+        # should we convert ANSI sequences into win32 calls?
+ if convert is None:
+ convert = conversion_supported and not self.stream.closed and self.stream.isatty()
+ self.convert = convert
+
+ # dict of ansi codes to win32 functions and parameters
+ self.win32_calls = self.get_win32_calls()
+
+ # are we wrapping stderr?
+ self.on_stderr = self.wrapped is sys.stderr
+
+ def should_wrap(self):
+ '''
+ True if this class is actually needed. If false, then the output
+ stream will not be affected, nor will win32 calls be issued, so
+ wrapping stdout is not actually required. This will generally be
+ False on non-Windows platforms, unless optional functionality like
+ autoreset has been requested using kwargs to init()
+ '''
+ return self.convert or self.strip or self.autoreset
+
+ def get_win32_calls(self):
+ if self.convert and winterm:
+ return {
+ AnsiStyle.RESET_ALL: (winterm.reset_all, ),
+ AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
+ AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
+ AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
+ AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
+ AnsiFore.RED: (winterm.fore, WinColor.RED),
+ AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
+ AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
+ AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
+ AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
+ AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
+ AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
+ AnsiFore.RESET: (winterm.fore, ),
+ AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
+ AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
+ AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
+ AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
+ AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
+ AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
+ AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
+ AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
+ AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
+ AnsiBack.RED: (winterm.back, WinColor.RED),
+ AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
+ AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
+ AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
+ AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
+ AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
+ AnsiBack.WHITE: (winterm.back, WinColor.GREY),
+ AnsiBack.RESET: (winterm.back, ),
+ AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
+ AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
+ AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
+ AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
+ AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
+ AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
+ AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
+ AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
+ }
+ return dict()
+
+ def write(self, text):
+ if self.strip or self.convert:
+ self.write_and_convert(text)
+ else:
+ self.wrapped.write(text)
+ self.wrapped.flush()
+ if self.autoreset:
+ self.reset_all()
+
+
+ def reset_all(self):
+ if self.convert:
+ self.call_win32('m', (0,))
+ elif not self.strip and not self.stream.closed:
+ self.wrapped.write(Style.RESET_ALL)
+
+
+ def write_and_convert(self, text):
+ '''
+ Write the given text to our wrapped stream, stripping any ANSI
+ sequences from the text, and optionally converting them into win32
+ calls.
+ '''
+ cursor = 0
+ text = self.convert_osc(text)
+ for match in self.ANSI_CSI_RE.finditer(text):
+ start, end = match.span()
+ self.write_plain_text(text, cursor, start)
+ self.convert_ansi(*match.groups())
+ cursor = end
+ self.write_plain_text(text, cursor, len(text))
+
+
+ def write_plain_text(self, text, start, end):
+ if start < end:
+ self.wrapped.write(text[start:end])
+ self.wrapped.flush()
+
+
+ def convert_ansi(self, paramstring, command):
+ if self.convert:
+ params = self.extract_params(command, paramstring)
+ self.call_win32(command, params)
+
+
+ def extract_params(self, command, paramstring):
+ if command in 'Hf':
+ params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
+ while len(params) < 2:
+ # defaults:
+ params = params + (1,)
+ else:
+ params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
+ if len(params) == 0:
+ # defaults:
+ if command in 'JKm':
+ params = (0,)
+ elif command in 'ABCD':
+ params = (1,)
+
+ return params
+
+
+ def call_win32(self, command, params):
+ if command == 'm':
+ for param in params:
+ if param in self.win32_calls:
+ func_args = self.win32_calls[param]
+ func = func_args[0]
+ args = func_args[1:]
+ kwargs = dict(on_stderr=self.on_stderr)
+ func(*args, **kwargs)
+ elif command in 'J':
+ winterm.erase_screen(params[0], on_stderr=self.on_stderr)
+ elif command in 'K':
+ winterm.erase_line(params[0], on_stderr=self.on_stderr)
+ elif command in 'Hf': # cursor position - absolute
+ winterm.set_cursor_position(params, on_stderr=self.on_stderr)
+ elif command in 'ABCD': # cursor position - relative
+ n = params[0]
+ # A - up, B - down, C - forward, D - back
+ x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
+ winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
+
+
+ def convert_osc(self, text):
+ for match in self.ANSI_OSC_RE.finditer(text):
+ start, end = match.span()
+ text = text[:start] + text[end:]
+ paramstring, command = match.groups()
+ if command in '\x07': # \x07 = BEL
+ params = paramstring.split(";")
+ # 0 - change title and icon (we will only change title)
+ # 1 - change icon (we don't support this)
+ # 2 - change title
+ if params[0] in '02':
+ winterm.set_title(params[1])
+ return text
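
The usual entry point for this class is wrap_stream() in initialise.py, but it can also be applied by hand. A sketch:

    import sys

    from pip._vendor.colorama.ansitowin32 import AnsiToWin32

    wrapper = AnsiToWin32(sys.stdout)
    if wrapper.should_wrap():         # stripping, converting or autoreset is needed
        sys.stdout = wrapper.stream   # proxy whose write() goes through the converter
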
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py
new file mode 100644
index 00000000..430d0668
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py
@@ -0,0 +1,80 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+import atexit
+import contextlib
+import sys
+
+from .ansitowin32 import AnsiToWin32
+
+
+orig_stdout = None
+orig_stderr = None
+
+wrapped_stdout = None
+wrapped_stderr = None
+
+atexit_done = False
+
+
+def reset_all():
+ if AnsiToWin32 is not None: # Issue #74: objects might become None at exit
+ AnsiToWin32(orig_stdout).reset_all()
+
+
+def init(autoreset=False, convert=None, strip=None, wrap=True):
+
+ if not wrap and any([autoreset, convert, strip]):
+ raise ValueError('wrap=False conflicts with any other arg=True')
+
+ global wrapped_stdout, wrapped_stderr
+ global orig_stdout, orig_stderr
+
+ orig_stdout = sys.stdout
+ orig_stderr = sys.stderr
+
+ if sys.stdout is None:
+ wrapped_stdout = None
+ else:
+ sys.stdout = wrapped_stdout = \
+ wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
+ if sys.stderr is None:
+ wrapped_stderr = None
+ else:
+ sys.stderr = wrapped_stderr = \
+ wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
+
+ global atexit_done
+ if not atexit_done:
+ atexit.register(reset_all)
+ atexit_done = True
+
+
+def deinit():
+ if orig_stdout is not None:
+ sys.stdout = orig_stdout
+ if orig_stderr is not None:
+ sys.stderr = orig_stderr
+
+
+@contextlib.contextmanager
+def colorama_text(*args, **kwargs):
+ init(*args, **kwargs)
+ try:
+ yield
+ finally:
+ deinit()
+
+
+def reinit():
+ if wrapped_stdout is not None:
+ sys.stdout = wrapped_stdout
+ if wrapped_stderr is not None:
+ sys.stderr = wrapped_stderr
+
+
+def wrap_stream(stream, convert, strip, autoreset, wrap):
+ if wrap:
+ wrapper = AnsiToWin32(stream,
+ convert=convert, strip=strip, autoreset=autoreset)
+ if wrapper.should_wrap():
+ stream = wrapper.stream
+ return stream
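
colorama_text() scopes the stream wrapping: init() on entry, deinit() on exit, with all arguments passed straight through to init(). A sketch:

    from pip._vendor.colorama import Fore, colorama_text

    with colorama_text(autoreset=True):   # streams wrapped; colors reset per write
        print(Fore.GREEN + 'inside the context')
    print('outside: sys.stdout is the original stream again')
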
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py
new file mode 100644
index 00000000..c2d83603
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py
@@ -0,0 +1,152 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+
+# from winbase.h
+STDOUT = -11
+STDERR = -12
+
+try:
+ import ctypes
+ from ctypes import LibraryLoader
+ windll = LibraryLoader(ctypes.WinDLL)
+ from ctypes import wintypes
+except (AttributeError, ImportError):
+ windll = None
+ SetConsoleTextAttribute = lambda *_: None
+ winapi_test = lambda *_: None
+else:
+ from ctypes import byref, Structure, c_char, POINTER
+
+ COORD = wintypes._COORD
+
+ class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+ """struct in wincon.h."""
+ _fields_ = [
+ ("dwSize", COORD),
+ ("dwCursorPosition", COORD),
+ ("wAttributes", wintypes.WORD),
+ ("srWindow", wintypes.SMALL_RECT),
+ ("dwMaximumWindowSize", COORD),
+ ]
+ def __str__(self):
+ return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
+ self.dwSize.Y, self.dwSize.X
+ , self.dwCursorPosition.Y, self.dwCursorPosition.X
+ , self.wAttributes
+ , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
+ , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
+ )
+
+ _GetStdHandle = windll.kernel32.GetStdHandle
+ _GetStdHandle.argtypes = [
+ wintypes.DWORD,
+ ]
+ _GetStdHandle.restype = wintypes.HANDLE
+
+ _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
+ _GetConsoleScreenBufferInfo.argtypes = [
+ wintypes.HANDLE,
+ POINTER(CONSOLE_SCREEN_BUFFER_INFO),
+ ]
+ _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+
+ _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
+ _SetConsoleTextAttribute.argtypes = [
+ wintypes.HANDLE,
+ wintypes.WORD,
+ ]
+ _SetConsoleTextAttribute.restype = wintypes.BOOL
+
+ _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
+ _SetConsoleCursorPosition.argtypes = [
+ wintypes.HANDLE,
+ COORD,
+ ]
+ _SetConsoleCursorPosition.restype = wintypes.BOOL
+
+ _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
+ _FillConsoleOutputCharacterA.argtypes = [
+ wintypes.HANDLE,
+ c_char,
+ wintypes.DWORD,
+ COORD,
+ POINTER(wintypes.DWORD),
+ ]
+ _FillConsoleOutputCharacterA.restype = wintypes.BOOL
+
+ _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
+ _FillConsoleOutputAttribute.argtypes = [
+ wintypes.HANDLE,
+ wintypes.WORD,
+ wintypes.DWORD,
+ COORD,
+ POINTER(wintypes.DWORD),
+ ]
+ _FillConsoleOutputAttribute.restype = wintypes.BOOL
+
+ _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW
+ _SetConsoleTitleW.argtypes = [
+ wintypes.LPCWSTR
+ ]
+ _SetConsoleTitleW.restype = wintypes.BOOL
+
+ def _winapi_test(handle):
+ csbi = CONSOLE_SCREEN_BUFFER_INFO()
+ success = _GetConsoleScreenBufferInfo(
+ handle, byref(csbi))
+ return bool(success)
+
+ def winapi_test():
+ return any(_winapi_test(h) for h in
+ (_GetStdHandle(STDOUT), _GetStdHandle(STDERR)))
+
+ def GetConsoleScreenBufferInfo(stream_id=STDOUT):
+ handle = _GetStdHandle(stream_id)
+ csbi = CONSOLE_SCREEN_BUFFER_INFO()
+ success = _GetConsoleScreenBufferInfo(
+ handle, byref(csbi))
+ return csbi
+
+ def SetConsoleTextAttribute(stream_id, attrs):
+ handle = _GetStdHandle(stream_id)
+ return _SetConsoleTextAttribute(handle, attrs)
+
+ def SetConsoleCursorPosition(stream_id, position, adjust=True):
+ position = COORD(*position)
+ # If the position is out of range, do nothing.
+ if position.Y <= 0 or position.X <= 0:
+ return
+ # Adjust for Windows' SetConsoleCursorPosition:
+ # 1. being 0-based, while ANSI is 1-based.
+ # 2. expecting (x,y), while ANSI uses (y,x).
+ adjusted_position = COORD(position.Y - 1, position.X - 1)
+ if adjust:
+ # Adjust for viewport's scroll position
+ sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
+ adjusted_position.Y += sr.Top
+ adjusted_position.X += sr.Left
+ # Resume normal processing
+ handle = _GetStdHandle(stream_id)
+ return _SetConsoleCursorPosition(handle, adjusted_position)
+
+ def FillConsoleOutputCharacter(stream_id, char, length, start):
+ handle = _GetStdHandle(stream_id)
+ char = c_char(char.encode())
+ length = wintypes.DWORD(length)
+ num_written = wintypes.DWORD(0)
+ # Note that this is hard-coded for ANSI (vs wide) bytes.
+ success = _FillConsoleOutputCharacterA(
+ handle, char, length, start, byref(num_written))
+ return num_written.value
+
+ def FillConsoleOutputAttribute(stream_id, attr, length, start):
+ ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
+ handle = _GetStdHandle(stream_id)
+ attribute = wintypes.WORD(attr)
+ length = wintypes.DWORD(length)
+ num_written = wintypes.DWORD(0)
+ # Note that this is hard-coded for ANSI (vs wide) bytes.
+ return _FillConsoleOutputAttribute(
+ handle, attribute, length, start, byref(num_written))
+
+ def SetConsoleTitle(title):
+ return _SetConsoleTitleW(title)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py
new file mode 100644
index 00000000..0fdb4ec4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py
@@ -0,0 +1,169 @@
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+from . import win32
+
+
+# from wincon.h
+class WinColor(object):
+ BLACK = 0
+ BLUE = 1
+ GREEN = 2
+ CYAN = 3
+ RED = 4
+ MAGENTA = 5
+ YELLOW = 6
+ GREY = 7
+
+# from wincon.h
+class WinStyle(object):
+ NORMAL = 0x00 # dim text, dim background
+ BRIGHT = 0x08 # bright text, dim background
+ BRIGHT_BACKGROUND = 0x80 # dim text, bright background
+
+class WinTerm(object):
+
+ def __init__(self):
+ self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
+ self.set_attrs(self._default)
+ self._default_fore = self._fore
+ self._default_back = self._back
+ self._default_style = self._style
+ # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
+ # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
+ # we track them separately, since LIGHT_EX is overwritten by Fore/Back
+ # and BRIGHT is overwritten by Style codes.
+ self._light = 0
+
+ def get_attrs(self):
+ return self._fore + self._back * 16 + (self._style | self._light)
+
+ def set_attrs(self, value):
+ self._fore = value & 7
+ self._back = (value >> 4) & 7
+ self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
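+
+ # Worked example of the attribute encoding (illustrative values): red text
+ # on a blue background with BRIGHT set encodes as
+ # WinColor.RED + WinColor.BLUE * 16 + WinStyle.BRIGHT == 4 + 16 + 8 == 28,
+ # and set_attrs(28) recovers _fore == 4, _back == 1, _style == 0x08.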
+
+ def reset_all(self, on_stderr=None):
+ self.set_attrs(self._default)
+ self.set_console(attrs=self._default)
+ self._light = 0
+
+ def fore(self, fore=None, light=False, on_stderr=False):
+ if fore is None:
+ fore = self._default_fore
+ self._fore = fore
+ # Emulate LIGHT_EX with BRIGHT Style
+ if light:
+ self._light |= WinStyle.BRIGHT
+ else:
+ self._light &= ~WinStyle.BRIGHT
+ self.set_console(on_stderr=on_stderr)
+
+ def back(self, back=None, light=False, on_stderr=False):
+ if back is None:
+ back = self._default_back
+ self._back = back
+ # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
+ if light:
+ self._light |= WinStyle.BRIGHT_BACKGROUND
+ else:
+ self._light &= ~WinStyle.BRIGHT_BACKGROUND
+ self.set_console(on_stderr=on_stderr)
+
+ def style(self, style=None, on_stderr=False):
+ if style is None:
+ style = self._default_style
+ self._style = style
+ self.set_console(on_stderr=on_stderr)
+
+ def set_console(self, attrs=None, on_stderr=False):
+ if attrs is None:
+ attrs = self.get_attrs()
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ win32.SetConsoleTextAttribute(handle, attrs)
+
+ def get_position(self, handle):
+ position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
+ # Because Windows coordinates are 0-based,
+ # and win32.SetConsoleCursorPosition expects 1-based.
+ position.X += 1
+ position.Y += 1
+ return position
+
+ def set_cursor_position(self, position=None, on_stderr=False):
+ if position is None:
+ # I'm not currently tracking the position, so there is no default.
+ # position = self.get_position()
+ return
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ win32.SetConsoleCursorPosition(handle, position)
+
+ def cursor_adjust(self, x, y, on_stderr=False):
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ position = self.get_position(handle)
+ adjusted_position = (position.Y + y, position.X + x)
+ win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
+
+ def erase_screen(self, mode=0, on_stderr=False):
+ # 0 should clear from the cursor to the end of the screen.
+ # 1 should clear from the cursor to the beginning of the screen.
+ # 2 should clear the entire screen and move the cursor to (1,1).
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ csbi = win32.GetConsoleScreenBufferInfo(handle)
+ # get the number of character cells in the current buffer
+ cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
+ # get number of character cells before current cursor position
+ cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
+ if mode == 0:
+ from_coord = csbi.dwCursorPosition
+ cells_to_erase = cells_in_screen - cells_before_cursor
+ elif mode == 1:
+ from_coord = win32.COORD(0, 0)
+ cells_to_erase = cells_before_cursor
+ elif mode == 2:
+ from_coord = win32.COORD(0, 0)
+ cells_to_erase = cells_in_screen
+ else:
+ # invalid mode
+ return
+ # fill the region being erased with blanks
+ win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+ # now set the buffer's attributes accordingly
+ win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+ if mode == 2:
+ # put the cursor where needed
+ win32.SetConsoleCursorPosition(handle, (1, 1))
+
+ def erase_line(self, mode=0, on_stderr=False):
+ # 0 should clear from the cursor to the end of the line.
+ # 1 should clear from the cursor to the beginning of the line.
+ # 2 should clear the entire line.
+ handle = win32.STDOUT
+ if on_stderr:
+ handle = win32.STDERR
+ csbi = win32.GetConsoleScreenBufferInfo(handle)
+ if mode == 0:
+ from_coord = csbi.dwCursorPosition
+ cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
+ elif mode == 1:
+ from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+ cells_to_erase = csbi.dwCursorPosition.X
+ elif mode == 2:
+ from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
+ cells_to_erase = csbi.dwSize.X
+ else:
+ # invalid mode
+ return
+ # fill the part of the line being erased with blanks
+ win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
+ # now set the buffer's attributes accordingly
+ win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
+
+ def set_title(self, title):
+ win32.SetConsoleTitle(title)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py
new file mode 100644
index 00000000..a2d70d47
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2019 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import logging
+
+__version__ = '0.2.9.post0'
+
+class DistlibException(Exception):
+ pass
+
+try:
+ from logging import NullHandler
+except ImportError: # pragma: no cover
+ class NullHandler(logging.Handler):
+ def handle(self, record): pass
+ def emit(self, record): pass
+ def createLock(self): self.lock = None
+
+logger = logging.getLogger(__name__)
+logger.addHandler(NullHandler())
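+
+# logging.NullHandler is in the stdlib from Python 2.7/3.1 onwards; the
+# fallback class above only matters on older interpreters, where it keeps
+# library logging silent unless the application configures handlers.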
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..8aabfc7c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..89efcb87
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-37.pyc
new file mode 100644
index 00000000..87abd078
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-37.pyc
new file mode 100644
index 00000000..b8553271
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc
new file mode 100644
index 00000000..280db9d3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc
new file mode 100644
index 00000000..8ad8fe82
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc
new file mode 100644
index 00000000..e23fd05a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc
new file mode 100644
index 00000000..f291d9bb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc
new file mode 100644
index 00000000..11cb08d9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc
new file mode 100644
index 00000000..5cafe3ab
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-37.pyc
new file mode 100644
index 00000000..d169db38
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-37.pyc
new file mode 100644
index 00000000..030da583
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc
new file mode 100644
index 00000000..95a7523f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py
new file mode 100644
index 00000000..f7dbf4c9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py
@@ -0,0 +1,6 @@
+"""Modules copied from Python 3 standard libraries, for internal use only.
+
+Individual classes and functions are found in distlib._backport.misc. Intended
+usage is to always import things missing from 3.1 from that module: the
+built-in/stdlib objects will be used if found.
+"""
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..bf76a7f2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc
new file mode 100644
index 00000000..3a3d916c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc
new file mode 100644
index 00000000..a8f71d48
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc
new file mode 100644
index 00000000..39cbe33a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc
new file mode 100644
index 00000000..5cccc9b7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py
new file mode 100644
index 00000000..cfb318d3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Backports for individual classes and functions."""
+
+import os
+import sys
+
+__all__ = ['cache_from_source', 'callable', 'fsencode']
+
+
+try:
+ from imp import cache_from_source
+except ImportError:
+ def cache_from_source(py_file, debug=__debug__):
+ # pre-PEP 3147 layout: '.pyc' for debug builds, '.pyo' when optimized
+ ext = 'c' if debug else 'o'
+ return py_file + ext
+
+
+try:
+ callable = callable
+except NameError:
+ from collections import Callable
+
+ def callable(obj):
+ return isinstance(obj, Callable)
+
+
+try:
+ fsencode = os.fsencode
+except AttributeError:
+ def fsencode(filename):
+ if isinstance(filename, bytes):
+ return filename
+ elif isinstance(filename, str):
+ return filename.encode(sys.getfilesystemencoding())
+ else:
+ raise TypeError("expect bytes or str, not %s" %
+ type(filename).__name__)
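+
+# Example (illustrative, assuming a UTF-8 filesystem encoding):
+# fsencode(u'caf\xe9.txt') returns b'caf\xc3\xa9.txt', while bytes input is
+# passed through unchanged.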
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py
new file mode 100644
index 00000000..159e49ee
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py
@@ -0,0 +1,761 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Utility functions for copying and archiving files and directory trees.
+
+XXX The functions here don't copy the resource fork or other metadata on Mac.
+
+"""
+
+import os
+import sys
+import stat
+from os.path import abspath
+import fnmatch
+import collections
+import errno
+from . import tarfile
+
+try:
+ import bz2
+ _BZ2_SUPPORTED = True
+except ImportError:
+ _BZ2_SUPPORTED = False
+
+try:
+ from pwd import getpwnam
+except ImportError:
+ getpwnam = None
+
+try:
+ from grp import getgrnam
+except ImportError:
+ getgrnam = None
+
+__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
+ "copytree", "move", "rmtree", "Error", "SpecialFileError",
+ "ExecError", "make_archive", "get_archive_formats",
+ "register_archive_format", "unregister_archive_format",
+ "get_unpack_formats", "register_unpack_format",
+ "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
+
+class Error(EnvironmentError):
+ pass
+
+class SpecialFileError(EnvironmentError):
+ """Raised when trying to do a kind of operation (e.g. copying) which is
+ not supported on a special file (e.g. a named pipe)"""
+
+class ExecError(EnvironmentError):
+ """Raised when a command could not be executed"""
+
+class ReadError(EnvironmentError):
+ """Raised when an archive cannot be read"""
+
+class RegistryError(Exception):
+ """Raised when a registry operation with the archiving
+ and unpacking registries fails"""
+
+
+try:
+ WindowsError
+except NameError:
+ WindowsError = None
+
+def copyfileobj(fsrc, fdst, length=16*1024):
+ """copy data from file-like object fsrc to file-like object fdst"""
+ while 1:
+ buf = fsrc.read(length)
+ if not buf:
+ break
+ fdst.write(buf)
+
+def _samefile(src, dst):
+ # Macintosh, Unix.
+ if hasattr(os.path, 'samefile'):
+ try:
+ return os.path.samefile(src, dst)
+ except OSError:
+ return False
+
+ # All other platforms: check for same pathname.
+ return (os.path.normcase(os.path.abspath(src)) ==
+ os.path.normcase(os.path.abspath(dst)))
+
+def copyfile(src, dst):
+ """Copy data from src to dst"""
+ if _samefile(src, dst):
+ raise Error("`%s` and `%s` are the same file" % (src, dst))
+
+ for fn in [src, dst]:
+ try:
+ st = os.stat(fn)
+ except OSError:
+ # File most likely does not exist
+ pass
+ else:
+ # XXX What about other special files? (sockets, devices...)
+ if stat.S_ISFIFO(st.st_mode):
+ raise SpecialFileError("`%s` is a named pipe" % fn)
+
+ with open(src, 'rb') as fsrc:
+ with open(dst, 'wb') as fdst:
+ copyfileobj(fsrc, fdst)
+
+def copymode(src, dst):
+ """Copy mode bits from src to dst"""
+ if hasattr(os, 'chmod'):
+ st = os.stat(src)
+ mode = stat.S_IMODE(st.st_mode)
+ os.chmod(dst, mode)
+
+def copystat(src, dst):
+ """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
+ st = os.stat(src)
+ mode = stat.S_IMODE(st.st_mode)
+ if hasattr(os, 'utime'):
+ os.utime(dst, (st.st_atime, st.st_mtime))
+ if hasattr(os, 'chmod'):
+ os.chmod(dst, mode)
+ if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
+ try:
+ os.chflags(dst, st.st_flags)
+ except OSError as why:
+ if (not hasattr(errno, 'EOPNOTSUPP') or
+ why.errno != errno.EOPNOTSUPP):
+ raise
+
+def copy(src, dst):
+ """Copy data and mode bits ("cp src dst").
+
+ The destination may be a directory.
+
+ """
+ if os.path.isdir(dst):
+ dst = os.path.join(dst, os.path.basename(src))
+ copyfile(src, dst)
+ copymode(src, dst)
+
+def copy2(src, dst):
+ """Copy data and all stat info ("cp -p src dst").
+
+ The destination may be a directory.
+
+ """
+ if os.path.isdir(dst):
+ dst = os.path.join(dst, os.path.basename(src))
+ copyfile(src, dst)
+ copystat(src, dst)
+
+def ignore_patterns(*patterns):
+ """Function that can be used as copytree() ignore parameter.
+
+ Patterns is a sequence of glob-style patterns
+ that are used to exclude files"""
+ def _ignore_patterns(path, names):
+ ignored_names = []
+ for pattern in patterns:
+ ignored_names.extend(fnmatch.filter(names, pattern))
+ return set(ignored_names)
+ return _ignore_patterns
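+
+# Example (illustrative): copytree('src', 'dst',
+# ignore=ignore_patterns('*.pyc', 'tmp*')) skips compiled files and anything
+# starting with "tmp" in every directory that is copied.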
+
+def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
+ ignore_dangling_symlinks=False):
+ """Recursively copy a directory tree.
+
+ The destination directory must not already exist.
+ If exception(s) occur, an Error is raised with a list of reasons.
+
+ If the optional symlinks flag is true, symbolic links in the
+ source tree result in symbolic links in the destination tree; if
+ it is false, the contents of the files pointed to by symbolic
+ links are copied. If the file pointed to by the symlink doesn't
+ exist, an exception will be added to the list of errors raised in
+ an Error exception at the end of the copy process.
+
+ You can set the optional ignore_dangling_symlinks flag to true if you
+ want to silence this exception. Notice that this has no effect on
+ platforms that don't support os.symlink.
+
+ The optional ignore argument is a callable. If given, it
+ is called with the `src` parameter, which is the directory
+ being visited by copytree(), and `names` which is the list of
+ `src` contents, as returned by os.listdir():
+
+ callable(src, names) -> ignored_names
+
+ Since copytree() is called recursively, the callable will be
+ called once for each directory that is copied. It returns a
+ list of names relative to the `src` directory that should
+ not be copied.
+
+ The optional copy_function argument is a callable that will be used
+ to copy each file. It will be called with the source path and the
+ destination path as arguments. By default, copy2() is used, but any
+ function that supports the same signature (like copy()) can be used.
+
+ """
+ names = os.listdir(src)
+ if ignore is not None:
+ ignored_names = ignore(src, names)
+ else:
+ ignored_names = set()
+
+ os.makedirs(dst)
+ errors = []
+ for name in names:
+ if name in ignored_names:
+ continue
+ srcname = os.path.join(src, name)
+ dstname = os.path.join(dst, name)
+ try:
+ if os.path.islink(srcname):
+ linkto = os.readlink(srcname)
+ if symlinks:
+ os.symlink(linkto, dstname)
+ else:
+ # ignore dangling symlink if the flag is on
+ if not os.path.exists(linkto) and ignore_dangling_symlinks:
+ continue
+ # otherwise let the copy occur; copy2 will raise an error
+ copy_function(srcname, dstname)
+ elif os.path.isdir(srcname):
+ copytree(srcname, dstname, symlinks, ignore, copy_function)
+ else:
+ # Will raise a SpecialFileError for unsupported file types
+ copy_function(srcname, dstname)
+ # catch the Error from the recursive copytree so that we can
+ # continue with other files
+ except Error as err:
+ errors.extend(err.args[0])
+ except EnvironmentError as why:
+ errors.append((srcname, dstname, str(why)))
+ try:
+ copystat(src, dst)
+ except OSError as why:
+ if WindowsError is not None and isinstance(why, WindowsError):
+ # Copying file access times may fail on Windows
+ pass
+ else:
+ errors.append((src, dst, str(why)))
+ if errors:
+ raise Error(errors)
+
+def rmtree(path, ignore_errors=False, onerror=None):
+ """Recursively delete a directory tree.
+
+ If ignore_errors is set, errors are ignored; otherwise, if onerror
+ is set, it is called to handle the error with arguments (func,
+ path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
+ path is the argument to that function that caused it to fail; and
+ exc_info is a tuple returned by sys.exc_info(). If ignore_errors
+ is false and onerror is None, an exception is raised.
+
+ """
+ if ignore_errors:
+ def onerror(*args):
+ pass
+ elif onerror is None:
+ def onerror(*args):
+ raise
+ try:
+ if os.path.islink(path):
+ # symlinks to directories are forbidden, see bug #1669
+ raise OSError("Cannot call rmtree on a symbolic link")
+ except OSError:
+ onerror(os.path.islink, path, sys.exc_info())
+ # can't continue even if onerror hook returns
+ return
+ names = []
+ try:
+ names = os.listdir(path)
+ except os.error:
+ onerror(os.listdir, path, sys.exc_info())
+ for name in names:
+ fullname = os.path.join(path, name)
+ try:
+ mode = os.lstat(fullname).st_mode
+ except os.error:
+ mode = 0
+ if stat.S_ISDIR(mode):
+ rmtree(fullname, ignore_errors, onerror)
+ else:
+ try:
+ os.remove(fullname)
+ except os.error:
+ onerror(os.remove, fullname, sys.exc_info())
+ try:
+ os.rmdir(path)
+ except os.error:
+ onerror(os.rmdir, path, sys.exc_info())
+
+
+def _basename(path):
+ # A basename() variant which first strips the trailing slash, if present.
+ # Thus we always get the last component of the path, even for directories.
+ return os.path.basename(path.rstrip(os.path.sep))
+
+def move(src, dst):
+ """Recursively move a file or directory to another location. This is
+ similar to the Unix "mv" command.
+
+ If the destination is a directory or a symlink to a directory, the source
+ is moved inside the directory. The destination path must not already
+ exist.
+
+ If the destination already exists but is not a directory, it may be
+ overwritten depending on os.rename() semantics.
+
+ If the destination is on our current filesystem, then rename() is used.
+ Otherwise, src is copied to the destination and then removed.
+ A lot more could be done here... A look at mv.c shows a lot of
+ the issues this implementation glosses over.
+
+ """
+ real_dst = dst
+ if os.path.isdir(dst):
+ if _samefile(src, dst):
+ # We might be on a case insensitive filesystem,
+ # perform the rename anyway.
+ os.rename(src, dst)
+ return
+
+ real_dst = os.path.join(dst, _basename(src))
+ if os.path.exists(real_dst):
+ raise Error("Destination path '%s' already exists" % real_dst)
+ try:
+ os.rename(src, real_dst)
+ except OSError:
+ if os.path.isdir(src):
+ if _destinsrc(src, dst):
+ raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
+ copytree(src, real_dst, symlinks=True)
+ rmtree(src)
+ else:
+ copy2(src, real_dst)
+ os.unlink(src)
+
+def _destinsrc(src, dst):
+ src = abspath(src)
+ dst = abspath(dst)
+ if not src.endswith(os.path.sep):
+ src += os.path.sep
+ if not dst.endswith(os.path.sep):
+ dst += os.path.sep
+ return dst.startswith(src)
+
+def _get_gid(name):
+ """Returns a gid, given a group name."""
+ if getgrnam is None or name is None:
+ return None
+ try:
+ result = getgrnam(name)
+ except KeyError:
+ result = None
+ if result is not None:
+ return result[2]
+ return None
+
+def _get_uid(name):
+ """Returns an uid, given a user name."""
+ if getpwnam is None or name is None:
+ return None
+ try:
+ result = getpwnam(name)
+ except KeyError:
+ result = None
+ if result is not None:
+ return result[2]
+ return None
+
+def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
+ owner=None, group=None, logger=None):
+ """Create a (possibly compressed) tar file from all the files under
+ 'base_dir'.
+
+ 'compress' must be "gzip" (the default), "bzip2", or None.
+
+ 'owner' and 'group' can be used to define an owner and a group for the
+ archive that is being built. If not provided, the current owner and group
+ will be used.
+
+ The output tar file will be named 'base_name' + ".tar", possibly plus
+ the appropriate compression extension (".gz", or ".bz2").
+
+ Returns the output filename.
+ """
+ tar_compression = {'gzip': 'gz', None: ''}
+ compress_ext = {'gzip': '.gz'}
+
+ if _BZ2_SUPPORTED:
+ tar_compression['bzip2'] = 'bz2'
+ compress_ext['bzip2'] = '.bz2'
+
+ # flags for compression program, each element of list will be an argument
+ if compress is not None and compress not in compress_ext:
+ raise ValueError("bad value for 'compress', or compression format not "
+ "supported : {0}".format(compress))
+
+ archive_name = base_name + '.tar' + compress_ext.get(compress, '')
+ archive_dir = os.path.dirname(archive_name)
+
+ if not os.path.exists(archive_dir):
+ if logger is not None:
+ logger.info("creating %s", archive_dir)
+ if not dry_run:
+ os.makedirs(archive_dir)
+
+ # creating the tarball
+ if logger is not None:
+ logger.info('Creating tar archive')
+
+ uid = _get_uid(owner)
+ gid = _get_gid(group)
+
+ def _set_uid_gid(tarinfo):
+ if gid is not None:
+ tarinfo.gid = gid
+ tarinfo.gname = group
+ if uid is not None:
+ tarinfo.uid = uid
+ tarinfo.uname = owner
+ return tarinfo
+
+ if not dry_run:
+ tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+ try:
+ tar.add(base_dir, filter=_set_uid_gid)
+ finally:
+ tar.close()
+
+ return archive_name
+
+def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
+ # XXX see if we want to keep an external call here
+ if verbose:
+ zipoptions = "-r"
+ else:
+ zipoptions = "-rq"
+ from distutils.errors import DistutilsExecError
+ from distutils.spawn import spawn
+ try:
+ spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
+ except DistutilsExecError:
+ # XXX really should distinguish between "couldn't find
+ # external 'zip' command" and "zip failed".
+ raise ExecError("unable to create zip file '%s': "
+ "could neither import the 'zipfile' module nor "
+ "find a standalone zip utility" % zip_filename)
+
+def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
+ """Create a zip file from all the files under 'base_dir'.
+
+ The output zip file will be named 'base_name' + ".zip". Uses either the
+ "zipfile" Python module (if available) or the InfoZIP "zip" utility
+ (if installed and found on the default search path). If neither tool is
+ available, raises ExecError. Returns the name of the output zip
+ file.
+ """
+ zip_filename = base_name + ".zip"
+ archive_dir = os.path.dirname(base_name)
+
+ if not os.path.exists(archive_dir):
+ if logger is not None:
+ logger.info("creating %s", archive_dir)
+ if not dry_run:
+ os.makedirs(archive_dir)
+
+ # If zipfile module is not available, try spawning an external 'zip'
+ # command.
+ try:
+ import zipfile
+ except ImportError:
+ zipfile = None
+
+ if zipfile is None:
+ _call_external_zip(base_dir, zip_filename, verbose, dry_run)
+ else:
+ if logger is not None:
+ logger.info("creating '%s' and adding '%s' to it",
+ zip_filename, base_dir)
+
+ if not dry_run:
+ zip = zipfile.ZipFile(zip_filename, "w",
+ compression=zipfile.ZIP_DEFLATED)
+
+ for dirpath, dirnames, filenames in os.walk(base_dir):
+ for name in filenames:
+ path = os.path.normpath(os.path.join(dirpath, name))
+ if os.path.isfile(path):
+ zip.write(path, path)
+ if logger is not None:
+ logger.info("adding '%s'", path)
+ zip.close()
+
+ return zip_filename
+
+_ARCHIVE_FORMATS = {
+ 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
+ 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
+ 'zip': (_make_zipfile, [], "ZIP file"),
+ }
+
+if _BZ2_SUPPORTED:
+ _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
+ "bzip2'ed tar-file")
+
+def get_archive_formats():
+ """Returns a list of supported formats for archiving and unarchiving.
+
+ Each element of the returned sequence is a tuple (name, description)
+ """
+ formats = [(name, registry[2]) for name, registry in
+ _ARCHIVE_FORMATS.items()]
+ formats.sort()
+ return formats
+
+def register_archive_format(name, function, extra_args=None, description=''):
+ """Registers an archive format.
+
+ name is the name of the format. function is the callable that will be
+ used to create archives. If provided, extra_args is a sequence of
+ (name, value) tuples that will be passed as arguments to the callable.
+ description can be provided to describe the format, and will be returned
+ by the get_archive_formats() function.
+ """
+ if extra_args is None:
+ extra_args = []
+ if not isinstance(function, collections.Callable):
+ raise TypeError('The %s object is not callable' % function)
+ if not isinstance(extra_args, (tuple, list)):
+ raise TypeError('extra_args needs to be a sequence')
+ for element in extra_args:
+ if not isinstance(element, (tuple, list)) or len(element) != 2:
+ raise TypeError('extra_args elements must be (arg_name, value) tuples')
+
+ _ARCHIVE_FORMATS[name] = (function, extra_args, description)
+
+def unregister_archive_format(name):
+ del _ARCHIVE_FORMATS[name]
+
+def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
+ dry_run=0, owner=None, group=None, logger=None):
+ """Create an archive file (eg. zip or tar).
+
+ 'base_name' is the name of the file to create, minus any format-specific
+ extension; 'format' is the archive format: one of "zip", "tar", "bztar"
+ or "gztar".
+
+ 'root_dir' is a directory that will be the root directory of the
+ archive; i.e. we typically chdir into 'root_dir' before creating the
+ archive. 'base_dir' is the directory where we start archiving from;
+ i.e. 'base_dir' will be the common prefix of all files and
+ directories in the archive. 'root_dir' and 'base_dir' both default
+ to the current directory. Returns the name of the archive file.
+
+ 'owner' and 'group' are used when creating a tar archive. By default,
+ uses the current owner and group.
+ """
+ save_cwd = os.getcwd()
+ if root_dir is not None:
+ if logger is not None:
+ logger.debug("changing into '%s'", root_dir)
+ base_name = os.path.abspath(base_name)
+ if not dry_run:
+ os.chdir(root_dir)
+
+ if base_dir is None:
+ base_dir = os.curdir
+
+ kwargs = {'dry_run': dry_run, 'logger': logger}
+
+ try:
+ format_info = _ARCHIVE_FORMATS[format]
+ except KeyError:
+ raise ValueError("unknown archive format '%s'" % format)
+
+ func = format_info[0]
+ for arg, val in format_info[1]:
+ kwargs[arg] = val
+
+ if format != 'zip':
+ kwargs['owner'] = owner
+ kwargs['group'] = group
+
+ try:
+ filename = func(base_name, base_dir, **kwargs)
+ finally:
+ if root_dir is not None:
+ if logger is not None:
+ logger.debug("changing back to '%s'", save_cwd)
+ os.chdir(save_cwd)
+
+ return filename
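+
+# Example (illustrative): make_archive('/tmp/backup', 'gztar',
+# root_dir='/var/data') writes and returns '/tmp/backup.tar.gz', with the
+# contents of /var/data at the top level of the archive.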
+
+
+def get_unpack_formats():
+ """Returns a list of supported formats for unpacking.
+
+ Each element of the returned sequence is a tuple
+ (name, extensions, description)
+ """
+ formats = [(name, info[0], info[3]) for name, info in
+ _UNPACK_FORMATS.items()]
+ formats.sort()
+ return formats
+
+def _check_unpack_options(extensions, function, extra_args):
+ """Checks what gets registered as an unpacker."""
+ # first make sure no other unpacker is registered for this extension
+ existing_extensions = {}
+ for name, info in _UNPACK_FORMATS.items():
+ for ext in info[0]:
+ existing_extensions[ext] = name
+
+ for extension in extensions:
+ if extension in existing_extensions:
+ msg = '%s is already registered for "%s"'
+ raise RegistryError(msg % (extension,
+ existing_extensions[extension]))
+
+ if not isinstance(function, collections.Callable):
+ raise TypeError('The registered function must be a callable')
+
+
+def register_unpack_format(name, extensions, function, extra_args=None,
+ description=''):
+ """Registers an unpack format.
+
+ `name` is the name of the format. `extensions` is a list of extensions
+ corresponding to the format.
+
+ `function` is the callable that will be
+ used to unpack archives. The callable will receive archives to unpack.
+ If it's unable to handle an archive, it needs to raise a ReadError
+ exception.
+
+ If provided, `extra_args` is a sequence of
+ (name, value) tuples that will be passed as arguments to the callable.
+ description can be provided to describe the format, and will be returned
+ by the get_unpack_formats() function.
+ """
+ if extra_args is None:
+ extra_args = []
+ _check_unpack_options(extensions, function, extra_args)
+ _UNPACK_FORMATS[name] = extensions, function, extra_args, description
+
+def unregister_unpack_format(name):
+ """Removes the pack format from the registry."""
+ del _UNPACK_FORMATS[name]
+
+def _ensure_directory(path):
+ """Ensure that the parent directory of `path` exists"""
+ dirname = os.path.dirname(path)
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+
+def _unpack_zipfile(filename, extract_dir):
+ """Unpack zip `filename` to `extract_dir`
+ """
+ try:
+ import zipfile
+ except ImportError:
+ raise ReadError('zipfile module not available, cannot unpack this archive.')
+
+ if not zipfile.is_zipfile(filename):
+ raise ReadError("%s is not a zip file" % filename)
+
+ zip = zipfile.ZipFile(filename)
+ try:
+ for info in zip.infolist():
+ name = info.filename
+
+ # don't extract absolute paths or ones with .. in them
+ if name.startswith('/') or '..' in name:
+ continue
+
+ target = os.path.join(extract_dir, *name.split('/'))
+ if not target:
+ continue
+
+ _ensure_directory(target)
+ if not name.endswith('/'):
+ # file
+ data = zip.read(info.filename)
+ f = open(target, 'wb')
+ try:
+ f.write(data)
+ finally:
+ f.close()
+ del data
+ finally:
+ zip.close()
+
+def _unpack_tarfile(filename, extract_dir):
+ """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+ """
+ try:
+ tarobj = tarfile.open(filename)
+ except tarfile.TarError:
+ raise ReadError(
+ "%s is not a compressed or uncompressed tar file" % filename)
+ try:
+ tarobj.extractall(extract_dir)
+ finally:
+ tarobj.close()
+
+_UNPACK_FORMATS = {
+ 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
+ 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
+ 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file")
+ }
+
+if _BZ2_SUPPORTED:
+ _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
+ "bzip2'ed tar-file")
+
+def _find_unpack_format(filename):
+ for name, info in _UNPACK_FORMATS.items():
+ for extension in info[0]:
+ if filename.endswith(extension):
+ return name
+ return None
+
+def unpack_archive(filename, extract_dir=None, format=None):
+ """Unpack an archive.
+
+ `filename` is the name of the archive.
+
+ `extract_dir` is the name of the target directory, where the archive
+ is unpacked. If not provided, the current working directory is used.
+
+ `format` is the archive format: one of "zip", "tar", or "gztar". Or any
+ other registered format. If not provided, unpack_archive will use the
+ filename extension and see if an unpacker was registered for that
+ extension.
+
+ In case none is found, a ValueError is raised.
+ """
+ if extract_dir is None:
+ extract_dir = os.getcwd()
+
+ if format is not None:
+ try:
+ format_info = _UNPACK_FORMATS[format]
+ except KeyError:
+ raise ValueError("Unknown unpack format '{0}'".format(format))
+
+ func = format_info[1]
+ func(filename, extract_dir, **dict(format_info[2]))
+ else:
+ # we need to look at the registered unpackers' supported extensions
+ format = _find_unpack_format(filename)
+ if format is None:
+ raise ReadError("Unknown archive format '{0}'".format(filename))
+
+ func = _UNPACK_FORMATS[format][1]
+ kwargs = dict(_UNPACK_FORMATS[format][2])
+ func(filename, extract_dir, **kwargs)
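+
+# Example (illustrative): unpack_archive('/tmp/backup.tar.gz', '/tmp/out')
+# infers the 'gztar' unpacker from the filename extension; pass format='zip'
+# (or another registered name) to force a specific unpacker.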
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg
new file mode 100644
index 00000000..1746bd01
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg
@@ -0,0 +1,84 @@
+[posix_prefix]
+# Configuration directories. Some of these come straight out of the
+# configure script. They are for implementing the other variables, not to
+# be used directly in [resource_locations].
+confdir = /etc
+datadir = /usr/share
+libdir = /usr/lib
+statedir = /var
+# User resource directory
+local = ~/.local/{distribution.name}
+
+stdlib = {base}/lib/python{py_version_short}
+platstdlib = {platbase}/lib/python{py_version_short}
+purelib = {base}/lib/python{py_version_short}/site-packages
+platlib = {platbase}/lib/python{py_version_short}/site-packages
+include = {base}/include/python{py_version_short}{abiflags}
+platinclude = {platbase}/include/python{py_version_short}{abiflags}
+data = {base}
+
+[posix_home]
+stdlib = {base}/lib/python
+platstdlib = {base}/lib/python
+purelib = {base}/lib/python
+platlib = {base}/lib/python
+include = {base}/include/python
+platinclude = {base}/include/python
+scripts = {base}/bin
+data = {base}
+
+[nt]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2]
+stdlib = {base}/Lib
+platstdlib = {base}/Lib
+purelib = {base}/Lib/site-packages
+platlib = {base}/Lib/site-packages
+include = {base}/Include
+platinclude = {base}/Include
+scripts = {base}/Scripts
+data = {base}
+
+[os2_home]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[nt_user]
+stdlib = {userbase}/Python{py_version_nodot}
+platstdlib = {userbase}/Python{py_version_nodot}
+purelib = {userbase}/Python{py_version_nodot}/site-packages
+platlib = {userbase}/Python{py_version_nodot}/site-packages
+include = {userbase}/Python{py_version_nodot}/Include
+scripts = {userbase}/Scripts
+data = {userbase}
+
+[posix_user]
+stdlib = {userbase}/lib/python{py_version_short}
+platstdlib = {userbase}/lib/python{py_version_short}
+purelib = {userbase}/lib/python{py_version_short}/site-packages
+platlib = {userbase}/lib/python{py_version_short}/site-packages
+include = {userbase}/include/python{py_version_short}
+scripts = {userbase}/bin
+data = {userbase}
+
+[osx_framework_user]
+stdlib = {userbase}/lib/python
+platstdlib = {userbase}/lib/python
+purelib = {userbase}/lib/python/site-packages
+platlib = {userbase}/lib/python/site-packages
+include = {userbase}/include
+scripts = {userbase}/bin
+data = {userbase}
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py
new file mode 100644
index 00000000..1df3aba1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py
@@ -0,0 +1,788 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Access to Python's configuration information."""
+
+import codecs
+import os
+import re
+import sys
+from os.path import pardir, realpath
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
+
+
+__all__ = [
+ 'get_config_h_filename',
+ 'get_config_var',
+ 'get_config_vars',
+ 'get_makefile_filename',
+ 'get_path',
+ 'get_path_names',
+ 'get_paths',
+ 'get_platform',
+ 'get_python_version',
+ 'get_scheme_names',
+ 'parse_config_h',
+]
+
+
+def _safe_realpath(path):
+ try:
+ return realpath(path)
+ except OSError:
+ return path
+
+
+if sys.executable:
+ _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
+else:
+ # sys.executable can be empty if argv[0] has been changed and Python is
+ # unable to retrieve the real program name
+ _PROJECT_BASE = _safe_realpath(os.getcwd())
+
+if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
+ _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
+# PC/VS7.1
+if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
+ _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+# PC/AMD64
+if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
+ _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+
+
+def is_python_build():
+ for fn in ("Setup.dist", "Setup.local"):
+ if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
+ return True
+ return False
+
+_PYTHON_BUILD = is_python_build()
+
+_cfg_read = False
+
+def _ensure_cfg_read():
+ global _cfg_read
+ if not _cfg_read:
+ from ..resources import finder
+ backport_package = __name__.rsplit('.', 1)[0]
+ _finder = finder(backport_package)
+ _cfgfile = _finder.find('sysconfig.cfg')
+ assert _cfgfile, 'sysconfig.cfg not found'
+ with _cfgfile.as_stream() as s:
+ _SCHEMES.readfp(s)
+ if _PYTHON_BUILD:
+ for scheme in ('posix_prefix', 'posix_home'):
+ _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
+ _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
+
+ _cfg_read = True
+
+
+_SCHEMES = configparser.RawConfigParser()
+_VAR_REPL = re.compile(r'\{([^{]*?)\}')
+
+def _expand_globals(config):
+ _ensure_cfg_read()
+ if config.has_section('globals'):
+ globals = config.items('globals')
+ else:
+ globals = tuple()
+
+ sections = config.sections()
+ for section in sections:
+ if section == 'globals':
+ continue
+ for option, value in globals:
+ if config.has_option(section, option):
+ continue
+ config.set(section, option, value)
+ config.remove_section('globals')
+
+ # now expanding local variables defined in the cfg file
+ #
+ for section in config.sections():
+ variables = dict(config.items(section))
+
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in variables:
+ return variables[name]
+ return matchobj.group(0)
+
+ for option, value in config.items(section):
+ config.set(section, option, _VAR_REPL.sub(_replacer, value))
+
+#_expand_globals(_SCHEMES)
+
+# FIXME don't rely on sys.version here, its format is an implementation detail
+# of CPython, use sys.version_info or sys.hexversion
+_PY_VERSION = sys.version.split()[0]
+_PY_VERSION_SHORT = sys.version[:3]
+_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
+_PREFIX = os.path.normpath(sys.prefix)
+_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+_CONFIG_VARS = None
+_USER_BASE = None
+
+
+def _subst_vars(path, local_vars):
+ """In the string `path`, replace tokens like {some.thing} with the
+ corresponding value from the map `local_vars`.
+
+ If there is no corresponding value, leave the token unchanged.
+ """
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in local_vars:
+ return local_vars[name]
+ elif name in os.environ:
+ return os.environ[name]
+ return matchobj.group(0)
+ return _VAR_REPL.sub(_replacer, path)
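+
+# Example (illustrative):
+# _subst_vars('{userbase}/bin', {'userbase': '/home/me/.local'}) returns
+# '/home/me/.local/bin'. Names found in neither the mapping nor os.environ
+# are left untouched.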
+
+
+def _extend_dict(target_dict, other_dict):
+ target_keys = target_dict.keys()
+ for key, value in other_dict.items():
+ if key in target_keys:
+ continue
+ target_dict[key] = value
+
+
+def _expand_vars(scheme, vars):
+ res = {}
+ if vars is None:
+ vars = {}
+ _extend_dict(vars, get_config_vars())
+
+ for key, value in _SCHEMES.items(scheme):
+ if os.name in ('posix', 'nt'):
+ value = os.path.expanduser(value)
+ res[key] = os.path.normpath(_subst_vars(value, vars))
+ return res
+
+
+def format_value(value, vars):
+ def _replacer(matchobj):
+ name = matchobj.group(1)
+ if name in vars:
+ return vars[name]
+ return matchobj.group(0)
+ return _VAR_REPL.sub(_replacer, value)
+
+
+def _get_default_scheme():
+ if os.name == 'posix':
+ # the default scheme for posix is posix_prefix
+ return 'posix_prefix'
+ return os.name
+
+
+def _getuserbase():
+ env_base = os.environ.get("PYTHONUSERBASE", None)
+
+ def joinuser(*args):
+ return os.path.expanduser(os.path.join(*args))
+
+ # what about 'os2emx', 'riscos' ?
+ if os.name == "nt":
+ base = os.environ.get("APPDATA") or "~"
+ if env_base:
+ return env_base
+ else:
+ return joinuser(base, "Python")
+
+ if sys.platform == "darwin":
+ framework = get_config_var("PYTHONFRAMEWORK")
+ if framework:
+ if env_base:
+ return env_base
+ else:
+ return joinuser("~", "Library", framework, "%d.%d" %
+ sys.version_info[:2])
+
+ if env_base:
+ return env_base
+ else:
+ return joinuser("~", ".local")
+
+
+def _parse_makefile(filename, vars=None):
+ """Parse a Makefile-style file.
+
+ A dictionary containing name/value pairs is returned. If an
+ optional dictionary is passed in as the second argument, it is
+ used instead of a new dictionary.
+ """
+ # Regexes needed for parsing Makefile (and similar syntaxes,
+ # like old-style Setup files).
+ _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+ _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
+ _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
+ if vars is None:
+ vars = {}
+ done = {}
+ notdone = {}
+
+ with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
+ lines = f.readlines()
+
+ for line in lines:
+ if line.startswith('#') or line.strip() == '':
+ continue
+ m = _variable_rx.match(line)
+ if m:
+ n, v = m.group(1, 2)
+ v = v.strip()
+ # `$$' is a literal `$' in make
+ tmpv = v.replace('$$', '')
+
+ if "$" in tmpv:
+ notdone[n] = v
+ else:
+ try:
+ v = int(v)
+ except ValueError:
+ # insert literal `$'
+ done[n] = v.replace('$$', '$')
+ else:
+ done[n] = v
+
+ # do variable interpolation here
+ variables = list(notdone.keys())
+
+ # Variables with a 'PY_' prefix in the makefile. These need to
+ # be made available without that prefix through sysconfig.
+ # Special care is needed to ensure that variable expansion works, even
+ # if the expansion uses the name without a prefix.
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
+
+ while len(variables) > 0:
+ for name in tuple(variables):
+ value = notdone[name]
+ m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
+ if m is not None:
+ n = m.group(1)
+ found = True
+ if n in done:
+ item = str(done[n])
+ elif n in notdone:
+ # get it on a subsequent round
+ found = False
+ elif n in os.environ:
+ # do it like make: fall back to environment
+ item = os.environ[n]
+
+ elif n in renamed_variables:
+ if (name.startswith('PY_') and
+ name[3:] in renamed_variables):
+ item = ""
+
+ elif 'PY_' + n in notdone:
+ found = False
+
+ else:
+ item = str(done['PY_' + n])
+
+ else:
+ done[n] = item = ""
+
+ if found:
+ after = value[m.end():]
+ value = value[:m.start()] + item + after
+ if "$" in after:
+ notdone[name] = value
+ else:
+ try:
+ value = int(value)
+ except ValueError:
+ done[name] = value.strip()
+ else:
+ done[name] = value
+ variables.remove(name)
+
+ if (name.startswith('PY_') and
+ name[3:] in renamed_variables):
+
+ name = name[3:]
+ if name not in done:
+ done[name] = value
+
+ else:
+ # bogus variable reference (e.g. "prefix=$/opt/python");
+ # just drop it since we can't deal
+ done[name] = value
+ variables.remove(name)
+
+ # strip spurious spaces
+ for k, v in done.items():
+ if isinstance(v, str):
+ done[k] = v.strip()
+
+ # save the results in the global dictionary
+ vars.update(done)
+ return vars
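+
+# Example (illustrative): a makefile containing the two lines
+# prefix=/usr
+# BINDIR=$(prefix)/bin
+# parses to {'prefix': '/usr', 'BINDIR': '/usr/bin'}; a literal '$$'
+# collapses to a single '$'.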
+
+
+def get_makefile_filename():
+ """Return the path of the Makefile."""
+ if _PYTHON_BUILD:
+ return os.path.join(_PROJECT_BASE, "Makefile")
+ if hasattr(sys, 'abiflags'):
+ config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
+ else:
+ config_dir_name = 'config'
+ return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
+
+
+def _init_posix(vars):
+ """Initialize the module as appropriate for POSIX systems."""
+ # load the installed Makefile:
+ makefile = get_makefile_filename()
+ try:
+ _parse_makefile(makefile, vars)
+ except IOError as e:
+ msg = "invalid Python installation: unable to open %s" % makefile
+ if hasattr(e, "strerror"):
+ msg = msg + " (%s)" % e.strerror
+ raise IOError(msg)
+ # load the installed pyconfig.h:
+ config_h = get_config_h_filename()
+ try:
+ with open(config_h) as f:
+ parse_config_h(f, vars)
+ except IOError as e:
+ msg = "invalid Python installation: unable to open %s" % config_h
+ if hasattr(e, "strerror"):
+ msg = msg + " (%s)" % e.strerror
+ raise IOError(msg)
+ # On AIX, there are wrong paths to the linker scripts in the Makefile
+ # -- these paths are relative to the Python source, but when installed
+ # the scripts are in another directory.
+ if _PYTHON_BUILD:
+ vars['LDSHARED'] = vars['BLDSHARED']
+
+
+def _init_non_posix(vars):
+ """Initialize the module as appropriate for NT"""
+ # set basic install directories
+ vars['LIBDEST'] = get_path('stdlib')
+ vars['BINLIBDEST'] = get_path('platstdlib')
+ vars['INCLUDEPY'] = get_path('include')
+ vars['SO'] = '.pyd'
+ vars['EXE'] = '.exe'
+ vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
+ vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
+
+#
+# public APIs
+#
+
+
+def parse_config_h(fp, vars=None):
+ """Parse a config.h-style file.
+
+ A dictionary containing name/value pairs is returned. If an
+ optional dictionary is passed in as the second argument, it is
+ used instead of a new dictionary.
+ """
+ if vars is None:
+ vars = {}
+ define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
+ undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
+
+ while True:
+ line = fp.readline()
+ if not line:
+ break
+ m = define_rx.match(line)
+ if m:
+ n, v = m.group(1, 2)
+ try:
+ v = int(v)
+ except ValueError:
+ pass
+ vars[n] = v
+ else:
+ m = undef_rx.match(line)
+ if m:
+ vars[m.group(1)] = 0
+ return vars
+
+
+def get_config_h_filename():
+ """Return the path of pyconfig.h."""
+ if _PYTHON_BUILD:
+ if os.name == "nt":
+ inc_dir = os.path.join(_PROJECT_BASE, "PC")
+ else:
+ inc_dir = _PROJECT_BASE
+ else:
+ inc_dir = get_path('platinclude')
+ return os.path.join(inc_dir, 'pyconfig.h')
+
+
+def get_scheme_names():
+ """Return a tuple containing the schemes names."""
+ return tuple(sorted(_SCHEMES.sections()))
+
+
+def get_path_names():
+ """Return a tuple containing the paths names."""
+ # xxx see if we want a static list
+ return _SCHEMES.options('posix_prefix')
+
+
+def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
+ """Return a mapping containing an install scheme.
+
+ ``scheme`` is the install scheme name and defaults to the default
+ scheme for the current platform.
+ """
+ _ensure_cfg_read()
+ if expand:
+ return _expand_vars(scheme, vars)
+ else:
+ return dict(_SCHEMES.items(scheme))
+
+
+def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
+ """Return a path corresponding to the scheme.
+
+ ``scheme`` is the install scheme name.
+ """
+ return get_paths(scheme, vars, expand)[name]
+
+
+def get_config_vars(*args):
+ """With no arguments, return a dictionary of all configuration
+ variables relevant for the current platform.
+
+ On Unix, this means every variable defined in Python's installed Makefile;
+ On Windows and Mac OS it's a much smaller set.
+
+ With arguments, return a list of values that result from looking up
+ each argument in the configuration variable dictionary.
+ """
+ global _CONFIG_VARS
+ if _CONFIG_VARS is None:
+ _CONFIG_VARS = {}
+ # Normalized versions of prefix and exec_prefix are handy to have;
+ # in fact, these are the standard versions used most places in the
+ # distutils2 module.
+ _CONFIG_VARS['prefix'] = _PREFIX
+ _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
+ _CONFIG_VARS['py_version'] = _PY_VERSION
+ _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
+ _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
+ _CONFIG_VARS['base'] = _PREFIX
+ _CONFIG_VARS['platbase'] = _EXEC_PREFIX
+ _CONFIG_VARS['projectbase'] = _PROJECT_BASE
+ try:
+ _CONFIG_VARS['abiflags'] = sys.abiflags
+ except AttributeError:
+ # sys.abiflags may not be defined on all platforms.
+ _CONFIG_VARS['abiflags'] = ''
+
+ if os.name in ('nt', 'os2'):
+ _init_non_posix(_CONFIG_VARS)
+ if os.name == 'posix':
+ _init_posix(_CONFIG_VARS)
+ # Setting 'userbase' is done below the call to the
+ # init function to enable using 'get_config_var' in
+ # the init-function.
+ if sys.version >= '2.6':
+ _CONFIG_VARS['userbase'] = _getuserbase()
+
+ if 'srcdir' not in _CONFIG_VARS:
+ _CONFIG_VARS['srcdir'] = _PROJECT_BASE
+ else:
+ _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
+
+ # Convert srcdir into an absolute path if it appears necessary.
+ # Normally it is relative to the build directory. However, during
+ # testing, for example, we might be running a non-installed python
+ # from a different directory.
+ if _PYTHON_BUILD and os.name == "posix":
+ base = _PROJECT_BASE
+ try:
+ cwd = os.getcwd()
+ except OSError:
+ cwd = None
+ if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
+ base != cwd):
+ # srcdir is relative and we are not in the same directory
+ # as the executable. Assume executable is in the build
+ # directory and make srcdir absolute.
+ srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
+ _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
+
+ if sys.platform == 'darwin':
+ kernel_version = os.uname()[2] # Kernel version (8.4.3)
+ major_version = int(kernel_version.split('.')[0])
+
+ if major_version < 8:
+ # On Mac OS X before 10.4, check if -arch and -isysroot
+ # are in CFLAGS or LDFLAGS and remove them if they are.
+ # This is needed when building extensions on a 10.3 system
+ # using a universal build of python.
+ for key in ('LDFLAGS', 'BASECFLAGS',
+ # a number of derived variables. These need to be
+ # patched up as well.
+ 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+ flags = _CONFIG_VARS[key]
+ flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
+ flags = re.sub('-isysroot [^ \t]*', ' ', flags)
+ _CONFIG_VARS[key] = flags
+ else:
+ # Allow the user to override the architecture flags using
+ # an environment variable.
+ # NOTE: This name was introduced by Apple in OSX 10.5 and
+ # is used by several scripting languages distributed with
+ # that OS release.
+ if 'ARCHFLAGS' in os.environ:
+ arch = os.environ['ARCHFLAGS']
+ for key in ('LDFLAGS', 'BASECFLAGS',
+ # a number of derived variables. These need to be
+ # patched up as well.
+ 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+
+ flags = _CONFIG_VARS[key]
+ flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
+ flags = flags + ' ' + arch
+ _CONFIG_VARS[key] = flags
+
+ # If we're on OSX 10.5 or later and the user tries to
+ # compiles an extension using an SDK that is not present
+ # on the current machine it is better to not use an SDK
+ # than to fail.
+ #
+ # The major usecase for this is users using a Python.org
+ # binary installer on OSX 10.6: that installer uses
+ # the 10.4u SDK, but that SDK is not installed by default
+ # when you install Xcode.
+ #
+ CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
+ m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
+ if m is not None:
+ sdk = m.group(1)
+ if not os.path.exists(sdk):
+ for key in ('LDFLAGS', 'BASECFLAGS',
+ # a number of derived variables. These need to be
+ # patched up as well.
+ 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+
+ flags = _CONFIG_VARS[key]
+ flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
+ _CONFIG_VARS[key] = flags
+
+ if args:
+ vals = []
+ for name in args:
+ vals.append(_CONFIG_VARS.get(name))
+ return vals
+ else:
+ return _CONFIG_VARS
+
+
+def get_config_var(name):
+ """Return the value of a single variable using the dictionary returned by
+ 'get_config_vars()'.
+
+ Equivalent to get_config_vars().get(name)
+ """
+ return get_config_vars().get(name)
+
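+# Example usage (illustrative; actual values depend on the interpreter):
+#     >>> get_config_var('py_version_short')
+#     '3.7'
+#     >>> get_config_vars('prefix', 'exec_prefix')
+#     ['/usr', '/usr']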
+
+def get_platform():
+ """Return a string that identifies the current platform.
+
+ This is used mainly to distinguish platform-specific build directories and
+ platform-specific built distributions. Typically includes the OS name
+ and version and the architecture (as supplied by 'os.uname()'),
+ although the exact information included depends on the OS; e.g. for IRIX
+ the architecture isn't particularly important (IRIX only runs on SGI
+ hardware), but for Linux the kernel version isn't particularly
+ important.
+
+ Examples of returned values:
+ linux-i586
+ linux-alpha (?)
+ solaris-2.6-sun4u
+ irix-5.3
+ irix64-6.2
+
+ Windows will return one of:
+ win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc))
+ win-ia64 (64bit Windows on Itanium)
+ win32 (all others - specifically, sys.platform is returned)
+
+ For other non-POSIX platforms, currently just returns 'sys.platform'.
+ """
+ if os.name == 'nt':
+ # sniff sys.version for architecture.
+ prefix = " bit ("
+ i = sys.version.find(prefix)
+ if i == -1:
+ return sys.platform
+ j = sys.version.find(")", i)
+ look = sys.version[i+len(prefix):j].lower()
+ if look == 'amd64':
+ return 'win-amd64'
+ if look == 'itanium':
+ return 'win-ia64'
+ return sys.platform
+
+ if os.name != "posix" or not hasattr(os, 'uname'):
+ # XXX what about the architecture? NT is Intel or Alpha,
+ # Mac OS is M68k or PPC, etc.
+ return sys.platform
+
+ # Try to distinguish various flavours of Unix
+ osname, host, release, version, machine = os.uname()
+
+ # Convert the OS name to lowercase, remove '/' characters
+ # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
+ osname = osname.lower().replace('/', '')
+ machine = machine.replace(' ', '_')
+ machine = machine.replace('/', '-')
+
+ if osname[:5] == "linux":
+ # At least on Linux/Intel, 'machine' is the processor --
+ # i386, etc.
+ # XXX what about Alpha, SPARC, etc?
+ return "%s-%s" % (osname, machine)
+ elif osname[:5] == "sunos":
+ if release[0] >= "5": # SunOS 5 == Solaris 2
+ osname = "solaris"
+ release = "%d.%s" % (int(release[0]) - 3, release[2:])
+ # fall through to standard osname-release-machine representation
+ elif osname[:4] == "irix": # could be "irix64"!
+ return "%s-%s" % (osname, release)
+ elif osname[:3] == "aix":
+ return "%s-%s.%s" % (osname, version, release)
+ elif osname[:6] == "cygwin":
+ osname = "cygwin"
+ rel_re = re.compile(r'[\d.]+')
+ m = rel_re.match(release)
+ if m:
+ release = m.group()
+ elif osname[:6] == "darwin":
+ #
+ # For our purposes, we'll assume that the system version from
+ # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
+ # to. This makes the compatibility story a bit more sane because the
+ # machine is going to compile and link as if it were targeting
+ # MACOSX_DEPLOYMENT_TARGET.
+ cfgvars = get_config_vars()
+ macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+
+ if True:
+ # Always calculate the release of the running machine,
+ # needed to determine if we can build fat binaries or not.
+
+ macrelease = macver
+ # Get the system version. Reading this plist is a documented
+ # way to get the system version (see the documentation for
+ # the Gestalt Manager)
+ try:
+ f = open('/System/Library/CoreServices/SystemVersion.plist')
+ except IOError:
+ # We're on a plain darwin box, fall back to the default
+ # behaviour.
+ pass
+ else:
+ try:
+ m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+ r'<string>(.*?)</string>', f.read())
+ finally:
+ f.close()
+ if m is not None:
+ macrelease = '.'.join(m.group(1).split('.')[:2])
+ # else: fall back to the default behaviour
+
+ if not macver:
+ macver = macrelease
+
+ if macver:
+ release = macver
+ osname = "macosx"
+
+ if ((macrelease + '.') >= '10.4.' and
+ '-arch' in get_config_vars().get('CFLAGS', '').strip()):
+ # The universal build will build fat binaries, but not on
+ # systems before 10.4
+ #
+ # Try to detect 4-way universal builds, those have machine-type
+ # 'universal' instead of 'fat'.
+
+ machine = 'fat'
+ cflags = get_config_vars().get('CFLAGS')
+
+ archs = re.findall(r'-arch\s+(\S+)', cflags)
+ archs = tuple(sorted(set(archs)))
+
+ if len(archs) == 1:
+ machine = archs[0]
+ elif archs == ('i386', 'ppc'):
+ machine = 'fat'
+ elif archs == ('i386', 'x86_64'):
+ machine = 'intel'
+ elif archs == ('i386', 'ppc', 'x86_64'):
+ machine = 'fat3'
+ elif archs == ('ppc64', 'x86_64'):
+ machine = 'fat64'
+ elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
+ machine = 'universal'
+ else:
+ raise ValueError(
+ "Don't know machine value for archs=%r" % (archs,))
+
+ elif machine == 'i386':
+ # On OSX the machine type returned by uname is always the
+ # 32-bit variant, even if the executable architecture is
+ # the 64-bit variant
+ if sys.maxsize >= 2**32:
+ machine = 'x86_64'
+
+ elif machine in ('PowerPC', 'Power_Macintosh'):
+ # Pick a sane name for the PPC architecture.
+ # See 'i386' case
+ if sys.maxsize >= 2**32:
+ machine = 'ppc64'
+ else:
+ machine = 'ppc'
+
+ return "%s-%s-%s" % (osname, release, machine)
+
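+# Illustrative call (the returned string varies by host):
+#     >>> get_platform()
+#     'linux-x86_64'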
+
+def get_python_version():
+ return _PY_VERSION_SHORT
+
+
+def _print_dict(title, data):
+ for index, (key, value) in enumerate(sorted(data.items())):
+ if index == 0:
+ print('%s: ' % (title))
+ print('\t%s = "%s"' % (key, value))
+
+
+def _main():
+ """Display all information sysconfig detains."""
+ print('Platform: "%s"' % get_platform())
+ print('Python version: "%s"' % get_python_version())
+ print('Current installation scheme: "%s"' % _get_default_scheme())
+ print()
+ _print_dict('Paths', get_paths())
+ print()
+ _print_dict('Variables', get_config_vars())
+
+
+if __name__ == '__main__':
+ _main()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
new file mode 100644
index 00000000..d66d8566
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
@@ -0,0 +1,2607 @@
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+from __future__ import print_function
+
+"""Read from and write to tar format archives.
+"""
+
+__version__ = "$Revision$"
+
+version = "0.9.0"
+__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
+__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+import sys
+import os
+import stat
+import errno
+import time
+import struct
+import copy
+import re
+
+try:
+ import grp, pwd
+except ImportError:
+ grp = pwd = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+symlink_exception = (AttributeError, NotImplementedError)
+try:
+ # WindowsError (1314) will be raised if the caller does not hold the
+ # SeCreateSymbolicLinkPrivilege privilege
+ symlink_exception += (WindowsError,)
+except NameError:
+ pass
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
+
+if sys.version_info[0] < 3:
+ import __builtin__ as builtins
+else:
+ import builtins
+
+_open = builtins.open # Since 'open' is TarFile.open
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0" # the null character
+BLOCKSIZE = 512 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20 # length of records
+GNU_MAGIC = b"ustar \0" # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
+
+LENGTH_NAME = 100 # maximum length of a filename
+LENGTH_LINK = 100 # maximum length of a linkname
+LENGTH_PREFIX = 155 # maximum length of the prefix field
+
+REGTYPE = b"0" # regular file
+AREGTYPE = b"\0" # regular file
+LNKTYPE = b"1" # link (inside tarfile)
+SYMTYPE = b"2" # symbolic link
+CHRTYPE = b"3" # character special device
+BLKTYPE = b"4" # block special device
+DIRTYPE = b"5" # directory
+FIFOTYPE = b"6" # fifo special device
+CONTTYPE = b"7" # contiguous file
+
+GNUTYPE_LONGNAME = b"L" # GNU tar longname
+GNUTYPE_LONGLINK = b"K" # GNU tar longlink
+GNUTYPE_SPARSE = b"S" # GNU tar sparse file
+
+XHDTYPE = b"x" # POSIX.1-2001 extended header
+XGLTYPE = b"g" # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X" # Solaris extended header
+
+USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1 # GNU tar format
+PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = GNU_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+ SYMTYPE, DIRTYPE, FIFOTYPE,
+ CONTTYPE, CHRTYPE, BLKTYPE,
+ GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+ CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+ "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname"))
+
+# Fields in a pax header that are numbers, all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+ "atime": float,
+ "ctime": float,
+ "mtime": float,
+ "uid": int,
+ "gid": int,
+ "size": int
+}
+
+#---------------------------------------------------------
+# Bits used in the mode field, values in octal.
+#---------------------------------------------------------
+S_IFLNK = 0o120000 # symbolic link
+S_IFREG = 0o100000 # regular file
+S_IFBLK = 0o060000 # block device
+S_IFDIR = 0o040000 # directory
+S_IFCHR = 0o020000 # character device
+S_IFIFO = 0o010000 # fifo
+
+TSUID = 0o4000 # set UID on execution
+TSGID = 0o2000 # set GID on execution
+TSVTX = 0o1000 # reserved
+
+TUREAD = 0o400 # read by owner
+TUWRITE = 0o200 # write by owner
+TUEXEC = 0o100 # execute/search by owner
+TGREAD = 0o040 # read by group
+TGWRITE = 0o020 # write by group
+TGEXEC = 0o010 # execute/search by group
+TOREAD = 0o004 # read by other
+TOWRITE = 0o002 # write by other
+TOEXEC = 0o001 # execute/search by other
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name in ("nt", "ce"):
+ ENCODING = "utf-8"
+else:
+ ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+ """Convert a string to a null-terminated bytes object.
+ """
+ s = s.encode(encoding, errors)
+ return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+ """Convert a null-terminated bytes object to a string.
+ """
+ p = s.find(b"\0")
+ if p != -1:
+ s = s[:p]
+ return s.decode(encoding, errors)
+
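+# Round-trip sketch for the two helpers above (illustrative):
+#     >>> stn("foo", 8, "utf-8", "strict")
+#     b'foo\x00\x00\x00\x00\x00'
+#     >>> nts(b'foo\x00\x00\x00\x00\x00', "utf-8", "strict")
+#     'foo'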
+def nti(s):
+ """Convert a number field to a python number.
+ """
+ # There are two possible encodings for a number field, see
+ # itn() below.
+ # Compare a one-byte slice (0o200 == 0x80) so the test works for
+ # both str (Python 2) and bytes (Python 3).
+ if s[0:1] != b"\x80":
+ try:
+ n = int(nts(s, "ascii", "strict") or "0", 8)
+ except ValueError:
+ raise InvalidHeaderError("invalid header")
+ else:
+ n = 0
+ # bytearray() yields ints on both Python 2 and 3, unlike bytes
+ # indexing, so the base-256 digits can be summed directly.
+ for ch in bytearray(s[1:]):
+ n <<= 8
+ n += ch
+ return n
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+ """Convert a python number to a number field.
+ """
+ # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+ # octal digits followed by a null-byte, this allows values up to
+ # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+ # that if necessary. A leading 0o200 byte indicates this particular
+ # encoding, the following digits-1 bytes are a big-endian
+ # representation. This allows values up to (256**(digits-1))-1.
+ if 0 <= n < 8 ** (digits - 1):
+ s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL
+ else:
+ if format != GNU_FORMAT or n >= 256 ** (digits - 1):
+ raise ValueError("overflow in number field")
+
+ if n < 0:
+ # XXX We mimic GNU tar's behaviour with negative numbers,
+ # this could raise OverflowError.
+ n = struct.unpack("L", struct.pack("l", n))[0]
+
+ s = bytearray()
+ for i in range(digits - 1):
+ s.insert(0, n & 0o377)
+ n >>= 8
+ s.insert(0, 0o200)
+ return s
+
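+# Worked example of the octal encoding (illustrative):
+#     >>> itn(511)
+#     b'0000777\x00'
+#     >>> nti(b'0000777\x00')
+#     511
+# Values >= 8**7 fall back to the GNU base-256 encoding introduced by a
+# leading 0o200 byte, as described above.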
+def calc_chksums(buf):
+ """Calculate the checksum for a member's header by summing up all
+ characters except for the chksum field which is treated as if
+ it were filled with spaces. According to the GNU tar sources,
+ some tars (Sun and NeXT) calculate chksum with signed char,
+ which will be different if there are chars in the buffer with
+ the high bit set. So we calculate two checksums, unsigned and
+ signed.
+ """
+ unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
+ signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
+ return unsigned_chksum, signed_chksum
+
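+# Quick sanity check (illustrative): for a block of 512 spaces both sums
+# equal 256 + 504 * 32:
+#     >>> calc_chksums(b" " * BLOCKSIZE)
+#     (16384, 16384)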
+def copyfileobj(src, dst, length=None):
+ """Copy length bytes from fileobj src to fileobj dst.
+ If length is None, copy the entire content.
+ """
+ if length == 0:
+ return
+ if length is None:
+ while True:
+ buf = src.read(16*1024)
+ if not buf:
+ break
+ dst.write(buf)
+ return
+
+ BUFSIZE = 16 * 1024
+ blocks, remainder = divmod(length, BUFSIZE)
+ for b in range(blocks):
+ buf = src.read(BUFSIZE)
+ if len(buf) < BUFSIZE:
+ raise IOError("end of file reached")
+ dst.write(buf)
+
+ if remainder != 0:
+ buf = src.read(remainder)
+ if len(buf) < remainder:
+ raise IOError("end of file reached")
+ dst.write(buf)
+ return
+
+filemode_table = (
+ ((S_IFLNK, "l"),
+ (S_IFREG, "-"),
+ (S_IFBLK, "b"),
+ (S_IFDIR, "d"),
+ (S_IFCHR, "c"),
+ (S_IFIFO, "p")),
+
+ ((TUREAD, "r"),),
+ ((TUWRITE, "w"),),
+ ((TUEXEC|TSUID, "s"),
+ (TSUID, "S"),
+ (TUEXEC, "x")),
+
+ ((TGREAD, "r"),),
+ ((TGWRITE, "w"),),
+ ((TGEXEC|TSGID, "s"),
+ (TSGID, "S"),
+ (TGEXEC, "x")),
+
+ ((TOREAD, "r"),),
+ ((TOWRITE, "w"),),
+ ((TOEXEC|TSVTX, "t"),
+ (TSVTX, "T"),
+ (TOEXEC, "x"))
+)
+
+def filemode(mode):
+ """Convert a file's mode to a string of the form
+ -rwxrwxrwx.
+ Used by TarFile.list()
+ """
+ perm = []
+ for table in filemode_table:
+ for bit, char in table:
+ if mode & bit == bit:
+ perm.append(char)
+ break
+ else:
+ perm.append("-")
+ return "".join(perm)
+
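+# Example (illustrative):
+#     >>> filemode(S_IFDIR | 0o755)
+#     'drwxr-xr-x'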
+class TarError(Exception):
+ """Base exception."""
+ pass
+class ExtractError(TarError):
+ """General exception for extract errors."""
+ pass
+class ReadError(TarError):
+ """Exception for unreadable tar archives."""
+ pass
+class CompressionError(TarError):
+ """Exception for unavailable compression methods."""
+ pass
+class StreamError(TarError):
+ """Exception for unsupported operations on stream-like TarFiles."""
+ pass
+class HeaderError(TarError):
+ """Base exception for header errors."""
+ pass
+class EmptyHeaderError(HeaderError):
+ """Exception for empty headers."""
+ pass
+class TruncatedHeaderError(HeaderError):
+ """Exception for truncated headers."""
+ pass
+class EOFHeaderError(HeaderError):
+ """Exception for end of file headers."""
+ pass
+class InvalidHeaderError(HeaderError):
+ """Exception for invalid headers."""
+ pass
+class SubsequentHeaderError(HeaderError):
+ """Exception for missing and invalid extended headers."""
+ pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile(object):
+ """Low-level file object. Supports reading and writing.
+ It is used instead of a regular file object for streaming
+ access.
+ """
+
+ def __init__(self, name, mode):
+ mode = {
+ "r": os.O_RDONLY,
+ "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+ }[mode]
+ if hasattr(os, "O_BINARY"):
+ mode |= os.O_BINARY
+ self.fd = os.open(name, mode, 0o666)
+
+ def close(self):
+ os.close(self.fd)
+
+ def read(self, size):
+ return os.read(self.fd, size)
+
+ def write(self, s):
+ os.write(self.fd, s)
+
+class _Stream(object):
+ """Class that serves as an adapter between TarFile and
+ a stream-like object. The stream-like object only
+ needs to have a read() or write() method and is accessed
+ blockwise. Use of gzip or bzip2 compression is possible.
+ A stream-like object could be for example: sys.stdin,
+ sys.stdout, a socket, a tape device etc.
+
+ _Stream is intended to be used only internally.
+ """
+
+ def __init__(self, name, mode, comptype, fileobj, bufsize):
+ """Construct a _Stream object.
+ """
+ self._extfileobj = True
+ if fileobj is None:
+ fileobj = _LowLevelFile(name, mode)
+ self._extfileobj = False
+
+ if comptype == '*':
+ # Enable transparent compression detection for the
+ # stream interface
+ fileobj = _StreamProxy(fileobj)
+ comptype = fileobj.getcomptype()
+
+ self.name = name or ""
+ self.mode = mode
+ self.comptype = comptype
+ self.fileobj = fileobj
+ self.bufsize = bufsize
+ self.buf = b""
+ self.pos = 0
+ self.closed = False
+
+ try:
+ if comptype == "gz":
+ try:
+ import zlib
+ except ImportError:
+ raise CompressionError("zlib module is not available")
+ self.zlib = zlib
+ self.crc = zlib.crc32(b"")
+ if mode == "r":
+ self._init_read_gz()
+ else:
+ self._init_write_gz()
+
+ if comptype == "bz2":
+ try:
+ import bz2
+ except ImportError:
+ raise CompressionError("bz2 module is not available")
+ if mode == "r":
+ self.dbuf = b""
+ self.cmp = bz2.BZ2Decompressor()
+ else:
+ self.cmp = bz2.BZ2Compressor()
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ def __del__(self):
+ if hasattr(self, "closed") and not self.closed:
+ self.close()
+
+ def _init_write_gz(self):
+ """Initialize for writing with gzip compression.
+ """
+ self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
+ -self.zlib.MAX_WBITS,
+ self.zlib.DEF_MEM_LEVEL,
+ 0)
+ timestamp = struct.pack("<L", int(time.time()))
+ self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+ if self.name.endswith(".gz"):
+ self.name = self.name[:-3]
+ # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+ self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+ def write(self, s):
+ """Write string s to the stream.
+ """
+ if self.comptype == "gz":
+ self.crc = self.zlib.crc32(s, self.crc)
+ self.pos += len(s)
+ if self.comptype != "tar":
+ s = self.cmp.compress(s)
+ self.__write(s)
+
+ def __write(self, s):
+ """Write string s to the stream if a whole new block
+ is ready to be written.
+ """
+ self.buf += s
+ while len(self.buf) > self.bufsize:
+ self.fileobj.write(self.buf[:self.bufsize])
+ self.buf = self.buf[self.bufsize:]
+
+ def close(self):
+ """Close the _Stream object. No operation should be
+ done on it afterwards.
+ """
+ if self.closed:
+ return
+
+ if self.mode == "w" and self.comptype != "tar":
+ self.buf += self.cmp.flush()
+
+ if self.mode == "w" and self.buf:
+ self.fileobj.write(self.buf)
+ self.buf = b""
+ if self.comptype == "gz":
+ # The native zlib crc is an unsigned 32-bit integer, but
+ # the Python wrapper implicitly casts that to a signed C
+ # long. So, on a 32-bit box self.crc may "look negative",
+ # while the same crc on a 64-bit box may "look positive".
+ # To avoid irksome warnings from the `struct` module, force
+ # it to look positive on all boxes.
+ self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
+ self.fileobj.write(struct.pack("<L", self.pos & 0xffffffff))
+
+ if not self._extfileobj:
+ self.fileobj.close()
+
+ self.closed = True
+
+ def _init_read_gz(self):
+ """Initialize for reading a gzip compressed fileobj.
+ """
+ self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+ self.dbuf = b""
+
+ # taken from gzip.GzipFile with some alterations
+ if self.__read(2) != b"\037\213":
+ raise ReadError("not a gzip file")
+ if self.__read(1) != b"\010":
+ raise CompressionError("unsupported compression method")
+
+ flag = ord(self.__read(1))
+ self.__read(6)
+
+ if flag & 4:
+ xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+ self.read(xlen)
+ if flag & 8:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 16:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 2:
+ self.__read(2)
+
+ def tell(self):
+ """Return the stream's file pointer position.
+ """
+ return self.pos
+
+ def seek(self, pos=0):
+ """Set the stream's file pointer to pos. Negative seeking
+ is forbidden.
+ """
+ if pos - self.pos >= 0:
+ blocks, remainder = divmod(pos - self.pos, self.bufsize)
+ for i in range(blocks):
+ self.read(self.bufsize)
+ self.read(remainder)
+ else:
+ raise StreamError("seeking backwards is not allowed")
+ return self.pos
+
+ def read(self, size=None):
+ """Return the next size number of bytes from the stream.
+ If size is not defined, return all bytes of the stream
+ up to EOF.
+ """
+ if size is None:
+ t = []
+ while True:
+ buf = self._read(self.bufsize)
+ if not buf:
+ break
+ t.append(buf)
+ buf = b"".join(t) # the chunks are bytes, so join with a bytes separator
+ else:
+ buf = self._read(size)
+ self.pos += len(buf)
+ return buf
+
+ def _read(self, size):
+ """Return size bytes from the stream.
+ """
+ if self.comptype == "tar":
+ return self.__read(size)
+
+ c = len(self.dbuf)
+ while c < size:
+ buf = self.__read(self.bufsize)
+ if not buf:
+ break
+ try:
+ buf = self.cmp.decompress(buf)
+ except IOError:
+ raise ReadError("invalid compressed data")
+ self.dbuf += buf
+ c += len(buf)
+ buf = self.dbuf[:size]
+ self.dbuf = self.dbuf[size:]
+ return buf
+
+ def __read(self, size):
+ """Return size bytes from stream. If internal buffer is empty,
+ read another block from the stream.
+ """
+ c = len(self.buf)
+ while c < size:
+ buf = self.fileobj.read(self.bufsize)
+ if not buf:
+ break
+ self.buf += buf
+ c += len(buf)
+ buf = self.buf[:size]
+ self.buf = self.buf[size:]
+ return buf
+# class _Stream
+
+class _StreamProxy(object):
+ """Small proxy class that enables transparent compression
+ detection for the Stream interface (mode 'r|*').
+ """
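+# Example usage (illustrative; the macro value depends on the build):
+#     >>> with open(get_config_h_filename()) as f:
+#     ...     cfg = parse_config_h(f)
+#     >>> cfg.get('HAVE_UNISTD_H')
+#     1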
+
+ def __init__(self, fileobj):
+ self.fileobj = fileobj
+ self.buf = self.fileobj.read(BLOCKSIZE)
+
+ def read(self, size):
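+ # Hand back the block buffered for compression sniffing, then rebind
+ # read() to the raw file object so later calls bypass this buffer.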
+ self.read = self.fileobj.read
+ return self.buf
+
+ def getcomptype(self):
+ if self.buf.startswith(b"\037\213\010"):
+ return "gz"
+ if self.buf.startswith(b"BZh91"):
+ return "bz2"
+ return "tar"
+
+ def close(self):
+ self.fileobj.close()
+# class StreamProxy
+
+class _BZ2Proxy(object):
+ """Small proxy class that enables external file object
+ support for "r:bz2" and "w:bz2" modes. This is actually
+ a workaround for a limitation in bz2 module's BZ2File
+ class which (unlike gzip.GzipFile) has no support for
+ a file object argument.
+ """
+
+ blocksize = 16 * 1024
+
+ def __init__(self, fileobj, mode):
+ self.fileobj = fileobj
+ self.mode = mode
+ self.name = getattr(self.fileobj, "name", None)
+ self.init()
+
+ def init(self):
+ import bz2
+ self.pos = 0
+ if self.mode == "r":
+ self.bz2obj = bz2.BZ2Decompressor()
+ self.fileobj.seek(0)
+ self.buf = b""
+ else:
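+# Example usage (illustrative; paths vary by platform and scheme):
+#     >>> get_paths()['stdlib']
+#     '/usr/lib/python3.7'
+#     >>> get_path('include')
+#     '/usr/include/python3.7m'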
+ self.bz2obj = bz2.BZ2Compressor()
+
+ def read(self, size):
+ x = len(self.buf)
+ while x < size:
+ raw = self.fileobj.read(self.blocksize)
+ if not raw:
+ break
+ data = self.bz2obj.decompress(raw)
+ self.buf += data
+ x += len(data)
+
+ buf = self.buf[:size]
+ self.buf = self.buf[size:]
+ self.pos += len(buf)
+ return buf
+
+ def seek(self, pos):
+ if pos < self.pos:
+ self.init()
+ self.read(pos - self.pos)
+
+ def tell(self):
+ return self.pos
+
+ def write(self, data):
+ self.pos += len(data)
+ raw = self.bz2obj.compress(data)
+ self.fileobj.write(raw)
+
+ def close(self):
+ if self.mode == "w":
+ raw = self.bz2obj.flush()
+ self.fileobj.write(raw)
+# class _BZ2Proxy
+
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+ """A thin wrapper around an existing file object that
+ provides a part of its data as an individual file
+ object.
+ """
+
+ def __init__(self, fileobj, offset, size, blockinfo=None):
+ self.fileobj = fileobj
+ self.offset = offset
+ self.size = size
+ self.position = 0
+
+ if blockinfo is None:
+ blockinfo = [(0, size)]
+
+ # Construct a map with data and zero blocks.
+ self.map_index = 0
+ self.map = []
+ lastpos = 0
+ realpos = self.offset
+ for offset, size in blockinfo:
+ if offset > lastpos:
+ self.map.append((False, lastpos, offset, None))
+ self.map.append((True, offset, offset + size, realpos))
+ realpos += size
+ lastpos = offset + size
+ if lastpos < self.size:
+ self.map.append((False, lastpos, self.size, None))
+
+ def seekable(self):
+ if not hasattr(self.fileobj, "seekable"):
+ # XXX gzip.GzipFile and bz2.BZ2File
+ return True
+ return self.fileobj.seekable()
+
+ def tell(self):
+ """Return the current file position.
+ """
+ return self.position
+
+ def seek(self, position):
+ """Seek to a position in the file.
+ """
+ self.position = position
+
+ def read(self, size=None):
+ """Read data from the file.
+ """
+ if size is None:
+ size = self.size - self.position
+ else:
+ size = min(size, self.size - self.position)
+
+ buf = b""
+ while size > 0:
+ while True:
+ data, start, stop, offset = self.map[self.map_index]
+ if start <= self.position < stop:
+ break
+ else:
+ self.map_index += 1
+ if self.map_index == len(self.map):
+ self.map_index = 0
+ length = min(size, stop - self.position)
+ if data:
+ self.fileobj.seek(offset + (self.position - start))
+ buf += self.fileobj.read(length)
+ else:
+ buf += NUL * length
+ size -= length
+ self.position += length
+ return buf
+#class _FileInFile
+
+
+class ExFileObject(object):
+ """File-like object for reading an archive member.
+ Is returned by TarFile.extractfile().
+ """
+ blocksize = 1024
+
+ def __init__(self, tarfile, tarinfo):
+ self.fileobj = _FileInFile(tarfile.fileobj,
+ tarinfo.offset_data,
+ tarinfo.size,
+ tarinfo.sparse)
+ self.name = tarinfo.name
+ self.mode = "r"
+ self.closed = False
+ self.size = tarinfo.size
+
+ self.position = 0
+ self.buffer = b""
+
+ def readable(self):
+ return True
+
+ def writable(self):
+ return False
+
+ def seekable(self):
+ return self.fileobj.seekable()
+
+ def read(self, size=None):
+ """Read at most size bytes from the file. If size is not
+ present or None, read all data until EOF is reached.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ buf = b""
+ if self.buffer:
+ if size is None:
+ buf = self.buffer
+ self.buffer = b""
+ else:
+ buf = self.buffer[:size]
+ self.buffer = self.buffer[size:]
+
+ if size is None:
+ buf += self.fileobj.read()
+ else:
+ buf += self.fileobj.read(size - len(buf))
+
+ self.position += len(buf)
+ return buf
+
+ # XXX TextIOWrapper uses the read1() method.
+ read1 = read
+
+ def readline(self, size=-1):
+ """Read one entire line from the file. If size is present
+ and non-negative, return a string with at most that
+ size, which may be an incomplete line.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ pos = self.buffer.find(b"\n") + 1
+ if pos == 0:
+ # no newline found.
+ while True:
+ buf = self.fileobj.read(self.blocksize)
+ self.buffer += buf
+ if not buf or b"\n" in buf:
+ pos = self.buffer.find(b"\n") + 1
+ if pos == 0:
+ # no newline found.
+ pos = len(self.buffer)
+ break
+
+ if size != -1:
+ pos = min(size, pos)
+
+ buf = self.buffer[:pos]
+ self.buffer = self.buffer[pos:]
+ self.position += len(buf)
+ return buf
+
+ def readlines(self):
+ """Return a list with all remaining lines.
+ """
+ result = []
+ while True:
+ line = self.readline()
+ if not line: break
+ result.append(line)
+ return result
+
+ def tell(self):
+ """Return the current file position.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ return self.position
+
+ def seek(self, pos, whence=os.SEEK_SET):
+ """Seek to a position in the file.
+ """
+ if self.closed:
+ raise ValueError("I/O operation on closed file")
+
+ if whence == os.SEEK_SET:
+ self.position = min(max(pos, 0), self.size)
+ elif whence == os.SEEK_CUR:
+ if pos < 0:
+ self.position = max(self.position + pos, 0)
+ else:
+ self.position = min(self.position + pos, self.size)
+ elif whence == os.SEEK_END:
+ self.position = max(min(self.size + pos, self.size), 0)
+ else:
+ raise ValueError("Invalid argument")
+
+ self.buffer = b""
+ self.fileobj.seek(self.position)
+
+ def close(self):
+ """Close the file object.
+ """
+ self.closed = True
+
+ def __iter__(self):
+ """Get an iterator over the file's lines.
+ """
+ while True:
+ line = self.readline()
+ if not line:
+ break
+ yield line
+#class ExFileObject
+
+#------------------
+# Exported Classes
+#------------------
+class TarInfo(object):
+ """Informational class which holds the details about an
+ archive member given by a tar header block.
+ TarInfo objects are returned by TarFile.getmember(),
+ TarFile.getmembers() and TarFile.gettarinfo() and are
+ usually created internally.
+ """
+
+ __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
+ "chksum", "type", "linkname", "uname", "gname",
+ "devmajor", "devminor",
+ "offset", "offset_data", "pax_headers", "sparse",
+ "tarfile", "_sparse_structs", "_link_target")
+
+ def __init__(self, name=""):
+ """Construct a TarInfo object. name is the optional name
+ of the member.
+ """
+ self.name = name # member name
+ self.mode = 0o644 # file permissions
+ self.uid = 0 # user id
+ self.gid = 0 # group id
+ self.size = 0 # file size
+ self.mtime = 0 # modification time
+ self.chksum = 0 # header checksum
+ self.type = REGTYPE # member type
+ self.linkname = "" # link name
+ self.uname = "" # user name
+ self.gname = "" # group name
+ self.devmajor = 0 # device major number
+ self.devminor = 0 # device minor number
+
+ self.offset = 0 # the tar header starts here
+ self.offset_data = 0 # the file's data starts here
+
+ self.sparse = None # sparse member information
+ self.pax_headers = {} # pax header information
+
+ # In pax headers the "name" and "linkname" field are called
+ # "path" and "linkpath".
+ def _getpath(self):
+ return self.name
+ def _setpath(self, name):
+ self.name = name
+ path = property(_getpath, _setpath)
+
+ def _getlinkpath(self):
+ return self.linkname
+ def _setlinkpath(self, linkname):
+ self.linkname = linkname
+ linkpath = property(_getlinkpath, _setlinkpath)
+
+ def __repr__(self):
+ return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+ def get_info(self):
+ """Return the TarInfo's attributes as a dictionary.
+ """
+ info = {
+ "name": self.name,
+ "mode": self.mode & 0o7777,
+ "uid": self.uid,
+ "gid": self.gid,
+ "size": self.size,
+ "mtime": self.mtime,
+ "chksum": self.chksum,
+ "type": self.type,
+ "linkname": self.linkname,
+ "uname": self.uname,
+ "gname": self.gname,
+ "devmajor": self.devmajor,
+ "devminor": self.devminor
+ }
+
+ if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+ info["name"] += "/"
+
+ return info
+
+ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+ """Return a tar header as a string of 512 byte blocks.
+ """
+ info = self.get_info()
+
+ if format == USTAR_FORMAT:
+ return self.create_ustar_header(info, encoding, errors)
+ elif format == GNU_FORMAT:
+ return self.create_gnu_header(info, encoding, errors)
+ elif format == PAX_FORMAT:
+ return self.create_pax_header(info, encoding)
+ else:
+ raise ValueError("invalid format")
+
+ def create_ustar_header(self, info, encoding, errors):
+ """Return the object as a ustar header block.
+ """
+ info["magic"] = POSIX_MAGIC
+
+ if len(info["linkname"]) > LENGTH_LINK:
+ raise ValueError("linkname is too long")
+
+ if len(info["name"]) > LENGTH_NAME:
+ info["prefix"], info["name"] = self._posix_split_name(info["name"])
+
+ return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+ def create_gnu_header(self, info, encoding, errors):
+ """Return the object as a GNU header block sequence.
+ """
+ info["magic"] = GNU_MAGIC
+
+ buf = b""
+ if len(info["linkname"]) > LENGTH_LINK:
+ buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+ if len(info["name"]) > LENGTH_NAME:
+ buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+ return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+ def create_pax_header(self, info, encoding):
+ """Return the object as a ustar header block. If it cannot be
+ represented this way, prepend a pax extended header sequence
+ with supplemental information.
+ """
+ info["magic"] = POSIX_MAGIC
+ pax_headers = self.pax_headers.copy()
+
+ # Test string fields for values that exceed the field length or cannot
+ # be represented in ASCII encoding.
+ for name, hname, length in (
+ ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+ ("uname", "uname", 32), ("gname", "gname", 32)):
+
+ if hname in pax_headers:
+ # The pax header has priority.
+ continue
+
+ # Try to encode the string as ASCII.
+ try:
+ info[name].encode("ascii", "strict")
+ except UnicodeEncodeError:
+ pax_headers[hname] = info[name]
+ continue
+
+ if len(info[name]) > length:
+ pax_headers[hname] = info[name]
+
+ # Test number fields for values that exceed the field limit or values
+ # that have to be stored as floats.
+ for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+ if name in pax_headers:
+ # The pax header has priority. Avoid overflow.
+ info[name] = 0
+ continue
+
+ val = info[name]
+ if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
+ pax_headers[name] = str(val)
+ info[name] = 0
+
+ # Create a pax extended header if necessary.
+ if pax_headers:
+ buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+ else:
+ buf = b""
+
+ return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+ @classmethod
+ def create_pax_global_header(cls, pax_headers):
+ """Return the object as a pax global header block sequence.
+ """
+ return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")
+
+ def _posix_split_name(self, name):
+ """Split a name longer than 100 chars into a prefix
+ and a name part.
+ """
+ prefix = name[:LENGTH_PREFIX + 1]
+ while prefix and prefix[-1] != "/":
+ prefix = prefix[:-1]
+
+ name = name[len(prefix):]
+ prefix = prefix[:-1]
+
+ if not prefix or len(name) > LENGTH_NAME:
+ raise ValueError("name is too long")
+ return prefix, name
+
+ @staticmethod
+ def _create_header(info, format, encoding, errors):
+ """Return a header block. info is a dictionary with file
+ information, format must be one of the *_FORMAT constants.
+ """
+ parts = [
+ stn(info.get("name", ""), 100, encoding, errors),
+ itn(info.get("mode", 0) & 0o7777, 8, format),
+ itn(info.get("uid", 0), 8, format),
+ itn(info.get("gid", 0), 8, format),
+ itn(info.get("size", 0), 12, format),
+ itn(info.get("mtime", 0), 12, format),
+ b" ", # checksum field
+ info.get("type", REGTYPE),
+ stn(info.get("linkname", ""), 100, encoding, errors),
+ info.get("magic", POSIX_MAGIC),
+ stn(info.get("uname", ""), 32, encoding, errors),
+ stn(info.get("gname", ""), 32, encoding, errors),
+ itn(info.get("devmajor", 0), 8, format),
+ itn(info.get("devminor", 0), 8, format),
+ stn(info.get("prefix", ""), 155, encoding, errors)
+ ]
+
+ buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+ chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
+ buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:]
+ return buf
+
+ @staticmethod
+ def _create_payload(payload):
+ """Return the string payload filled with zero bytes
+ up to the next 512 byte border.
+ """
+ blocks, remainder = divmod(len(payload), BLOCKSIZE)
+ if remainder > 0:
+ payload += (BLOCKSIZE - remainder) * NUL
+ return payload
+
+ @classmethod
+ def _create_gnu_long_header(cls, name, type, encoding, errors):
+ """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+ for name.
+ """
+ name = name.encode(encoding, errors) + NUL
+
+ info = {}
+ info["name"] = "././@LongLink"
+ info["type"] = type
+ info["size"] = len(name)
+ info["magic"] = GNU_MAGIC
+
+ # create extended header + name blocks.
+ return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+ cls._create_payload(name)
+
+ @classmethod
+ def _create_pax_generic_header(cls, pax_headers, type, encoding):
+ """Return a POSIX.1-2008 extended or global header sequence
+ that contains a list of keyword, value pairs. The values
+ must be strings.
+ """
+ # Check if one of the fields contains surrogate characters and thereby
+ # forces hdrcharset=BINARY, see _proc_pax() for more information.
+ binary = False
+ for keyword, value in pax_headers.items():
+ try:
+ value.encode("utf8", "strict")
+ except UnicodeEncodeError:
+ binary = True
+ break
+
+ records = b""
+ if binary:
+ # Put the hdrcharset field at the beginning of the header.
+ records += b"21 hdrcharset=BINARY\n"
+
+ for keyword, value in pax_headers.items():
+ keyword = keyword.encode("utf8")
+ if binary:
+ # Try to restore the original byte representation of `value'.
+ # Needless to say, the encoding must match the string.
+ value = value.encode(encoding, "surrogateescape")
+ else:
+ value = value.encode("utf8")
+
+ l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
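+ # Worked example (illustrative): for keyword "path" and value "foo",
+ # l is 4 + 3 + 3 = 10; the fixed-point search below tries p=0 -> n=11,
+ # p=11 -> n=12, p=12 -> n=12, so the record is b"12 path=foo\n",
+ # whose total length is indeed 12.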
+ n = p = 0
+ while True:
+ n = l + len(str(p))
+ if n == p:
+ break
+ p = n
+ records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+ # We use a hardcoded "././@PaxHeader" name like star does
+ # instead of the one that POSIX recommends.
+ info = {}
+ info["name"] = "././@PaxHeader"
+ info["type"] = type
+ info["size"] = len(records)
+ info["magic"] = POSIX_MAGIC
+
+ # Create pax header + record blocks.
+ return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+ cls._create_payload(records)
+
+ @classmethod
+ def frombuf(cls, buf, encoding, errors):
+ """Construct a TarInfo object from a 512 byte bytes object.
+ """
+ if len(buf) == 0:
+ raise EmptyHeaderError("empty header")
+ if len(buf) != BLOCKSIZE:
+ raise TruncatedHeaderError("truncated header")
+ if buf.count(NUL) == BLOCKSIZE:
+ raise EOFHeaderError("end of file header")
+
+ chksum = nti(buf[148:156])
+ if chksum not in calc_chksums(buf):
+ raise InvalidHeaderError("bad checksum")
+
+ obj = cls()
+ obj.name = nts(buf[0:100], encoding, errors)
+ obj.mode = nti(buf[100:108])
+ obj.uid = nti(buf[108:116])
+ obj.gid = nti(buf[116:124])
+ obj.size = nti(buf[124:136])
+ obj.mtime = nti(buf[136:148])
+ obj.chksum = chksum
+ obj.type = buf[156:157]
+ obj.linkname = nts(buf[157:257], encoding, errors)
+ obj.uname = nts(buf[265:297], encoding, errors)
+ obj.gname = nts(buf[297:329], encoding, errors)
+ obj.devmajor = nti(buf[329:337])
+ obj.devminor = nti(buf[337:345])
+ prefix = nts(buf[345:500], encoding, errors)
+
+ # Old V7 tar format represents a directory as a regular
+ # file with a trailing slash.
+ if obj.type == AREGTYPE and obj.name.endswith("/"):
+ obj.type = DIRTYPE
+
+ # The old GNU sparse format occupies some of the unused
+ # space in the buffer for up to 4 sparse structures.
+ # Save them for later processing in _proc_sparse().
+ if obj.type == GNUTYPE_SPARSE:
+ pos = 386
+ structs = []
+ for i in range(4):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[482])
+ origsize = nti(buf[483:495])
+ obj._sparse_structs = (structs, isextended, origsize)
+
+ # Remove redundant slashes from directories.
+ if obj.isdir():
+ obj.name = obj.name.rstrip("/")
+
+ # Reconstruct a ustar longname.
+ if prefix and obj.type not in GNU_TYPES:
+ obj.name = prefix + "/" + obj.name
+ return obj
+
+ @classmethod
+ def fromtarfile(cls, tarfile):
+ """Return the next TarInfo object from TarFile object
+ tarfile.
+ """
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+ obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+ return obj._proc_member(tarfile)
+
+ #--------------------------------------------------------------------------
+ # The following are methods that are called depending on the type of a
+ # member. The entry point is _proc_member() which can be overridden in a
+ # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+ # implement the following operations:
+ # 1. Set self.offset_data to the position where the data blocks begin,
+ # if there is data that follows.
+ # 2. Set tarfile.offset to the position where the next member's header will
+ # begin.
+ # 3. Return self or another valid TarInfo object.
+ def _proc_member(self, tarfile):
+ """Choose the right processing method depending on
+ the type and call it.
+ """
+ if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+ return self._proc_gnulong(tarfile)
+ elif self.type == GNUTYPE_SPARSE:
+ return self._proc_sparse(tarfile)
+ elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+ return self._proc_pax(tarfile)
+ else:
+ return self._proc_builtin(tarfile)
+
+ def _proc_builtin(self, tarfile):
+ """Process a builtin type or an unknown type which
+ will be treated as a regular file.
+ """
+ self.offset_data = tarfile.fileobj.tell()
+ offset = self.offset_data
+ if self.isreg() or self.type not in SUPPORTED_TYPES:
+ # Skip the following data blocks.
+ offset += self._block(self.size)
+ tarfile.offset = offset
+
+ # Patch the TarInfo object with saved global
+ # header information.
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+ return self
+
+ def _proc_gnulong(self, tarfile):
+ """Process the blocks that hold a GNU longname
+ or longlink member.
+ """
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # Fetch the next header and process it.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError:
+ raise SubsequentHeaderError("missing or bad subsequent header")
+
+ # Patch the TarInfo object from the next header with
+ # the longname information.
+ next.offset = self.offset
+ if self.type == GNUTYPE_LONGNAME:
+ next.name = nts(buf, tarfile.encoding, tarfile.errors)
+ elif self.type == GNUTYPE_LONGLINK:
+ next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+ return next
+
+ def _proc_sparse(self, tarfile):
+ """Process a GNU sparse header plus extra headers.
+ """
+ # We already collected some sparse structures in frombuf().
+ structs, isextended, origsize = self._sparse_structs
+ del self._sparse_structs
+
+ # Collect sparse structures from extended header blocks.
+ while isextended:
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ pos = 0
+ for i in range(21):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ if offset and numbytes:
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[504])
+ self.sparse = structs
+
+ self.offset_data = tarfile.fileobj.tell()
+ tarfile.offset = self.offset_data + self._block(self.size)
+ self.size = origsize
+ return self
+
+ def _proc_pax(self, tarfile):
+ """Process an extended or global header as described in
+ POSIX.1-2008.
+ """
+ # Read the header information.
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # A pax header stores supplemental information for either
+ # the following file (extended) or all following files
+ # (global).
+ if self.type == XGLTYPE:
+ pax_headers = tarfile.pax_headers
+ else:
+ pax_headers = tarfile.pax_headers.copy()
+
+ # Check if the pax header contains a hdrcharset field. This tells us
+ # the encoding of the path, linkpath, uname and gname fields. Normally,
+ # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
+ # implementations to store them as raw binary strings if the
+ # translation to UTF-8 fails.
+ match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+ if match is not None:
+ pax_headers["hdrcharset"] = match.group(1).decode("utf8")
+
+ # For the time being, we don't care about anything other than "BINARY".
+ # The only other value that is currently allowed by the standard is
+ # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+ hdrcharset = pax_headers.get("hdrcharset")
+ if hdrcharset == "BINARY":
+ encoding = tarfile.encoding
+ else:
+ encoding = "utf8"
+
+ # Parse pax header information. A record looks like this:
+ # "%d %s=%s\n" % (length, keyword, value). length is the size
+ # of the complete record including the length field itself and
+ # the newline. keyword and value are both UTF-8 encoded strings.
+ regex = re.compile(br"(\d+) ([^=]+)=")
+ pos = 0
+ while True:
+ match = regex.match(buf, pos)
+ if not match:
+ break
+
+ length, keyword = match.groups()
+ length = int(length)
+ value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+ # Normally, we could just use "utf8" as the encoding and "strict"
+ # as the error handler, but we better not take the risk. For
+ # example, GNU tar <= 1.23 is known to store filenames it cannot
+ # translate to UTF-8 as raw strings (unfortunately without a
+ # hdrcharset=BINARY header).
+ # We first try the strict standard encoding, and if that fails we
+ # fall back on the user's encoding and error handler.
+ keyword = self._decode_pax_field(keyword, "utf8", "utf8",
+ tarfile.errors)
+ if keyword in PAX_NAME_FIELDS:
+ value = self._decode_pax_field(value, encoding, tarfile.encoding,
+ tarfile.errors)
+ else:
+ value = self._decode_pax_field(value, "utf8", "utf8",
+ tarfile.errors)
+
+ pax_headers[keyword] = value
+ pos += length
+
+ # Fetch the next header.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError:
+ raise SubsequentHeaderError("missing or bad subsequent header")
+
+ # Process GNU sparse information.
+ if "GNU.sparse.map" in pax_headers:
+ # GNU extended sparse format version 0.1.
+ self._proc_gnusparse_01(next, pax_headers)
+
+ elif "GNU.sparse.size" in pax_headers:
+ # GNU extended sparse format version 0.0.
+ self._proc_gnusparse_00(next, pax_headers, buf)
+
+ elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+ # GNU extended sparse format version 1.0.
+ self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+ if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+ # Patch the TarInfo object with the extended header info.
+ next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+ next.offset = self.offset
+
+ if "size" in pax_headers:
+ # If the extended header replaces the size field,
+ # we need to recalculate the offset where the next
+ # header starts.
+ offset = next.offset_data
+ if next.isreg() or next.type not in SUPPORTED_TYPES:
+ offset += next._block(next.size)
+ tarfile.offset = offset
+
+ return next
+
+ def _proc_gnusparse_00(self, next, pax_headers, buf):
+ """Process a GNU tar extended sparse header, version 0.0.
+ """
+ offsets = []
+ for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+ offsets.append(int(match.group(1)))
+ numbytes = []
+ for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+ numbytes.append(int(match.group(1)))
+ next.sparse = list(zip(offsets, numbytes))
+
+ def _proc_gnusparse_01(self, next, pax_headers):
+ """Process a GNU tar extended sparse header, version 0.1.
+ """
+ sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+ """Process a GNU tar extended sparse header, version 1.0.
+ """
+ fields = None
+ sparse = []
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ fields, buf = buf.split(b"\n", 1)
+ fields = int(fields)
+ while len(sparse) < fields * 2:
+ if b"\n" not in buf:
+ buf += tarfile.fileobj.read(BLOCKSIZE)
+ number, buf = buf.split(b"\n", 1)
+ sparse.append(int(number))
+ next.offset_data = tarfile.fileobj.tell()
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _apply_pax_info(self, pax_headers, encoding, errors):
+ """Replace fields with supplemental information from a previous
+ pax extended or global header.
+ """
+ for keyword, value in pax_headers.items():
+ if keyword == "GNU.sparse.name":
+ setattr(self, "path", value)
+ elif keyword == "GNU.sparse.size":
+ setattr(self, "size", int(value))
+ elif keyword == "GNU.sparse.realsize":
+ setattr(self, "size", int(value))
+ elif keyword in PAX_FIELDS:
+ if keyword in PAX_NUMBER_FIELDS:
+ try:
+ value = PAX_NUMBER_FIELDS[keyword](value)
+ except ValueError:
+ value = 0
+ if keyword == "path":
+ value = value.rstrip("/")
+ setattr(self, keyword, value)
+
+ self.pax_headers = pax_headers.copy()
+
+ def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+ """Decode a single field from a pax record.
+ """
+ try:
+ return value.decode(encoding, "strict")
+ except UnicodeDecodeError:
+ return value.decode(fallback_encoding, fallback_errors)
+
+ def _block(self, count):
+ """Round up a byte count by BLOCKSIZE and return it,
+ e.g. _block(834) => 1024.
+ """
+ blocks, remainder = divmod(count, BLOCKSIZE)
+ if remainder:
+ blocks += 1
+ return blocks * BLOCKSIZE
+
+ def isreg(self):
+ return self.type in REGULAR_TYPES
+ def isfile(self):
+ return self.isreg()
+ def isdir(self):
+ return self.type == DIRTYPE
+ def issym(self):
+ return self.type == SYMTYPE
+ def islnk(self):
+ return self.type == LNKTYPE
+ def ischr(self):
+ return self.type == CHRTYPE
+ def isblk(self):
+ return self.type == BLKTYPE
+ def isfifo(self):
+ return self.type == FIFOTYPE
+ def issparse(self):
+ return self.sparse is not None
+ def isdev(self):
+ return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
+class TarFile(object):
+ """The TarFile Class provides an interface to tar archives.
+ """
+
+ debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
+
+ dereference = False # If true, add content of linked file to the
+ # tar file, else the link.
+
+ ignore_zeros = False # If true, skips empty or invalid blocks and
+ # continues processing.
+
+ errorlevel = 1 # If 0, fatal errors only appear in debug
+ # messages (if debug >= 0). If > 0, errors
+ # are passed to the caller as exceptions.
+
+ format = DEFAULT_FORMAT # The format to use when creating an archive.
+
+ encoding = ENCODING # Encoding for 8-bit character strings.
+
+ errors = None # Error handler for unicode conversion.
+
+ tarinfo = TarInfo # The default TarInfo class to use.
+
+ fileobject = ExFileObject # The default ExFileObject class to use.
+
+ def __init__(self, name=None, mode="r", fileobj=None, format=None,
+ tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+ errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None):
+ """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+ read from an existing archive, 'a' to append data to an existing
+ file or 'w' to create a new file overwriting an existing one. `mode'
+ defaults to 'r'.
+ If `fileobj' is given, it is used for reading or writing data. If it
+ can be determined, `mode' is overridden by `fileobj's mode.
+        `fileobj' is not closed when TarFile is closed.
+ """
+ if len(mode) > 1 or mode not in "raw":
+ raise ValueError("mode must be 'r', 'a' or 'w'")
+ self.mode = mode
+ self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
+
+ if not fileobj:
+ if self.mode == "a" and not os.path.exists(name):
+ # Create nonexistent files in append mode.
+ self.mode = "w"
+ self._mode = "wb"
+ fileobj = bltn_open(name, self._mode)
+ self._extfileobj = False
+ else:
+ if name is None and hasattr(fileobj, "name"):
+ name = fileobj.name
+ if hasattr(fileobj, "mode"):
+ self._mode = fileobj.mode
+ self._extfileobj = True
+ self.name = os.path.abspath(name) if name else None
+ self.fileobj = fileobj
+
+ # Init attributes.
+ if format is not None:
+ self.format = format
+ if tarinfo is not None:
+ self.tarinfo = tarinfo
+ if dereference is not None:
+ self.dereference = dereference
+ if ignore_zeros is not None:
+ self.ignore_zeros = ignore_zeros
+ if encoding is not None:
+ self.encoding = encoding
+ self.errors = errors
+
+ if pax_headers is not None and self.format == PAX_FORMAT:
+ self.pax_headers = pax_headers
+ else:
+ self.pax_headers = {}
+
+ if debug is not None:
+ self.debug = debug
+ if errorlevel is not None:
+ self.errorlevel = errorlevel
+
+ # Init datastructures.
+ self.closed = False
+ self.members = [] # list of members as TarInfo objects
+ self._loaded = False # flag if all members have been read
+ self.offset = self.fileobj.tell()
+ # current position in the archive file
+ self.inodes = {} # dictionary caching the inodes of
+ # archive members already added
+
+ try:
+ if self.mode == "r":
+ self.firstmember = None
+ self.firstmember = self.next()
+
+ if self.mode == "a":
+ # Move to the end of the archive,
+ # before the first empty block.
+ while True:
+ self.fileobj.seek(self.offset)
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ self.members.append(tarinfo)
+ except EOFHeaderError:
+ self.fileobj.seek(self.offset)
+ break
+ except HeaderError as e:
+ raise ReadError(str(e))
+
+ if self.mode in "aw":
+ self._loaded = True
+
+ if self.pax_headers:
+ buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ #--------------------------------------------------------------------------
+ # Below are the classmethods which act as alternate constructors to the
+ # TarFile class. The open() method is the only one that is needed for
+ # public use; it is the "super"-constructor and is able to select an
+ # adequate "sub"-constructor for a particular compression using the mapping
+ # from OPEN_METH.
+ #
+ # This concept allows one to subclass TarFile without losing the comfort of
+ # the super-constructor. A sub-constructor is registered and made available
+ # by adding it to the mapping in OPEN_METH.
+
+ @classmethod
+ def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+ """Open a tar archive for reading, writing or appending. Return
+ an appropriate TarFile class.
+
+ mode:
+ 'r' or 'r:*' open for reading with transparent compression
+ 'r:' open for reading exclusively uncompressed
+ 'r:gz' open for reading with gzip compression
+ 'r:bz2' open for reading with bzip2 compression
+ 'a' or 'a:' open for appending, creating the file if necessary
+ 'w' or 'w:' open for writing without compression
+ 'w:gz' open for writing with gzip compression
+ 'w:bz2' open for writing with bzip2 compression
+
+ 'r|*' open a stream of tar blocks with transparent compression
+ 'r|' open an uncompressed stream of tar blocks for reading
+ 'r|gz' open a gzip compressed stream of tar blocks
+ 'r|bz2' open a bzip2 compressed stream of tar blocks
+ 'w|' open an uncompressed stream for writing
+ 'w|gz' open a gzip compressed stream for writing
+ 'w|bz2' open a bzip2 compressed stream for writing
+ """
+
+ if not name and not fileobj:
+ raise ValueError("nothing to open")
+
+ if mode in ("r", "r:*"):
+ # Find out which *open() is appropriate for opening the file.
+ for comptype in cls.OPEN_METH:
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ if fileobj is not None:
+ saved_pos = fileobj.tell()
+ try:
+ return func(name, "r", fileobj, **kwargs)
+ except (ReadError, CompressionError) as e:
+ if fileobj is not None:
+ fileobj.seek(saved_pos)
+ continue
+ raise ReadError("file could not be opened successfully")
+
+ elif ":" in mode:
+ filemode, comptype = mode.split(":", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ # Select the *open() function according to
+ # given compression.
+ if comptype in cls.OPEN_METH:
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ else:
+ raise CompressionError("unknown compression type %r" % comptype)
+ return func(name, filemode, fileobj, **kwargs)
+
+ elif "|" in mode:
+ filemode, comptype = mode.split("|", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ if filemode not in "rw":
+ raise ValueError("mode must be 'r' or 'w'")
+
+ stream = _Stream(name, filemode, comptype, fileobj, bufsize)
+ try:
+ t = cls(name, filemode, stream, **kwargs)
+ except:
+ stream.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ elif mode in "aw":
+ return cls.taropen(name, mode, fileobj, **kwargs)
+
+ raise ValueError("undiscernible mode")
+
+ @classmethod
+ def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+ """Open uncompressed tar archive name for reading or writing.
+ """
+ if len(mode) > 1 or mode not in "raw":
+ raise ValueError("mode must be 'r', 'a' or 'w'")
+ return cls(name, mode, fileobj, **kwargs)
+
+ @classmethod
+ def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open gzip compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if len(mode) > 1 or mode not in "rw":
+ raise ValueError("mode must be 'r' or 'w'")
+
+ try:
+ import gzip
+ gzip.GzipFile
+ except (ImportError, AttributeError):
+ raise CompressionError("gzip module is not available")
+
+ extfileobj = fileobj is not None
+ try:
+ fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except IOError:
+ if not extfileobj and fileobj is not None:
+ fileobj.close()
+ if fileobj is None:
+ raise
+ raise ReadError("not a gzip file")
+ except:
+ if not extfileobj and fileobj is not None:
+ fileobj.close()
+ raise
+ t._extfileobj = extfileobj
+ return t
+
+ @classmethod
+ def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open bzip2 compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if len(mode) > 1 or mode not in "rw":
+ raise ValueError("mode must be 'r' or 'w'.")
+
+ try:
+ import bz2
+ except ImportError:
+ raise CompressionError("bz2 module is not available")
+
+ if fileobj is not None:
+ fileobj = _BZ2Proxy(fileobj, mode)
+ else:
+ fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except (IOError, EOFError):
+ fileobj.close()
+ raise ReadError("not a bzip2 file")
+ t._extfileobj = False
+ return t
+
+ # All *open() methods are registered here.
+ OPEN_METH = {
+ "tar": "taropen", # uncompressed tar
+ "gz": "gzopen", # gzip compressed tar
+ "bz2": "bz2open" # bzip2 compressed tar
+ }
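+
+    # Editor's note (not part of the vendored source): a few illustrative
+    # calls; the file names and the 'pipe' object are hypothetical.
+    #
+    #     TarFile.open("backup.tar.gz", "r:gz")     # seekable gzip archive
+    #     TarFile.open("backup.tar", "w")           # write, uncompressed
+    #     TarFile.open(fileobj=pipe, mode="r|*")    # non-seekable stream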
+
+ #--------------------------------------------------------------------------
+ # The public methods which TarFile provides:
+
+ def close(self):
+ """Close the TarFile. In write-mode, two finishing zero blocks are
+ appended to the archive.
+ """
+ if self.closed:
+ return
+
+ if self.mode in "aw":
+ self.fileobj.write(NUL * (BLOCKSIZE * 2))
+ self.offset += (BLOCKSIZE * 2)
+ # fill up the end with zero-blocks
+ # (like option -b20 for tar does)
+ blocks, remainder = divmod(self.offset, RECORDSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (RECORDSIZE - remainder))
+
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+
+ def getmember(self, name):
+ """Return a TarInfo object for member `name'. If `name' can not be
+ found in the archive, KeyError is raised. If a member occurs more
+ than once in the archive, its last occurrence is assumed to be the
+ most up-to-date version.
+ """
+ tarinfo = self._getmember(name)
+ if tarinfo is None:
+ raise KeyError("filename %r not found" % name)
+ return tarinfo
+
+ def getmembers(self):
+ """Return the members of the archive as a list of TarInfo objects. The
+ list has the same order as the members in the archive.
+ """
+ self._check()
+ if not self._loaded: # if we want to obtain a list of
+ self._load() # all members, we first have to
+ # scan the whole archive.
+ return self.members
+
+ def getnames(self):
+ """Return the members of the archive as a list of their names. It has
+ the same order as the list returned by getmembers().
+ """
+ return [tarinfo.name for tarinfo in self.getmembers()]
+
+ def gettarinfo(self, name=None, arcname=None, fileobj=None):
+ """Create a TarInfo object for either the file `name' or the file
+ object `fileobj' (using os.fstat on its file descriptor). You can
+ modify some of the TarInfo's attributes before you add it using
+ addfile(). If given, `arcname' specifies an alternative name for the
+ file in the archive.
+ """
+ self._check("aw")
+
+ # When fileobj is given, replace name by
+ # fileobj's real name.
+ if fileobj is not None:
+ name = fileobj.name
+
+ # Building the name of the member in the archive.
+ # Backward slashes are converted to forward slashes,
+        # and absolute paths are turned into relative paths.
+ if arcname is None:
+ arcname = name
+ drv, arcname = os.path.splitdrive(arcname)
+ arcname = arcname.replace(os.sep, "/")
+ arcname = arcname.lstrip("/")
+
+ # Now, fill the TarInfo object with
+ # information specific for the file.
+ tarinfo = self.tarinfo()
+ tarinfo.tarfile = self
+
+ # Use os.stat or os.lstat, depending on platform
+ # and if symlinks shall be resolved.
+ if fileobj is None:
+ if hasattr(os, "lstat") and not self.dereference:
+ statres = os.lstat(name)
+ else:
+ statres = os.stat(name)
+ else:
+ statres = os.fstat(fileobj.fileno())
+ linkname = ""
+
+ stmd = statres.st_mode
+ if stat.S_ISREG(stmd):
+ inode = (statres.st_ino, statres.st_dev)
+ if not self.dereference and statres.st_nlink > 1 and \
+ inode in self.inodes and arcname != self.inodes[inode]:
+ # Is it a hardlink to an already
+ # archived file?
+ type = LNKTYPE
+ linkname = self.inodes[inode]
+ else:
+                # The inode is added only if it is valid.
+ # For win32 it is always 0.
+ type = REGTYPE
+ if inode[0]:
+ self.inodes[inode] = arcname
+ elif stat.S_ISDIR(stmd):
+ type = DIRTYPE
+ elif stat.S_ISFIFO(stmd):
+ type = FIFOTYPE
+ elif stat.S_ISLNK(stmd):
+ type = SYMTYPE
+ linkname = os.readlink(name)
+ elif stat.S_ISCHR(stmd):
+ type = CHRTYPE
+ elif stat.S_ISBLK(stmd):
+ type = BLKTYPE
+ else:
+ return None
+
+ # Fill the TarInfo object with all
+ # information we can get.
+ tarinfo.name = arcname
+ tarinfo.mode = stmd
+ tarinfo.uid = statres.st_uid
+ tarinfo.gid = statres.st_gid
+ if type == REGTYPE:
+ tarinfo.size = statres.st_size
+ else:
+ tarinfo.size = 0
+ tarinfo.mtime = statres.st_mtime
+ tarinfo.type = type
+ tarinfo.linkname = linkname
+ if pwd:
+ try:
+ tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+ except KeyError:
+ pass
+ if grp:
+ try:
+ tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+ except KeyError:
+ pass
+
+ if type in (CHRTYPE, BLKTYPE):
+ if hasattr(os, "major") and hasattr(os, "minor"):
+ tarinfo.devmajor = os.major(statres.st_rdev)
+ tarinfo.devminor = os.minor(statres.st_rdev)
+ return tarinfo
+
+ def list(self, verbose=True):
+ """Print a table of contents to sys.stdout. If `verbose' is False, only
+ the names of the members are printed. If it is True, an `ls -l'-like
+ output is produced.
+ """
+ self._check()
+
+ for tarinfo in self:
+ if verbose:
+ print(filemode(tarinfo.mode), end=' ')
+ print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+ tarinfo.gname or tarinfo.gid), end=' ')
+ if tarinfo.ischr() or tarinfo.isblk():
+ print("%10s" % ("%d,%d" \
+ % (tarinfo.devmajor, tarinfo.devminor)), end=' ')
+ else:
+ print("%10d" % tarinfo.size, end=' ')
+ print("%d-%02d-%02d %02d:%02d:%02d" \
+ % time.localtime(tarinfo.mtime)[:6], end=' ')
+
+ print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')
+
+ if verbose:
+ if tarinfo.issym():
+ print("->", tarinfo.linkname, end=' ')
+ if tarinfo.islnk():
+ print("link to", tarinfo.linkname, end=' ')
+ print()
+
+ def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
+ """Add the file `name' to the archive. `name' may be any type of file
+ (directory, fifo, symbolic link, etc.). If given, `arcname'
+ specifies an alternative name for the file in the archive.
+ Directories are added recursively by default. This can be avoided by
+ setting `recursive' to False. `exclude' is a function that should
+ return True for each filename to be excluded. `filter' is a function
+ that expects a TarInfo object argument and returns the changed
+ TarInfo object, if it returns None the TarInfo object will be
+ excluded from the archive.
+ """
+ self._check("aw")
+
+ if arcname is None:
+ arcname = name
+
+ # Exclude pathnames.
+ if exclude is not None:
+ import warnings
+ warnings.warn("use the filter argument instead",
+ DeprecationWarning, 2)
+ if exclude(name):
+ self._dbg(2, "tarfile: Excluded %r" % name)
+ return
+
+ # Skip if somebody tries to archive the archive...
+ if self.name is not None and os.path.abspath(name) == self.name:
+ self._dbg(2, "tarfile: Skipped %r" % name)
+ return
+
+ self._dbg(1, name)
+
+ # Create a TarInfo object from the file.
+ tarinfo = self.gettarinfo(name, arcname)
+
+ if tarinfo is None:
+ self._dbg(1, "tarfile: Unsupported type %r" % name)
+ return
+
+ # Change or exclude the TarInfo object.
+ if filter is not None:
+ tarinfo = filter(tarinfo)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % name)
+ return
+
+ # Append the tar header and data to the archive.
+ if tarinfo.isreg():
+ f = bltn_open(name, "rb")
+ self.addfile(tarinfo, f)
+ f.close()
+
+ elif tarinfo.isdir():
+ self.addfile(tarinfo)
+ if recursive:
+ for f in os.listdir(name):
+ self.add(os.path.join(name, f), os.path.join(arcname, f),
+ recursive, exclude, filter=filter)
+
+ else:
+ self.addfile(tarinfo)
+
+ def addfile(self, tarinfo, fileobj=None):
+ """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
+ given, tarinfo.size bytes are read from it and added to the archive.
+ You can create TarInfo objects using gettarinfo().
+ On Windows platforms, `fileobj' should always be opened with mode
+        'rb' to avoid inconsistencies with the reported file size.
+ """
+ self._check("aw")
+
+ tarinfo = copy.copy(tarinfo)
+
+ buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+
+ # If there's data to follow, append it.
+ if fileobj is not None:
+ copyfileobj(fileobj, self.fileobj, tarinfo.size)
+ blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+ blocks += 1
+ self.offset += blocks * BLOCKSIZE
+
+ self.members.append(tarinfo)
+
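+    # Editor's usage sketch (not part of the vendored source; 'tf' is an
+    # open TarFile in write mode and the file names are hypothetical):
+    #
+    #     ti = tf.gettarinfo("data.bin", arcname="pkg/data.bin")
+    #     with bltn_open("data.bin", "rb") as f:
+    #         tf.addfile(ti, f)
+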
+ def extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 0o700
+ # Do not set_attrs directories, as we will do that further down
+ self.extract(tarinfo, path, set_attrs=not tarinfo.isdir())
+
+ # Reverse sort directories.
+ directories.sort(key=lambda a: a.name)
+ directories.reverse()
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError as e:
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
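+    # Editor's usage sketch (the paths are hypothetical; TarFile supports
+    # the context-manager protocol via __enter__/__exit__ defined below):
+    #
+    #     with TarFile.open("archive.tar") as tf:
+    #         tf.extractall(path="dest")
+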
+ def extract(self, member, path="", set_attrs=True):
+ """Extract a member from the archive to the current working directory,
+ using its full name. Its file information is extracted as accurately
+ as possible. `member' may be a filename or a TarInfo object. You can
+ specify a different directory using `path'. File attributes (owner,
+ mtime, mode) are set unless `set_attrs' is False.
+ """
+ self._check("r")
+
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ # Prepare the link target for makelink().
+ if tarinfo.islnk():
+ tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+
+ try:
+ self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+ set_attrs=set_attrs)
+ except EnvironmentError as e:
+ if self.errorlevel > 0:
+ raise
+ else:
+ if e.filename is None:
+ self._dbg(1, "tarfile: %s" % e.strerror)
+ else:
+ self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ except ExtractError as e:
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+ def extractfile(self, member):
+ """Extract a member from the archive as a file object. `member' may be
+ a filename or a TarInfo object. If `member' is a regular file, a
+ file-like object is returned. If `member' is a link, a file-like
+ object is constructed from the link's target. If `member' is none of
+ the above, None is returned.
+ The file-like object is read-only and provides the following
+ methods: read(), readline(), readlines(), seek() and tell()
+ """
+ self._check("r")
+
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ if tarinfo.isreg():
+ return self.fileobject(self, tarinfo)
+
+ elif tarinfo.type not in SUPPORTED_TYPES:
+ # If a member's type is unknown, it is treated as a
+ # regular file.
+ return self.fileobject(self, tarinfo)
+
+ elif tarinfo.islnk() or tarinfo.issym():
+ if isinstance(self.fileobj, _Stream):
+ # A small but ugly workaround for the case that someone tries
+ # to extract a (sym)link as a file-object from a non-seekable
+ # stream of tar blocks.
+ raise StreamError("cannot extract (sym)link as file object")
+ else:
+ # A (sym)link's file object is its target's file object.
+ return self.extractfile(self._find_link_target(tarinfo))
+ else:
+ # If there's no data associated with the member (directory, chrdev,
+ # blkdev, etc.), return None instead of a file object.
+ return None
+
+ def _extract_member(self, tarinfo, targetpath, set_attrs=True):
+ """Extract the TarInfo object tarinfo to a physical
+ file called targetpath.
+ """
+ # Fetch the TarInfo object for the given name
+ # and build the destination pathname, replacing
+        # forward slashes with platform-specific separators.
+ targetpath = targetpath.rstrip("/")
+ targetpath = targetpath.replace("/", os.sep)
+
+ # Create all upper directories.
+ upperdirs = os.path.dirname(targetpath)
+ if upperdirs and not os.path.exists(upperdirs):
+ # Create directories that are not part of the archive with
+ # default permissions.
+ os.makedirs(upperdirs)
+
+ if tarinfo.islnk() or tarinfo.issym():
+ self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+ else:
+ self._dbg(1, tarinfo.name)
+
+ if tarinfo.isreg():
+ self.makefile(tarinfo, targetpath)
+ elif tarinfo.isdir():
+ self.makedir(tarinfo, targetpath)
+ elif tarinfo.isfifo():
+ self.makefifo(tarinfo, targetpath)
+ elif tarinfo.ischr() or tarinfo.isblk():
+ self.makedev(tarinfo, targetpath)
+ elif tarinfo.islnk() or tarinfo.issym():
+ self.makelink(tarinfo, targetpath)
+ elif tarinfo.type not in SUPPORTED_TYPES:
+ self.makeunknown(tarinfo, targetpath)
+ else:
+ self.makefile(tarinfo, targetpath)
+
+ if set_attrs:
+ self.chown(tarinfo, targetpath)
+ if not tarinfo.issym():
+ self.chmod(tarinfo, targetpath)
+ self.utime(tarinfo, targetpath)
+
+ #--------------------------------------------------------------------------
+ # Below are the different file methods. They are called via
+ # _extract_member() when extract() is called. They can be replaced in a
+ # subclass to implement other functionality.
+
+ def makedir(self, tarinfo, targetpath):
+ """Make a directory called targetpath.
+ """
+ try:
+ # Use a safe mode for the directory, the real mode is set
+ # later in _extract_member().
+ os.mkdir(targetpath, 0o700)
+ except EnvironmentError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ def makefile(self, tarinfo, targetpath):
+ """Make a file called targetpath.
+ """
+ source = self.fileobj
+ source.seek(tarinfo.offset_data)
+ target = bltn_open(targetpath, "wb")
+ if tarinfo.sparse is not None:
+ for offset, size in tarinfo.sparse:
+ target.seek(offset)
+ copyfileobj(source, target, size)
+ else:
+ copyfileobj(source, target, tarinfo.size)
+ target.seek(tarinfo.size)
+ target.truncate()
+ target.close()
+
+ def makeunknown(self, tarinfo, targetpath):
+ """Make a file from a TarInfo object with an unknown type
+ at targetpath.
+ """
+ self.makefile(tarinfo, targetpath)
+ self._dbg(1, "tarfile: Unknown file type %r, " \
+ "extracted as regular file." % tarinfo.type)
+
+ def makefifo(self, tarinfo, targetpath):
+ """Make a fifo called targetpath.
+ """
+ if hasattr(os, "mkfifo"):
+ os.mkfifo(targetpath)
+ else:
+ raise ExtractError("fifo not supported by system")
+
+ def makedev(self, tarinfo, targetpath):
+ """Make a character or block device called targetpath.
+ """
+ if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+ raise ExtractError("special devices not supported by system")
+
+ mode = tarinfo.mode
+ if tarinfo.isblk():
+ mode |= stat.S_IFBLK
+ else:
+ mode |= stat.S_IFCHR
+
+ os.mknod(targetpath, mode,
+ os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+ def makelink(self, tarinfo, targetpath):
+ """Make a (symbolic) link called targetpath. If it cannot be created
+ (platform limitation), we try to make a copy of the referenced file
+ instead of a link.
+ """
+ try:
+ # For systems that support symbolic and hard links.
+ if tarinfo.issym():
+ os.symlink(tarinfo.linkname, targetpath)
+ else:
+ # See extract().
+ if os.path.exists(tarinfo._link_target):
+ os.link(tarinfo._link_target, targetpath)
+ else:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except symlink_exception:
+ if tarinfo.issym():
+ linkpath = os.path.join(os.path.dirname(tarinfo.name),
+ tarinfo.linkname)
+ else:
+ linkpath = tarinfo.linkname
+ try:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except KeyError:
+ raise ExtractError("unable to resolve link inside archive")
+
+ def chown(self, tarinfo, targetpath):
+ """Set owner of targetpath according to tarinfo.
+ """
+ if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
+ # We have to be root to do so.
+ try:
+ g = grp.getgrnam(tarinfo.gname)[2]
+ except KeyError:
+ g = tarinfo.gid
+ try:
+ u = pwd.getpwnam(tarinfo.uname)[2]
+ except KeyError:
+ u = tarinfo.uid
+ try:
+ if tarinfo.issym() and hasattr(os, "lchown"):
+ os.lchown(targetpath, u, g)
+ else:
+ if sys.platform != "os2emx":
+ os.chown(targetpath, u, g)
+ except EnvironmentError as e:
+ raise ExtractError("could not change owner")
+
+ def chmod(self, tarinfo, targetpath):
+ """Set file permissions of targetpath according to tarinfo.
+ """
+ if hasattr(os, 'chmod'):
+ try:
+ os.chmod(targetpath, tarinfo.mode)
+ except EnvironmentError as e:
+ raise ExtractError("could not change mode")
+
+ def utime(self, tarinfo, targetpath):
+ """Set modification time of targetpath according to tarinfo.
+ """
+ if not hasattr(os, 'utime'):
+ return
+ try:
+ os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
+ except EnvironmentError as e:
+ raise ExtractError("could not change modification time")
+
+ #--------------------------------------------------------------------------
+ def next(self):
+ """Return the next member of the archive as a TarInfo object, when
+        TarFile is opened for reading. Return None if there are no more
+        members available.
+ """
+ self._check("ra")
+ if self.firstmember is not None:
+ m = self.firstmember
+ self.firstmember = None
+ return m
+
+ # Read the next block.
+ self.fileobj.seek(self.offset)
+ tarinfo = None
+ while True:
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ except EOFHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ except InvalidHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ elif self.offset == 0:
+ raise ReadError(str(e))
+ except EmptyHeaderError:
+ if self.offset == 0:
+ raise ReadError("empty file")
+ except TruncatedHeaderError as e:
+ if self.offset == 0:
+ raise ReadError(str(e))
+ except SubsequentHeaderError as e:
+ raise ReadError(str(e))
+ break
+
+ if tarinfo is not None:
+ self.members.append(tarinfo)
+ else:
+ self._loaded = True
+
+ return tarinfo
+
+ #--------------------------------------------------------------------------
+ # Little helper methods:
+
+ def _getmember(self, name, tarinfo=None, normalize=False):
+ """Find an archive member by name from bottom to top.
+ If tarinfo is given, it is used as the starting point.
+ """
+ # Ensure that all members have been loaded.
+ members = self.getmembers()
+
+ # Limit the member search list up to tarinfo.
+ if tarinfo is not None:
+ members = members[:members.index(tarinfo)]
+
+ if normalize:
+ name = os.path.normpath(name)
+
+ for member in reversed(members):
+ if normalize:
+ member_name = os.path.normpath(member.name)
+ else:
+ member_name = member.name
+
+ if name == member_name:
+ return member
+
+ def _load(self):
+ """Read through the entire archive file and look for readable
+ members.
+ """
+ while True:
+ tarinfo = self.next()
+ if tarinfo is None:
+ break
+ self._loaded = True
+
+ def _check(self, mode=None):
+ """Check if TarFile is still open, and if the operation's mode
+ corresponds to TarFile's mode.
+ """
+ if self.closed:
+ raise IOError("%s is closed" % self.__class__.__name__)
+ if mode is not None and self.mode not in mode:
+ raise IOError("bad operation for mode %r" % self.mode)
+
+ def _find_link_target(self, tarinfo):
+ """Find the target member of a symlink or hardlink member in the
+ archive.
+ """
+ if tarinfo.issym():
+ # Always search the entire archive.
+ linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
+ limit = None
+ else:
+ # Search the archive before the link, because a hard link is
+ # just a reference to an already archived file.
+ linkname = tarinfo.linkname
+ limit = tarinfo
+
+ member = self._getmember(linkname, tarinfo=limit, normalize=True)
+ if member is None:
+ raise KeyError("linkname %r not found" % linkname)
+ return member
+
+ def __iter__(self):
+ """Provide an iterator object.
+ """
+ if self._loaded:
+ return iter(self.members)
+ else:
+ return TarIter(self)
+
+ def _dbg(self, level, msg):
+ """Write debugging output to sys.stderr.
+ """
+ if level <= self.debug:
+ print(msg, file=sys.stderr)
+
+ def __enter__(self):
+ self._check()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ self.close()
+ else:
+ # An exception occurred. We must not call close() because
+ # it would try to write end-of-archive blocks and padding.
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+# class TarFile
+
+class TarIter(object):
+ """Iterator Class.
+
+ for tarinfo in TarFile(...):
+ suite...
+ """
+
+ def __init__(self, tarfile):
+ """Construct a TarIter object.
+ """
+ self.tarfile = tarfile
+ self.index = 0
+ def __iter__(self):
+ """Return iterator object.
+ """
+ return self
+
+ def __next__(self):
+ """Return the next item using TarFile's next() method.
+ When all members have been read, set TarFile as _loaded.
+ """
+ # Fix for SF #1100429: Under rare circumstances it can
+ # happen that getmembers() is called during iteration,
+ # which will cause TarIter to stop prematurely.
+ if not self.tarfile._loaded:
+ tarinfo = self.tarfile.next()
+ if not tarinfo:
+ self.tarfile._loaded = True
+ raise StopIteration
+ else:
+ try:
+ tarinfo = self.tarfile.members[self.index]
+ except IndexError:
+ raise StopIteration
+ self.index += 1
+ return tarinfo
+
+ next = __next__ # for Python 2.x
+
+#--------------------
+# exported functions
+#--------------------
+def is_tarfile(name):
+ """Return True if name points to a tar archive that we
+ are able to handle, else return False.
+ """
+ try:
+ t = open(name)
+ t.close()
+ return True
+ except TarError:
+ return False
+
+bltn_open = open
+open = TarFile.open
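+
+# Editor's usage sketch (not part of the vendored source; the path is
+# hypothetical). At this point in the module 'open' refers to TarFile.open,
+# while bltn_open keeps the builtin open():
+#
+#     if is_tarfile("pkg.tar.bz2"):
+#         with open("pkg.tar.bz2", "r:bz2") as tf:
+#             print(tf.getnames())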
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py
new file mode 100644
index 00000000..ff328c8e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py
@@ -0,0 +1,1120 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import absolute_import
+
+import os
+import re
+import sys
+
+try:
+ import ssl
+except ImportError: # pragma: no cover
+ ssl = None
+
+if sys.version_info[0] < 3: # pragma: no cover
+ from StringIO import StringIO
+ string_types = basestring,
+ text_type = unicode
+ from types import FileType as file_type
+ import __builtin__ as builtins
+ import ConfigParser as configparser
+ from ._backport import shutil
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
+ from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
+ pathname2url, ContentTooShortError, splittype)
+
+ def quote(s):
+ if isinstance(s, unicode):
+ s = s.encode('utf-8')
+ return _quote(s)
+
+ import urllib2
+ from urllib2 import (Request, urlopen, URLError, HTTPError,
+ HTTPBasicAuthHandler, HTTPPasswordMgr,
+ HTTPHandler, HTTPRedirectHandler,
+ build_opener)
+ if ssl:
+ from urllib2 import HTTPSHandler
+ import httplib
+ import xmlrpclib
+ import Queue as queue
+ from HTMLParser import HTMLParser
+ import htmlentitydefs
+ raw_input = raw_input
+ from itertools import ifilter as filter
+ from itertools import ifilterfalse as filterfalse
+
+ _userprog = None
+ def splituser(host):
+ """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
+ global _userprog
+ if _userprog is None:
+ import re
+ _userprog = re.compile('^(.*)@(.*)$')
+
+ match = _userprog.match(host)
+ if match: return match.group(1, 2)
+ return None, host
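+
+    # Editor's note: for example, splituser('alice:secret@example.com:8080')
+    # returns ('alice:secret', 'example.com:8080').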
+
+else: # pragma: no cover
+ from io import StringIO
+ string_types = str,
+ text_type = str
+ from io import TextIOWrapper as file_type
+ import builtins
+ import configparser
+ import shutil
+ from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
+ unquote, urlsplit, urlunsplit, splittype)
+ from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
+ pathname2url,
+ HTTPBasicAuthHandler, HTTPPasswordMgr,
+ HTTPHandler, HTTPRedirectHandler,
+ build_opener)
+ if ssl:
+ from urllib.request import HTTPSHandler
+ from urllib.error import HTTPError, URLError, ContentTooShortError
+ import http.client as httplib
+ import urllib.request as urllib2
+ import xmlrpc.client as xmlrpclib
+ import queue
+ from html.parser import HTMLParser
+ import html.entities as htmlentitydefs
+ raw_input = input
+ from itertools import filterfalse
+ filter = filter
+
+try:
+ from ssl import match_hostname, CertificateError
+except ImportError: # pragma: no cover
+ class CertificateError(ValueError):
+ pass
+
+
+ def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ parts = dn.split('.')
+ leftmost, remainder = parts[0], parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+
+ def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate, match_hostname needs a "
+ "SSL socket or SSL context with either "
+ "CERT_OPTIONAL or CERT_REQUIRED")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
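+
+    # Editor's sketch of the fallback's behaviour (the certificate dict is
+    # hypothetical):
+    #
+    #     cert = {'subjectAltName': (('DNS', '*.example.com'),)}
+    #     match_hostname(cert, 'www.example.com')  # matches, returns None
+    #     match_hostname(cert, 'example.org')      # raises CertificateError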
+
+
+try:
+ from types import SimpleNamespace as Container
+except ImportError: # pragma: no cover
+ class Container(object):
+ """
+ A generic container for when multiple values need to be returned
+ """
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+
+try:
+ from shutil import which
+except ImportError: # pragma: no cover
+ # Implementation from Python 3.3
+ def which(cmd, mode=os.F_OK | os.X_OK, path=None):
+ """Given a command, mode, and a PATH string, return the path which
+ conforms to the given mode on the PATH, or None if there is no such
+ file.
+
+ `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
+ of os.environ.get("PATH"), or can be overridden with a custom search
+ path.
+
+ """
+ # Check that a given file can be accessed with the correct mode.
+ # Additionally check that `file` is not a directory, as on Windows
+ # directories pass the os.access check.
+ def _access_check(fn, mode):
+ return (os.path.exists(fn) and os.access(fn, mode)
+ and not os.path.isdir(fn))
+
+ # If we're given a path with a directory part, look it up directly rather
+ # than referring to PATH directories. This includes checking relative to the
+ # current directory, e.g. ./script
+ if os.path.dirname(cmd):
+ if _access_check(cmd, mode):
+ return cmd
+ return None
+
+ if path is None:
+ path = os.environ.get("PATH", os.defpath)
+ if not path:
+ return None
+ path = path.split(os.pathsep)
+
+ if sys.platform == "win32":
+ # The current directory takes precedence on Windows.
+ if not os.curdir in path:
+ path.insert(0, os.curdir)
+
+ # PATHEXT is necessary to check on Windows.
+ pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
+ # See if the given file matches any of the expected path extensions.
+ # This will allow us to short circuit when given "python.exe".
+ # If it does match, only test that one, otherwise we have to try
+ # others.
+ if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
+ files = [cmd]
+ else:
+ files = [cmd + ext for ext in pathext]
+ else:
+ # On other platforms you don't have things like PATHEXT to tell you
+ # what file suffixes are executable, so just pass on cmd as-is.
+ files = [cmd]
+
+ seen = set()
+ for dir in path:
+ normdir = os.path.normcase(dir)
+ if not normdir in seen:
+ seen.add(normdir)
+ for thefile in files:
+ name = os.path.join(dir, thefile)
+ if _access_check(name, mode):
+ return name
+ return None
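+
+    # Editor's usage sketch (the result depends on the local PATH):
+    #
+    #     which('python')  # e.g. '/usr/bin/python', or None if not found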
+
+
+# ZipFile is a context manager in 2.7, but not in 2.6
+
+from zipfile import ZipFile as BaseZipFile
+
+if hasattr(BaseZipFile, '__enter__'): # pragma: no cover
+ ZipFile = BaseZipFile
+else: # pragma: no cover
+ from zipfile import ZipExtFile as BaseZipExtFile
+
+ class ZipExtFile(BaseZipExtFile):
+ def __init__(self, base):
+ self.__dict__.update(base.__dict__)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ self.close()
+ # return None, so if an exception occurred, it will propagate
+
+ class ZipFile(BaseZipFile):
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ self.close()
+ # return None, so if an exception occurred, it will propagate
+
+ def open(self, *args, **kwargs):
+ base = BaseZipFile.open(self, *args, **kwargs)
+ return ZipExtFile(base)
+
+try:
+ from platform import python_implementation
+except ImportError: # pragma: no cover
+ def python_implementation():
+ """Return a string identifying the Python implementation."""
+ if 'PyPy' in sys.version:
+ return 'PyPy'
+ if os.name == 'java':
+ return 'Jython'
+ if sys.version.startswith('IronPython'):
+ return 'IronPython'
+ return 'CPython'
+
+try:
+ import sysconfig
+except ImportError: # pragma: no cover
+ from ._backport import sysconfig
+
+try:
+ callable = callable
+except NameError: # pragma: no cover
+ from collections import Callable
+
+ def callable(obj):
+ return isinstance(obj, Callable)
+
+
+try:
+ fsencode = os.fsencode
+ fsdecode = os.fsdecode
+except AttributeError: # pragma: no cover
+ # Issue #99: on some systems (e.g. containerised),
+ # sys.getfilesystemencoding() returns None, and we need a real value,
+ # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
+ # sys.getfilesystemencoding(): the return value is "the user’s preference
+ # according to the result of nl_langinfo(CODESET), or None if the
+ # nl_langinfo(CODESET) failed."
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8'
+ if _fsencoding == 'mbcs':
+ _fserrors = 'strict'
+ else:
+ _fserrors = 'surrogateescape'
+
+ def fsencode(filename):
+ if isinstance(filename, bytes):
+ return filename
+ elif isinstance(filename, text_type):
+ return filename.encode(_fsencoding, _fserrors)
+ else:
+ raise TypeError("expect bytes or str, not %s" %
+ type(filename).__name__)
+
+ def fsdecode(filename):
+ if isinstance(filename, text_type):
+ return filename
+ elif isinstance(filename, bytes):
+ return filename.decode(_fsencoding, _fserrors)
+ else:
+ raise TypeError("expect bytes or str, not %s" %
+ type(filename).__name__)
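+
+    # Editor's sketch of the expected round-trip (assuming a utf-8
+    # filesystem encoding):
+    #
+    #     fsencode(u'caf\xe9.txt')      # -> b'caf\xc3\xa9.txt'
+    #     fsdecode(b'caf\xc3\xa9.txt')  # -> u'caf\xe9.txt'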
+
+try:
+ from tokenize import detect_encoding
+except ImportError: # pragma: no cover
+ from codecs import BOM_UTF8, lookup
+ import re
+
+ cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
+
+ def _get_normal_name(orig_enc):
+ """Imitates get_normal_name in tokenizer.c."""
+ # Only care about the first 12 characters.
+ enc = orig_enc[:12].lower().replace("_", "-")
+ if enc == "utf-8" or enc.startswith("utf-8-"):
+ return "utf-8"
+ if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+ return "iso-8859-1"
+ return orig_enc
+
+ def detect_encoding(readline):
+ """
+ The detect_encoding() function is used to detect the encoding that should
+ be used to decode a Python source file. It requires one argument, readline,
+ in the same way as the tokenize() generator.
+
+ It will call readline a maximum of twice, and return the encoding used
+ (as a string) and a list of any lines (left as bytes) it has read in.
+
+ It detects the encoding from the presence of a utf-8 bom or an encoding
+ cookie as specified in pep-0263. If both a bom and a cookie are present,
+ but disagree, a SyntaxError will be raised. If the encoding cookie is an
+ invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
+ 'utf-8-sig' is returned.
+
+ If no encoding is specified, then the default of 'utf-8' will be returned.
+ """
+ try:
+ filename = readline.__self__.name
+ except AttributeError:
+ filename = None
+ bom_found = False
+ encoding = None
+ default = 'utf-8'
+ def read_or_stop():
+ try:
+ return readline()
+ except StopIteration:
+ return b''
+
+ def find_cookie(line):
+ try:
+ # Decode as UTF-8. Either the line is an encoding declaration,
+ # in which case it should be pure ASCII, or it must be UTF-8
+ # per default encoding.
+ line_string = line.decode('utf-8')
+ except UnicodeDecodeError:
+ msg = "invalid or missing encoding declaration"
+ if filename is not None:
+ msg = '{} for {!r}'.format(msg, filename)
+ raise SyntaxError(msg)
+
+ matches = cookie_re.findall(line_string)
+ if not matches:
+ return None
+ encoding = _get_normal_name(matches[0])
+ try:
+ codec = lookup(encoding)
+ except LookupError:
+ # This behaviour mimics the Python interpreter
+ if filename is None:
+ msg = "unknown encoding: " + encoding
+ else:
+ msg = "unknown encoding for {!r}: {}".format(filename,
+ encoding)
+ raise SyntaxError(msg)
+
+ if bom_found:
+ if codec.name != 'utf-8':
+ # This behaviour mimics the Python interpreter
+ if filename is None:
+ msg = 'encoding problem: utf-8'
+ else:
+ msg = 'encoding problem for {!r}: utf-8'.format(filename)
+ raise SyntaxError(msg)
+ encoding += '-sig'
+ return encoding
+
+ first = read_or_stop()
+ if first.startswith(BOM_UTF8):
+ bom_found = True
+ first = first[3:]
+ default = 'utf-8-sig'
+ if not first:
+ return default, []
+
+ encoding = find_cookie(first)
+ if encoding:
+ return encoding, [first]
+
+ second = read_or_stop()
+ if not second:
+ return default, [first]
+
+ encoding = find_cookie(second)
+ if encoding:
+ return encoding, [first, second]
+
+ return default, [first, second]
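+
+    # Editor's usage sketch (not part of the vendored source; the file name
+    # is hypothetical):
+    #
+    #     with builtins.open('setup.py', 'rb') as f:
+    #         encoding, lines = detect_encoding(f.readline)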
+
+# For converting & <-> &amp; etc.
+try:
+ from html import escape
+except ImportError:
+ from cgi import escape
+if sys.version_info[:2] < (3, 4):
+ unescape = HTMLParser().unescape
+else:
+ from html import unescape
+
+try:
+ from collections import ChainMap
+except ImportError: # pragma: no cover
+ from collections import MutableMapping
+
+ try:
+ from reprlib import recursive_repr as _recursive_repr
+    except ImportError:
+        # The wrapper below needs get_ident; on Python 2 (the only case in
+        # which this fallback runs) it lives in the thread module, with
+        # dummy_thread as the no-threads substitute.
+        try:
+            from thread import get_ident
+        except ImportError:
+            from dummy_thread import get_ident
+
+        def _recursive_repr(fillvalue='...'):
+ '''
+ Decorator to make a repr function return fillvalue for a recursive
+ call
+ '''
+
+ def decorating_function(user_function):
+ repr_running = set()
+
+ def wrapper(self):
+ key = id(self), get_ident()
+ if key in repr_running:
+ return fillvalue
+ repr_running.add(key)
+ try:
+ result = user_function(self)
+ finally:
+ repr_running.discard(key)
+ return result
+
+ # Can't use functools.wraps() here because of bootstrap issues
+ wrapper.__module__ = getattr(user_function, '__module__')
+ wrapper.__doc__ = getattr(user_function, '__doc__')
+ wrapper.__name__ = getattr(user_function, '__name__')
+ wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+ return wrapper
+
+ return decorating_function
+
+ class ChainMap(MutableMapping):
+ ''' A ChainMap groups multiple dicts (or other mappings) together
+ to create a single, updateable view.
+
+        The underlying mappings are stored in a list. That list is public and
+        can be accessed or updated using the *maps* attribute. There is no
+        other state.
+
+ Lookups search the underlying mappings successively until a key is found.
+ In contrast, writes, updates, and deletions only operate on the first
+ mapping.
+
+ '''
+
+ def __init__(self, *maps):
+ '''Initialize a ChainMap by setting *maps* to the given mappings.
+ If no mappings are provided, a single empty dictionary is used.
+
+ '''
+ self.maps = list(maps) or [{}] # always at least one map
+
+ def __missing__(self, key):
+ raise KeyError(key)
+
+ def __getitem__(self, key):
+ for mapping in self.maps:
+ try:
+ return mapping[key] # can't use 'key in mapping' with defaultdict
+ except KeyError:
+ pass
+ return self.__missing__(key) # support subclasses that define __missing__
+
+ def get(self, key, default=None):
+ return self[key] if key in self else default
+
+ def __len__(self):
+ return len(set().union(*self.maps)) # reuses stored hash values if possible
+
+ def __iter__(self):
+ return iter(set().union(*self.maps))
+
+ def __contains__(self, key):
+ return any(key in m for m in self.maps)
+
+ def __bool__(self):
+ return any(self.maps)
+
+ @_recursive_repr()
+ def __repr__(self):
+ return '{0.__class__.__name__}({1})'.format(
+ self, ', '.join(map(repr, self.maps)))
+
+ @classmethod
+ def fromkeys(cls, iterable, *args):
+ 'Create a ChainMap with a single dict created from the iterable.'
+ return cls(dict.fromkeys(iterable, *args))
+
+ def copy(self):
+ 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+ return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+ __copy__ = copy
+
+ def new_child(self): # like Django's Context.push()
+ 'New ChainMap with a new dict followed by all previous maps.'
+ return self.__class__({}, *self.maps)
+
+ @property
+ def parents(self): # like Django's Context.pop()
+ 'New ChainMap from maps[1:].'
+ return self.__class__(*self.maps[1:])
+
+ def __setitem__(self, key, value):
+ self.maps[0][key] = value
+
+ def __delitem__(self, key):
+ try:
+ del self.maps[0][key]
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+ try:
+ return self.maps[0].popitem()
+ except KeyError:
+ raise KeyError('No keys found in the first mapping.')
+
+ def pop(self, key, *args):
+ 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+ try:
+ return self.maps[0].pop(key, *args)
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def clear(self):
+ 'Clear maps[0], leaving maps[1:] intact.'
+ self.maps[0].clear()
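+
+    # Editor's usage sketch for the backported ChainMap:
+    #
+    #     cm = ChainMap({'user': 'admin'}, {'user': 'guest', 'color': 'red'})
+    #     cm['user']    # -> 'admin'  (lookups search the maps in order)
+    #     cm['color']   # -> 'red'    (falls through to the second map)
+    #     cm['lang'] = 'en'           # writes only ever touch maps[0]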
+
+try:
+ from importlib.util import cache_from_source # Python >= 3.4
+except ImportError: # pragma: no cover
+ try:
+ from imp import cache_from_source
+ except ImportError: # pragma: no cover
+ def cache_from_source(path, debug_override=None):
+ assert path.endswith('.py')
+ if debug_override is None:
+ debug_override = __debug__
+ if debug_override:
+ suffix = 'c'
+ else:
+ suffix = 'o'
+ return path + suffix
+
+try:
+ from collections import OrderedDict
+except ImportError: # pragma: no cover
+## {{{ http://code.activestate.com/recipes/576693/ (r9)
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+ try:
+ from thread import get_ident as _get_ident
+ except ImportError:
+ from dummy_thread import get_ident as _get_ident
+
+ try:
+ from _abcoll import KeysView, ValuesView, ItemsView
+ except ImportError:
+ pass
+
+
+ class OrderedDict(dict):
+ 'Dictionary that remembers insertion order'
+ # An inherited dict maps keys to values.
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
+ # The remaining methods are order-aware.
+ # Big-O running times for all methods are the same as for regular dictionaries.
+
+ # The internal self.__map dictionary maps keys to links in a doubly linked list.
+ # The circular doubly linked list starts and ends with a sentinel element.
+ # The sentinel element never gets deleted (this simplifies the algorithm).
+ # Each link is stored as a list of length three: [PREV, NEXT, KEY].
+
+ def __init__(self, *args, **kwds):
+ '''Initialize an ordered dictionary. Signature is the same as for
+ regular dictionaries, but keyword arguments are not recommended
+ because their insertion order is arbitrary.
+
+ '''
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__root
+ except AttributeError:
+ self.__root = root = [] # sentinel node
+ root[:] = [root, root, None]
+ self.__map = {}
+ self.__update(*args, **kwds)
+
+ def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+ 'od.__setitem__(i, y) <==> od[i]=y'
+ # Setting a new item creates a new link which goes at the end of the linked
+ # list, and the inherited dictionary is updated with the new key/value pair.
+ if key not in self:
+ root = self.__root
+ last = root[0]
+ last[1] = root[0] = self.__map[key] = [last, root, key]
+ dict_setitem(self, key, value)
+
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
+ 'od.__delitem__(y) <==> del od[y]'
+ # Deleting an existing item uses self.__map to find the link which is
+ # then removed by updating the links in the predecessor and successor nodes.
+ dict_delitem(self, key)
+ link_prev, link_next, key = self.__map.pop(key)
+ link_prev[1] = link_next
+ link_next[0] = link_prev
+
+ def __iter__(self):
+ 'od.__iter__() <==> iter(od)'
+ root = self.__root
+ curr = root[1]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[1]
+
+ def __reversed__(self):
+ 'od.__reversed__() <==> reversed(od)'
+ root = self.__root
+ curr = root[0]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[0]
+
+ def clear(self):
+ 'od.clear() -> None. Remove all items from od.'
+ try:
+ for node in self.__map.itervalues():
+ del node[:]
+ root = self.__root
+ root[:] = [root, root, None]
+ self.__map.clear()
+ except AttributeError:
+ pass
+ dict.clear(self)
+
+ def popitem(self, last=True):
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+ '''
+ if not self:
+ raise KeyError('dictionary is empty')
+ root = self.__root
+ if last:
+ link = root[0]
+ link_prev = link[0]
+ link_prev[1] = root
+ root[0] = link_prev
+ else:
+ link = root[1]
+ link_next = link[1]
+ root[1] = link_next
+ link_next[0] = root
+ key = link[2]
+ del self.__map[key]
+ value = dict.pop(self, key)
+ return key, value
+
+ # -- the following methods do not depend on the internal structure --
+
+ def keys(self):
+ 'od.keys() -> list of keys in od'
+ return list(self)
+
+ def values(self):
+ 'od.values() -> list of values in od'
+ return [self[key] for key in self]
+
+ def items(self):
+ 'od.items() -> list of (key, value) pairs in od'
+ return [(key, self[key]) for key in self]
+
+ def iterkeys(self):
+ 'od.iterkeys() -> an iterator over the keys in od'
+ return iter(self)
+
+ def itervalues(self):
+ 'od.itervalues -> an iterator over the values in od'
+ for k in self:
+ yield self[k]
+
+ def iteritems(self):
+ 'od.iteritems -> an iterator over the (key, value) items in od'
+ for k in self:
+ yield (k, self[k])
+
+ def update(*args, **kwds):
+ '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
+
+ If E is a dict instance, does: for k in E: od[k] = E[k]
+ If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
+ Or if E is an iterable of items, does: for k, v in E: od[k] = v
+ In either case, this is followed by: for k, v in F.items(): od[k] = v
+
+ '''
+ if len(args) > 2:
+ raise TypeError('update() takes at most 2 positional '
+ 'arguments (%d given)' % (len(args),))
+ elif not args:
+ raise TypeError('update() takes at least 1 argument (0 given)')
+ self = args[0]
+ # Make progressively weaker assumptions about "other"
+ other = ()
+ if len(args) == 2:
+ other = args[1]
+ if isinstance(other, dict):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, 'keys'):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+ for key, value in kwds.items():
+ self[key] = value
+
+ __update = update # let subclasses override update without breaking __init__
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+
+ '''
+ if key in self:
+ result = self[key]
+ del self[key]
+ return result
+ if default is self.__marker:
+ raise KeyError(key)
+ return default
+
+ def setdefault(self, key, default=None):
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+ if key in self:
+ return self[key]
+ self[key] = default
+ return default
+
+ def __repr__(self, _repr_running=None):
+ 'od.__repr__() <==> repr(od)'
+ if not _repr_running: _repr_running = {}
+ call_key = id(self), _get_ident()
+ if call_key in _repr_running:
+ return '...'
+ _repr_running[call_key] = 1
+ try:
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+ finally:
+ del _repr_running[call_key]
+
+ def __reduce__(self):
+ 'Return state information for pickling'
+ items = [[k, self[k]] for k in self]
+ inst_dict = vars(self).copy()
+ for k in vars(OrderedDict()):
+ inst_dict.pop(k, None)
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def copy(self):
+ 'od.copy() -> a shallow copy of od'
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+ and values equal to v (which defaults to None).
+
+ '''
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+ while comparison to a regular mapping is order-insensitive.
+
+ '''
+ if isinstance(other, OrderedDict):
+ return len(self)==len(other) and self.items() == other.items()
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
+
+ # -- the following methods are only used in Python 2.7 --
+
+ def viewkeys(self):
+ "od.viewkeys() -> a set-like object providing a view on od's keys"
+ return KeysView(self)
+
+ def viewvalues(self):
+ "od.viewvalues() -> an object providing a view on od's values"
+ return ValuesView(self)
+
+ def viewitems(self):
+ "od.viewitems() -> a set-like object providing a view on od's items"
+ return ItemsView(self)
+
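+# A minimal sketch of the semantics the backport preserves -- insertion order
+# survives updates and deletions:
+#
+#     od = OrderedDict([('a', 1), ('b', 2)])
+#     od.update(c=3)                  # keyword arguments are applied last
+#     od.pop('a')                     # -> 1
+#     list(od.items())                # -> [('b', 2), ('c', 3)]
+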
+try:
+ from logging.config import BaseConfigurator, valid_ident
+except ImportError: # pragma: no cover
+ IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
+
+
+ def valid_ident(s):
+ m = IDENTIFIER.match(s)
+ if not m:
+ raise ValueError('Not a valid Python identifier: %r' % s)
+ return True
+
+
+ # The ConvertingXXX classes are wrappers around standard Python containers,
+ # and they serve to convert any suitable values in the container. The
+ # conversion converts base dicts, lists and tuples to their wrapped
+ # equivalents, whereas strings which match a conversion format are converted
+ # appropriately.
+ #
+ # Each wrapper should have a configurator attribute holding the actual
+ # configurator to use for conversion.
+
+ class ConvertingDict(dict):
+ """A converting dictionary wrapper."""
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ #If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def get(self, key, default=None):
+ value = dict.get(self, key, default)
+ result = self.configurator.convert(value)
+ #If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def pop(self, key, default=None):
+ value = dict.pop(self, key, default)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ class ConvertingList(list):
+ """A converting list wrapper."""
+ def __getitem__(self, key):
+ value = list.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ #If the converted value is different, save for next time
+ if value is not result:
+ self[key] = result
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ def pop(self, idx=-1):
+ value = list.pop(self, idx)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ return result
+
+ class ConvertingTuple(tuple):
+ """A converting tuple wrapper."""
+ def __getitem__(self, key):
+ value = tuple.__getitem__(self, key)
+ result = self.configurator.convert(value)
+ if value is not result:
+ if type(result) in (ConvertingDict, ConvertingList,
+ ConvertingTuple):
+ result.parent = self
+ result.key = key
+ return result
+
+ class BaseConfigurator(object):
+ """
+ The configurator base class which defines some useful defaults.
+ """
+
+ CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
+
+ WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
+ DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
+ INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
+ DIGIT_PATTERN = re.compile(r'^\d+$')
+
+ value_converters = {
+ 'ext' : 'ext_convert',
+ 'cfg' : 'cfg_convert',
+ }
+
+ # We might want to use a different one, e.g. importlib
+ importer = staticmethod(__import__)
+
+ def __init__(self, config):
+ self.config = ConvertingDict(config)
+ self.config.configurator = self
+
+ def resolve(self, s):
+ """
+ Resolve strings to objects using standard import and attribute
+ syntax.
+ """
+ name = s.split('.')
+ used = name.pop(0)
+ try:
+ found = self.importer(used)
+ for frag in name:
+ used += '.' + frag
+ try:
+ found = getattr(found, frag)
+ except AttributeError:
+ self.importer(used)
+ found = getattr(found, frag)
+ return found
+ except ImportError:
+ e, tb = sys.exc_info()[1:]
+ v = ValueError('Cannot resolve %r: %s' % (s, e))
+ v.__cause__, v.__traceback__ = e, tb
+ raise v
+
+ def ext_convert(self, value):
+ """Default converter for the ext:// protocol."""
+ return self.resolve(value)
+
+ def cfg_convert(self, value):
+ """Default converter for the cfg:// protocol."""
+ rest = value
+ m = self.WORD_PATTERN.match(rest)
+ if m is None:
+ raise ValueError("Unable to convert %r" % value)
+ else:
+ rest = rest[m.end():]
+ d = self.config[m.groups()[0]]
+ #print d, rest
+ while rest:
+ m = self.DOT_PATTERN.match(rest)
+ if m:
+ d = d[m.groups()[0]]
+ else:
+ m = self.INDEX_PATTERN.match(rest)
+ if m:
+ idx = m.groups()[0]
+ if not self.DIGIT_PATTERN.match(idx):
+ d = d[idx]
+ else:
+ try:
+ n = int(idx) # try as number first (most likely)
+ d = d[n]
+ except TypeError:
+ d = d[idx]
+ if m:
+ rest = rest[m.end():]
+ else:
+ raise ValueError('Unable to convert '
+ '%r at %r' % (value, rest))
+ #rest should be empty
+ return d
+
+ def convert(self, value):
+ """
+ Convert values to an appropriate type. dicts, lists and tuples are
+ replaced by their converting alternatives. Strings are checked to
+ see if they have a conversion format and are converted if they do.
+ """
+ if not isinstance(value, ConvertingDict) and isinstance(value, dict):
+ value = ConvertingDict(value)
+ value.configurator = self
+ elif not isinstance(value, ConvertingList) and isinstance(value, list):
+ value = ConvertingList(value)
+ value.configurator = self
+ elif not isinstance(value, ConvertingTuple) and\
+ isinstance(value, tuple):
+ value = ConvertingTuple(value)
+ value.configurator = self
+ elif isinstance(value, string_types):
+ m = self.CONVERT_PATTERN.match(value)
+ if m:
+ d = m.groupdict()
+ prefix = d['prefix']
+ converter = self.value_converters.get(prefix, None)
+ if converter:
+ suffix = d['suffix']
+ converter = getattr(self, converter)
+ value = converter(suffix)
+ return value
+
+ def configure_custom(self, config):
+ """Configure an object with a user-supplied factory."""
+ c = config.pop('()')
+ if not callable(c):
+ c = self.resolve(c)
+ props = config.pop('.', None)
+ # Check for valid identifiers
+ kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+ result = c(**kwargs)
+ if props:
+ for name, value in props.items():
+ setattr(result, name, value)
+ return result
+
+ def as_tuple(self, value):
+ """Utility function which converts lists to tuples."""
+ if isinstance(value, list):
+ value = tuple(value)
+ return value
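+
+# An illustrative sketch of the two conversion protocols BaseConfigurator
+# understands (the config values here are hypothetical):
+#
+#     cfg = BaseConfigurator({'handlers': {'console': {'level': 'INFO'}}})
+#     cfg.convert('ext://sys.stdout')              # the sys.stdout object
+#     cfg.convert('cfg://handlers.console.level')  # -> 'INFO'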
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/database.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/database.py
new file mode 100644
index 00000000..b13cdac9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/database.py
@@ -0,0 +1,1339 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""PEP 376 implementation."""
+
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import contextlib
+import hashlib
+import logging
+import os
+import posixpath
+import sys
+import zipimport
+
+from . import DistlibException, resources
+from .compat import StringIO
+from .version import get_scheme, UnsupportedVersionError
+from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME)
+from .util import (parse_requirement, cached_property, parse_name_and_version,
+ read_exports, write_exports, CSVReader, CSVWriter)
+
+
+__all__ = ['Distribution', 'BaseInstalledDistribution',
+ 'InstalledDistribution', 'EggInfoDistribution',
+ 'DistributionPath']
+
+
+logger = logging.getLogger(__name__)
+
+EXPORTS_FILENAME = 'pydist-exports.json'
+COMMANDS_FILENAME = 'pydist-commands.json'
+
+DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
+ 'RESOURCES', EXPORTS_FILENAME, 'SHARED')
+
+DISTINFO_EXT = '.dist-info'
+
+
+class _Cache(object):
+ """
+ A simple cache mapping names and .dist-info paths to distributions
+ """
+ def __init__(self):
+ """
+ Initialise an instance. There is normally one for each DistributionPath.
+ """
+ self.name = {}
+ self.path = {}
+ self.generated = False
+
+ def clear(self):
+ """
+ Clear the cache, setting it to its initial state.
+ """
+ self.name.clear()
+ self.path.clear()
+ self.generated = False
+
+ def add(self, dist):
+ """
+ Add a distribution to the cache.
+ :param dist: The distribution to add.
+ """
+ if dist.path not in self.path:
+ self.path[dist.path] = dist
+ self.name.setdefault(dist.key, []).append(dist)
+
+
+class DistributionPath(object):
+ """
+ Represents a set of distributions installed on a path (typically sys.path).
+ """
+ def __init__(self, path=None, include_egg=False):
+ """
+ Create an instance from a path, optionally including legacy (distutils/
+ setuptools/distribute) distributions.
+ :param path: The path to use, as a list of directories. If not specified,
+ sys.path is used.
+ :param include_egg: If True, this instance will look for and return legacy
+ distributions as well as those based on PEP 376.
+ """
+ if path is None:
+ path = sys.path
+ self.path = path
+ self._include_dist = True
+ self._include_egg = include_egg
+
+ self._cache = _Cache()
+ self._cache_egg = _Cache()
+ self._cache_enabled = True
+ self._scheme = get_scheme('default')
+
+ def _get_cache_enabled(self):
+ return self._cache_enabled
+
+ def _set_cache_enabled(self, value):
+ self._cache_enabled = value
+
+ cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
+
+ def clear_cache(self):
+ """
+ Clears the internal cache.
+ """
+ self._cache.clear()
+ self._cache_egg.clear()
+
+ def _yield_distributions(self):
+ """
+ Yield .dist-info and/or .egg(-info) distributions.
+ """
+ # We need to check if we've seen some resources already, because on
+ # some Linux systems (e.g. some Debian/Ubuntu variants) there are
+ # symlinks which alias other files in the environment.
+ seen = set()
+ for path in self.path:
+ finder = resources.finder_for_path(path)
+ if finder is None:
+ continue
+ r = finder.find('')
+ if not r or not r.is_container:
+ continue
+ rset = sorted(r.resources)
+ for entry in rset:
+ r = finder.find(entry)
+ if not r or r.path in seen:
+ continue
+ if self._include_dist and entry.endswith(DISTINFO_EXT):
+ possible_filenames = [METADATA_FILENAME,
+ WHEEL_METADATA_FILENAME,
+ LEGACY_METADATA_FILENAME]
+ for metadata_filename in possible_filenames:
+ metadata_path = posixpath.join(entry, metadata_filename)
+ pydist = finder.find(metadata_path)
+ if pydist:
+ break
+ else:
+ continue
+
+ with contextlib.closing(pydist.as_stream()) as stream:
+ metadata = Metadata(fileobj=stream, scheme='legacy')
+ logger.debug('Found %s', r.path)
+ seen.add(r.path)
+ yield new_dist_class(r.path, metadata=metadata,
+ env=self)
+ elif self._include_egg and entry.endswith(('.egg-info',
+ '.egg')):
+ logger.debug('Found %s', r.path)
+ seen.add(r.path)
+ yield old_dist_class(r.path, self)
+
+ def _generate_cache(self):
+ """
+ Scan the path for distributions and populate the cache with
+ those that are found.
+ """
+ gen_dist = not self._cache.generated
+ gen_egg = self._include_egg and not self._cache_egg.generated
+ if gen_dist or gen_egg:
+ for dist in self._yield_distributions():
+ if isinstance(dist, InstalledDistribution):
+ self._cache.add(dist)
+ else:
+ self._cache_egg.add(dist)
+
+ if gen_dist:
+ self._cache.generated = True
+ if gen_egg:
+ self._cache_egg.generated = True
+
+ @classmethod
+ def distinfo_dirname(cls, name, version):
+ """
+ The *name* and *version* parameters are converted into their
+ filename-escaped form, i.e. any ``'-'`` characters are replaced
+ with ``'_'`` other than the one in ``'dist-info'`` and the one
+ separating the name from the version number.
+
+ :parameter name: is converted to a standard distribution name by replacing
+                           any runs of non-alphanumeric characters with a single
+ ``'-'``.
+ :type name: string
+ :parameter version: is converted to a standard version string. Spaces
+ become dots, and all other non-alphanumeric characters
+ (except dots) become dashes, with runs of multiple
+ dashes condensed to a single dash.
+ :type version: string
+ :returns: directory name
+ :rtype: string"""
+ name = name.replace('-', '_')
+ return '-'.join([name, version]) + DISTINFO_EXT
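+
+    # A worked example of the escaping described above (names hypothetical):
+    #
+    #     DistributionPath.distinfo_dirname('python-ldap', '2.5')
+    #     -> 'python_ldap-2.5.dist-info'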
+
+ def get_distributions(self):
+ """
+ Provides an iterator that looks for distributions and returns
+ :class:`InstalledDistribution` or
+ :class:`EggInfoDistribution` instances for each one of them.
+
+ :rtype: iterator of :class:`InstalledDistribution` and
+ :class:`EggInfoDistribution` instances
+ """
+ if not self._cache_enabled:
+ for dist in self._yield_distributions():
+ yield dist
+ else:
+ self._generate_cache()
+
+ for dist in self._cache.path.values():
+ yield dist
+
+ if self._include_egg:
+ for dist in self._cache_egg.path.values():
+ yield dist
+
+ def get_distribution(self, name):
+ """
+ Looks for a named distribution on the path.
+
+ This function only returns the first result found, as no more than one
+ value is expected. If nothing is found, ``None`` is returned.
+
+ :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
+ or ``None``
+ """
+ result = None
+ name = name.lower()
+ if not self._cache_enabled:
+ for dist in self._yield_distributions():
+ if dist.key == name:
+ result = dist
+ break
+ else:
+ self._generate_cache()
+
+ if name in self._cache.name:
+ result = self._cache.name[name][0]
+ elif self._include_egg and name in self._cache_egg.name:
+ result = self._cache_egg.name[name][0]
+ return result
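+
+    # Usage sketch (illustrative; assumes this module is importable as
+    # pip._vendor.distlib.database):
+    #
+    #     from pip._vendor.distlib.database import DistributionPath
+    #     dp = DistributionPath(include_egg=True)
+    #     for dist in dp.get_distributions():
+    #         print(dist.name_and_version)
+    #     pip_dist = dp.get_distribution('pip')   # None if not installed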
+
+ def provides_distribution(self, name, version=None):
+ """
+ Iterates over all distributions to find which distributions provide *name*.
+ If a *version* is provided, it will be used to filter the results.
+
+        Unlike :meth:`get_distribution`, this is a generator and yields every
+        matching distribution found; if nothing provides *name*, the iterator
+        is empty.
+
+ :parameter version: a version specifier that indicates the version
+ required, conforming to the format in ``PEP-345``
+
+ :type name: string
+ :type version: string
+ """
+ matcher = None
+ if version is not None:
+ try:
+ matcher = self._scheme.matcher('%s (%s)' % (name, version))
+ except ValueError:
+ raise DistlibException('invalid name or version: %r, %r' %
+ (name, version))
+
+ for dist in self.get_distributions():
+ # We hit a problem on Travis where enum34 was installed and doesn't
+ # have a provides attribute ...
+ if not hasattr(dist, 'provides'):
+ logger.debug('No "provides": %s', dist)
+ else:
+ provided = dist.provides
+
+ for p in provided:
+ p_name, p_ver = parse_name_and_version(p)
+ if matcher is None:
+ if p_name == name:
+ yield dist
+ break
+ else:
+ if p_name == name and matcher.match(p_ver):
+ yield dist
+ break
+
+ def get_file_path(self, name, relative_path):
+ """
+ Return the path to a resource file.
+ """
+ dist = self.get_distribution(name)
+ if dist is None:
+ raise LookupError('no distribution named %r found' % name)
+ return dist.get_resource_path(relative_path)
+
+ def get_exported_entries(self, category, name=None):
+ """
+ Return all of the exported entries in a particular category.
+
+ :param category: The category to search for entries.
+ :param name: If specified, only entries with that name are returned.
+ """
+ for dist in self.get_distributions():
+ r = dist.exports
+ if category in r:
+ d = r[category]
+ if name is not None:
+ if name in d:
+ yield d[name]
+ else:
+ for v in d.values():
+ yield v
+
+
+class Distribution(object):
+ """
+ A base class for distributions, whether installed or from indexes.
+ Either way, it must have some metadata, so that's all that's needed
+ for construction.
+ """
+
+ build_time_dependency = False
+ """
+ Set to True if it's known to be only a build-time dependency (i.e.
+ not needed after installation).
+ """
+
+ requested = False
+ """A boolean that indicates whether the ``REQUESTED`` metadata file is
+ present (in other words, whether the package was installed by user
+ request or it was installed as a dependency)."""
+
+ def __init__(self, metadata):
+ """
+ Initialise an instance.
+ :param metadata: The instance of :class:`Metadata` describing this
+ distribution.
+ """
+ self.metadata = metadata
+ self.name = metadata.name
+ self.key = self.name.lower() # for case-insensitive comparisons
+ self.version = metadata.version
+ self.locator = None
+ self.digest = None
+ self.extras = None # additional features requested
+ self.context = None # environment marker overrides
+ self.download_urls = set()
+ self.digests = {}
+
+ @property
+ def source_url(self):
+ """
+ The source archive download URL for this distribution.
+ """
+ return self.metadata.source_url
+
+ download_url = source_url # Backward compatibility
+
+ @property
+ def name_and_version(self):
+ """
+ A utility property which displays the name and version in parentheses.
+ """
+ return '%s (%s)' % (self.name, self.version)
+
+ @property
+ def provides(self):
+ """
+ A set of distribution names and versions provided by this distribution.
+ :return: A set of "name (version)" strings.
+ """
+ plist = self.metadata.provides
+ s = '%s (%s)' % (self.name, self.version)
+ if s not in plist:
+ plist.append(s)
+ return plist
+
+ def _get_requirements(self, req_attr):
+ md = self.metadata
+ logger.debug('Getting requirements from metadata %r', md.todict())
+ reqts = getattr(md, req_attr)
+ return set(md.get_requirements(reqts, extras=self.extras,
+ env=self.context))
+
+ @property
+ def run_requires(self):
+ return self._get_requirements('run_requires')
+
+ @property
+ def meta_requires(self):
+ return self._get_requirements('meta_requires')
+
+ @property
+ def build_requires(self):
+ return self._get_requirements('build_requires')
+
+ @property
+ def test_requires(self):
+ return self._get_requirements('test_requires')
+
+ @property
+ def dev_requires(self):
+ return self._get_requirements('dev_requires')
+
+ def matches_requirement(self, req):
+ """
+ Say if this instance matches (fulfills) a requirement.
+ :param req: The requirement to match.
+ :rtype req: str
+ :return: True if it matches, else False.
+ """
+ # Requirement may contain extras - parse to lose those
+ # from what's passed to the matcher
+ r = parse_requirement(req)
+ scheme = get_scheme(self.metadata.scheme)
+ try:
+ matcher = scheme.matcher(r.requirement)
+ except UnsupportedVersionError:
+ # XXX compat-mode if cannot read the version
+ logger.warning('could not read version %r - using name only',
+ req)
+ name = req.split()[0]
+ matcher = scheme.matcher(name)
+
+ name = matcher.key # case-insensitive
+
+ result = False
+ for p in self.provides:
+ p_name, p_ver = parse_name_and_version(p)
+ if p_name != name:
+ continue
+ try:
+ result = matcher.match(p_ver)
+ break
+ except UnsupportedVersionError:
+ pass
+ return result
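+
+    # For instance (sketch, hypothetical distribution): if self.provides
+    # contains 'foo (1.2)', then:
+    #
+    #     dist.matches_requirement('foo (>= 1.0)')   # -> True
+    #     dist.matches_requirement('bar (>= 1.0)')   # -> False (name differs)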
+
+ def __repr__(self):
+ """
+ Return a textual representation of this instance,
+ """
+ if self.source_url:
+ suffix = ' [%s]' % self.source_url
+ else:
+ suffix = ''
+ return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
+
+ def __eq__(self, other):
+ """
+ See if this distribution is the same as another.
+ :param other: The distribution to compare with. To be equal to one
+                      another, distributions must have the same type, name,
+ version and source_url.
+ :return: True if it is the same, else False.
+ """
+ if type(other) is not type(self):
+ result = False
+ else:
+ result = (self.name == other.name and
+ self.version == other.version and
+ self.source_url == other.source_url)
+ return result
+
+ def __hash__(self):
+ """
+ Compute hash in a way which matches the equality test.
+ """
+ return hash(self.name) + hash(self.version) + hash(self.source_url)
+
+
+class BaseInstalledDistribution(Distribution):
+ """
+ This is the base class for installed distributions (whether PEP 376 or
+ legacy).
+ """
+
+ hasher = None
+
+ def __init__(self, metadata, path, env=None):
+ """
+ Initialise an instance.
+ :param metadata: An instance of :class:`Metadata` which describes the
+ distribution. This will normally have been initialised
+ from a metadata file in the ``path``.
+ :param path: The path of the ``.dist-info`` or ``.egg-info``
+ directory for the distribution.
+ :param env: This is normally the :class:`DistributionPath`
+ instance where this distribution was found.
+ """
+ super(BaseInstalledDistribution, self).__init__(metadata)
+ self.path = path
+ self.dist_path = env
+
+ def get_hash(self, data, hasher=None):
+ """
+ Get the hash of some data, using a particular hash algorithm, if
+ specified.
+
+ :param data: The data to be hashed.
+ :type data: bytes
+ :param hasher: The name of a hash implementation, supported by hashlib,
+ or ``None``. Examples of valid values are ``'sha1'``,
+ ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
+ ``'sha512'``. If no hasher is specified, the ``hasher``
+ attribute of the :class:`InstalledDistribution` instance
+ is used. If the hasher is determined to be ``None``, MD5
+ is used as the hashing algorithm.
+ :returns: The hash of the data. If a hasher was explicitly specified,
+ the returned hash will be prefixed with the specified hasher
+ followed by '='.
+ :rtype: str
+ """
+ if hasher is None:
+ hasher = self.hasher
+ if hasher is None:
+ hasher = hashlib.md5
+ prefix = ''
+        else:
+            # Compute the prefix from the hasher actually used, as the
+            # docstring promises (it may have been passed in explicitly and
+            # differ from self.hasher).
+            prefix = '%s=' % hasher
+            hasher = getattr(hashlib, hasher)
+ digest = hasher(data).digest()
+ digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
+ return '%s%s' % (prefix, digest)
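+
+    # A worked example of the prefixing behaviour; the value shown is the
+    # urlsafe-base64, unpadded SHA-256 digest of empty input:
+    #
+    #     dist.get_hash(b'', 'sha256')
+    #     -> 'sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU'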
+
+
+class InstalledDistribution(BaseInstalledDistribution):
+ """
+ Created with the *path* of the ``.dist-info`` directory provided to the
+ constructor. It reads the metadata contained in ``pydist.json`` when it is
+    instantiated, or uses a passed-in Metadata instance (useful when
+    dry-run mode is being used).
+ """
+
+ hasher = 'sha256'
+
+ def __init__(self, path, metadata=None, env=None):
+ self.modules = []
+ self.finder = finder = resources.finder_for_path(path)
+ if finder is None:
+ raise ValueError('finder unavailable for %s' % path)
+ if env and env._cache_enabled and path in env._cache.path:
+ metadata = env._cache.path[path].metadata
+ elif metadata is None:
+ r = finder.find(METADATA_FILENAME)
+ # Temporary - for Wheel 0.23 support
+ if r is None:
+ r = finder.find(WHEEL_METADATA_FILENAME)
+ # Temporary - for legacy support
+ if r is None:
+ r = finder.find('METADATA')
+ if r is None:
+ raise ValueError('no %s found in %s' % (METADATA_FILENAME,
+ path))
+ with contextlib.closing(r.as_stream()) as stream:
+ metadata = Metadata(fileobj=stream, scheme='legacy')
+
+ super(InstalledDistribution, self).__init__(metadata, path, env)
+
+ if env and env._cache_enabled:
+ env._cache.add(self)
+
+ r = finder.find('REQUESTED')
+ self.requested = r is not None
+ p = os.path.join(path, 'top_level.txt')
+ if os.path.exists(p):
+ with open(p, 'rb') as f:
+ data = f.read()
+ self.modules = data.splitlines()
+
+ def __repr__(self):
+ return '<InstalledDistribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def __str__(self):
+ return "%s %s" % (self.name, self.version)
+
+ def _get_records(self):
+ """
+ Get the list of installed files for the distribution
+ :return: A list of tuples of path, hash and size. Note that hash and
+ size might be ``None`` for some entries. The path is exactly
+ as stored in the file (which is as in PEP 376).
+ """
+ results = []
+ r = self.get_distinfo_resource('RECORD')
+ with contextlib.closing(r.as_stream()) as stream:
+ with CSVReader(stream=stream) as record_reader:
+ # Base location is parent dir of .dist-info dir
+ #base_location = os.path.dirname(self.path)
+ #base_location = os.path.abspath(base_location)
+ for row in record_reader:
+ missing = [None for i in range(len(row), 3)]
+ path, checksum, size = row + missing
+ #if not os.path.isabs(path):
+ # path = path.replace('/', os.sep)
+ # path = os.path.join(base_location, path)
+ results.append((path, checksum, size))
+ return results
+
+ @cached_property
+ def exports(self):
+ """
+ Return the information exported by this distribution.
+ :return: A dictionary of exports, mapping an export category to a dict
+ of :class:`ExportEntry` instances describing the individual
+ export entries, and keyed by name.
+ """
+ result = {}
+ r = self.get_distinfo_resource(EXPORTS_FILENAME)
+ if r:
+ result = self.read_exports()
+ return result
+
+ def read_exports(self):
+ """
+ Read exports data from a file in .ini format.
+
+ :return: A dictionary of exports, mapping an export category to a list
+ of :class:`ExportEntry` instances describing the individual
+ export entries.
+ """
+ result = {}
+ r = self.get_distinfo_resource(EXPORTS_FILENAME)
+ if r:
+ with contextlib.closing(r.as_stream()) as stream:
+ result = read_exports(stream)
+ return result
+
+ def write_exports(self, exports):
+ """
+ Write a dictionary of exports to a file in .ini format.
+ :param exports: A dictionary of exports, mapping an export category to
+ a list of :class:`ExportEntry` instances describing the
+ individual export entries.
+ """
+ rf = self.get_distinfo_file(EXPORTS_FILENAME)
+ with open(rf, 'w') as f:
+ write_exports(exports, f)
+
+ def get_resource_path(self, relative_path):
+ """
+ NOTE: This API may change in the future.
+
+ Return the absolute path to a resource file with the given relative
+ path.
+
+ :param relative_path: The path, relative to .dist-info, of the resource
+ of interest.
+ :return: The absolute path where the resource is to be found.
+ """
+ r = self.get_distinfo_resource('RESOURCES')
+ with contextlib.closing(r.as_stream()) as stream:
+ with CSVReader(stream=stream) as resources_reader:
+ for relative, destination in resources_reader:
+ if relative == relative_path:
+ return destination
+ raise KeyError('no resource file with relative path %r '
+ 'is installed' % relative_path)
+
+ def list_installed_files(self):
+ """
+ Iterates over the ``RECORD`` entries and returns a tuple
+ ``(path, hash, size)`` for each line.
+
+ :returns: iterator of (path, hash, size)
+ """
+ for result in self._get_records():
+ yield result
+
+ def write_installed_files(self, paths, prefix, dry_run=False):
+ """
+ Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
+ existing ``RECORD`` file is silently overwritten.
+
+ prefix is used to determine when to write absolute paths.
+ """
+ prefix = os.path.join(prefix, '')
+ base = os.path.dirname(self.path)
+ base_under_prefix = base.startswith(prefix)
+ base = os.path.join(base, '')
+ record_path = self.get_distinfo_file('RECORD')
+ logger.info('creating %s', record_path)
+ if dry_run:
+ return None
+ with CSVWriter(record_path) as writer:
+ for path in paths:
+ if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
+ # do not put size and hash, as in PEP-376
+ hash_value = size = ''
+ else:
+ size = '%d' % os.path.getsize(path)
+ with open(path, 'rb') as fp:
+ hash_value = self.get_hash(fp.read())
+ if path.startswith(base) or (base_under_prefix and
+ path.startswith(prefix)):
+ path = os.path.relpath(path, base)
+ writer.writerow((path, hash_value, size))
+
+ # add the RECORD file itself
+ if record_path.startswith(base):
+ record_path = os.path.relpath(record_path, base)
+ writer.writerow((record_path, '', ''))
+ return record_path
+
+ def check_installed_files(self):
+ """
+ Checks that the hashes and sizes of the files in ``RECORD`` are
+ matched by the files themselves. Returns a (possibly empty) list of
+ mismatches. Each entry in the mismatch list will be a tuple consisting
+ of the path, 'exists', 'size' or 'hash' according to what didn't match
+ (existence is checked first, then size, then hash), the expected
+ value and the actual value.
+ """
+ mismatches = []
+ base = os.path.dirname(self.path)
+ record_path = self.get_distinfo_file('RECORD')
+ for path, hash_value, size in self.list_installed_files():
+ if not os.path.isabs(path):
+ path = os.path.join(base, path)
+ if path == record_path:
+ continue
+ if not os.path.exists(path):
+ mismatches.append((path, 'exists', True, False))
+ elif os.path.isfile(path):
+ actual_size = str(os.path.getsize(path))
+ if size and actual_size != size:
+ mismatches.append((path, 'size', size, actual_size))
+ elif hash_value:
+ if '=' in hash_value:
+ hasher = hash_value.split('=', 1)[0]
+ else:
+ hasher = None
+
+ with open(path, 'rb') as f:
+ actual_hash = self.get_hash(f.read(), hasher)
+ if actual_hash != hash_value:
+ mismatches.append((path, 'hash', hash_value, actual_hash))
+ return mismatches
+
+ @cached_property
+ def shared_locations(self):
+ """
+ A dictionary of shared locations whose keys are in the set 'prefix',
+ 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
+ The corresponding value is the absolute path of that category for
+ this distribution, and takes into account any paths selected by the
+ user at installation time (e.g. via command-line arguments). In the
+ case of the 'namespace' key, this would be a list of absolute paths
+ for the roots of namespace packages in this distribution.
+
+ The first time this property is accessed, the relevant information is
+ read from the SHARED file in the .dist-info directory.
+ """
+ result = {}
+ shared_path = os.path.join(self.path, 'SHARED')
+ if os.path.isfile(shared_path):
+ with codecs.open(shared_path, 'r', encoding='utf-8') as f:
+ lines = f.read().splitlines()
+ for line in lines:
+ key, value = line.split('=', 1)
+ if key == 'namespace':
+ result.setdefault(key, []).append(value)
+ else:
+ result[key] = value
+ return result
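+
+    # Illustrative SHARED file content (paths hypothetical); 'namespace' may
+    # appear multiple times and is accumulated into a list:
+    #
+    #     prefix=/usr/local
+    #     lib=/usr/local/lib/python3.7/site-packages
+    #     scripts=/usr/local/bin
+    #     namespace=/usr/local/lib/python3.7/site-packages/zope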
+
+ def write_shared_locations(self, paths, dry_run=False):
+ """
+ Write shared location information to the SHARED file in .dist-info.
+ :param paths: A dictionary as described in the documentation for
+ :meth:`shared_locations`.
+ :param dry_run: If True, the action is logged but no file is actually
+ written.
+ :return: The path of the file written to.
+ """
+ shared_path = os.path.join(self.path, 'SHARED')
+ logger.info('creating %s', shared_path)
+ if dry_run:
+ return None
+ lines = []
+ for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
+ path = paths[key]
+ if os.path.isdir(paths[key]):
+ lines.append('%s=%s' % (key, path))
+ for ns in paths.get('namespace', ()):
+ lines.append('namespace=%s' % ns)
+
+ with codecs.open(shared_path, 'w', encoding='utf-8') as f:
+ f.write('\n'.join(lines))
+ return shared_path
+
+ def get_distinfo_resource(self, path):
+ if path not in DIST_FILES:
+ raise DistlibException('invalid path for a dist-info file: '
+ '%r at %r' % (path, self.path))
+ finder = resources.finder_for_path(self.path)
+ if finder is None:
+ raise DistlibException('Unable to get a finder for %s' % self.path)
+ return finder.find(path)
+
+ def get_distinfo_file(self, path):
+ """
+ Returns a path located under the ``.dist-info`` directory. Returns a
+ string representing the path.
+
+ :parameter path: a ``'/'``-separated path relative to the
+ ``.dist-info`` directory or an absolute path;
+ If *path* is an absolute path and doesn't start
+ with the ``.dist-info`` directory path,
+ a :class:`DistlibException` is raised
+ :type path: str
+ :rtype: str
+ """
+ # Check if it is an absolute path # XXX use relpath, add tests
+ if path.find(os.sep) >= 0:
+ # it's an absolute path?
+ distinfo_dirname, path = path.split(os.sep)[-2:]
+ if distinfo_dirname != self.path.split(os.sep)[-1]:
+ raise DistlibException(
+ 'dist-info file %r does not belong to the %r %s '
+ 'distribution' % (path, self.name, self.version))
+
+ # The file must be relative
+ if path not in DIST_FILES:
+ raise DistlibException('invalid path for a dist-info file: '
+ '%r at %r' % (path, self.path))
+
+ return os.path.join(self.path, path)
+
+ def list_distinfo_files(self):
+ """
+ Iterates over the ``RECORD`` entries and returns paths for each line if
+ the path is pointing to a file located in the ``.dist-info`` directory
+ or one of its subdirectories.
+
+ :returns: iterator of paths
+ """
+ base = os.path.dirname(self.path)
+ for path, checksum, size in self._get_records():
+ # XXX add separator or use real relpath algo
+ if not os.path.isabs(path):
+ path = os.path.join(base, path)
+ if path.startswith(self.path):
+ yield path
+
+ def __eq__(self, other):
+ return (isinstance(other, InstalledDistribution) and
+ self.path == other.path)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
+
+
+class EggInfoDistribution(BaseInstalledDistribution):
+ """Created with the *path* of the ``.egg-info`` directory or file provided
+ to the constructor. It reads the metadata contained in the file itself, or
+ if the given path happens to be a directory, the metadata is read from the
+ file ``PKG-INFO`` under that directory."""
+
+ requested = True # as we have no way of knowing, assume it was
+ shared_locations = {}
+
+ def __init__(self, path, env=None):
+ def set_name_and_version(s, n, v):
+ s.name = n
+ s.key = n.lower() # for case-insensitive comparisons
+ s.version = v
+
+ self.path = path
+ self.dist_path = env
+ if env and env._cache_enabled and path in env._cache_egg.path:
+ metadata = env._cache_egg.path[path].metadata
+ set_name_and_version(self, metadata.name, metadata.version)
+ else:
+ metadata = self._get_metadata(path)
+
+ # Need to be set before caching
+ set_name_and_version(self, metadata.name, metadata.version)
+
+ if env and env._cache_enabled:
+ env._cache_egg.add(self)
+ super(EggInfoDistribution, self).__init__(metadata, path, env)
+
+ def _get_metadata(self, path):
+ requires = None
+
+ def parse_requires_data(data):
+ """Create a list of dependencies from a requires.txt file.
+
+ *data*: the contents of a setuptools-produced requires.txt file.
+ """
+ reqs = []
+ lines = data.splitlines()
+ for line in lines:
+ line = line.strip()
+ if line.startswith('['):
+ logger.warning('Unexpected line: quitting requirement scan: %r',
+ line)
+ break
+ r = parse_requirement(line)
+ if not r:
+ logger.warning('Not recognised as a requirement: %r', line)
+ continue
+ if r.extras:
+ logger.warning('extra requirements in requires.txt are '
+ 'not supported')
+ if not r.constraints:
+ reqs.append(r.name)
+ else:
+ cons = ', '.join('%s%s' % c for c in r.constraints)
+ reqs.append('%s (%s)' % (r.name, cons))
+ return reqs
+
+ def parse_requires_path(req_path):
+ """Create a list of dependencies from a requires.txt file.
+
+ *req_path*: the path to a setuptools-produced requires.txt file.
+ """
+
+ reqs = []
+ try:
+ with codecs.open(req_path, 'r', 'utf-8') as fp:
+ reqs = parse_requires_data(fp.read())
+ except IOError:
+ pass
+ return reqs
+
+ tl_path = tl_data = None
+ if path.endswith('.egg'):
+ if os.path.isdir(path):
+ p = os.path.join(path, 'EGG-INFO')
+ meta_path = os.path.join(p, 'PKG-INFO')
+ metadata = Metadata(path=meta_path, scheme='legacy')
+ req_path = os.path.join(p, 'requires.txt')
+ tl_path = os.path.join(p, 'top_level.txt')
+ requires = parse_requires_path(req_path)
+ else:
+ # FIXME handle the case where zipfile is not available
+ zipf = zipimport.zipimporter(path)
+ fileobj = StringIO(
+ zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
+ metadata = Metadata(fileobj=fileobj, scheme='legacy')
+ try:
+ data = zipf.get_data('EGG-INFO/requires.txt')
+ tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8')
+ requires = parse_requires_data(data.decode('utf-8'))
+ except IOError:
+ requires = None
+ elif path.endswith('.egg-info'):
+ if os.path.isdir(path):
+ req_path = os.path.join(path, 'requires.txt')
+ requires = parse_requires_path(req_path)
+                # Compute tl_path before path is rebound to the PKG-INFO
+                # file: top_level.txt sits alongside PKG-INFO, not below it.
+                tl_path = os.path.join(path, 'top_level.txt')
+                path = os.path.join(path, 'PKG-INFO')
+ metadata = Metadata(path=path, scheme='legacy')
+ else:
+ raise DistlibException('path must end with .egg-info or .egg, '
+ 'got %r' % path)
+
+ if requires:
+ metadata.add_requirements(requires)
+ # look for top-level modules in top_level.txt, if present
+ if tl_data is None:
+ if tl_path is not None and os.path.exists(tl_path):
+ with open(tl_path, 'rb') as f:
+ tl_data = f.read().decode('utf-8')
+ if not tl_data:
+ tl_data = []
+ else:
+ tl_data = tl_data.splitlines()
+ self.modules = tl_data
+ return metadata
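+
+    # Illustrative requires.txt input (setuptools format); as
+    # parse_requires_data() above shows, scanning stops at the first
+    # '[extra]' section header and later lines are ignored:
+    #
+    #     requests >= 2.0
+    #     six
+    #     [security]
+    #     pyOpenSSL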
+
+ def __repr__(self):
+ return '<EggInfoDistribution %r %s at %r>' % (
+ self.name, self.version, self.path)
+
+ def __str__(self):
+ return "%s %s" % (self.name, self.version)
+
+ def check_installed_files(self):
+ """
+ Checks that the hashes and sizes of the files in ``RECORD`` are
+ matched by the files themselves. Returns a (possibly empty) list of
+ mismatches. Each entry in the mismatch list will be a tuple consisting
+ of the path, 'exists', 'size' or 'hash' according to what didn't match
+ (existence is checked first, then size, then hash), the expected
+ value and the actual value.
+ """
+ mismatches = []
+ record_path = os.path.join(self.path, 'installed-files.txt')
+ if os.path.exists(record_path):
+ for path, _, _ in self.list_installed_files():
+ if path == record_path:
+ continue
+ if not os.path.exists(path):
+ mismatches.append((path, 'exists', True, False))
+ return mismatches
+
+ def list_installed_files(self):
+ """
+ Iterates over the ``installed-files.txt`` entries and returns a tuple
+ ``(path, hash, size)`` for each line.
+
+ :returns: a list of (path, hash, size)
+ """
+
+ def _md5(path):
+            with open(path, 'rb') as f:
+                content = f.read()
+            return hashlib.md5(content).hexdigest()
+
+ def _size(path):
+ return os.stat(path).st_size
+
+ record_path = os.path.join(self.path, 'installed-files.txt')
+ result = []
+ if os.path.exists(record_path):
+ with codecs.open(record_path, 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ p = os.path.normpath(os.path.join(self.path, line))
+ # "./" is present as a marker between installed files
+ # and installation metadata files
+ if not os.path.exists(p):
+ logger.warning('Non-existent file: %s', p)
+ if p.endswith(('.pyc', '.pyo')):
+ continue
+ #otherwise fall through and fail
+ if not os.path.isdir(p):
+ result.append((p, _md5(p), _size(p)))
+ result.append((record_path, None, None))
+ return result
+
+ def list_distinfo_files(self, absolute=False):
+ """
+ Iterates over the ``installed-files.txt`` entries and returns paths for
+ each line if the path is pointing to a file located in the
+ ``.egg-info`` directory or one of its subdirectories.
+
+ :parameter absolute: If *absolute* is ``True``, each returned path is
+ transformed into a local absolute path. Otherwise the
+ raw value from ``installed-files.txt`` is returned.
+ :type absolute: boolean
+ :returns: iterator of paths
+ """
+ record_path = os.path.join(self.path, 'installed-files.txt')
+ if os.path.exists(record_path):
+ skip = True
+ with codecs.open(record_path, 'r', encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if line == './':
+ skip = False
+ continue
+ if not skip:
+ p = os.path.normpath(os.path.join(self.path, line))
+ if p.startswith(self.path):
+ if absolute:
+ yield p
+ else:
+ yield line
+
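+    # Illustrative installed-files.txt layout (paths hypothetical): entries
+    # before the './' marker are the installed files, which this generator
+    # skips; entries after it are the .egg-info metadata files it yields:
+    #
+    #     ../foo/__init__.py
+    #     ../foo/bar.py
+    #     ./
+    #     PKG-INFO
+    #     requires.txt
+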
+ def __eq__(self, other):
+ return (isinstance(other, EggInfoDistribution) and
+ self.path == other.path)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ __hash__ = object.__hash__
+
+new_dist_class = InstalledDistribution
+old_dist_class = EggInfoDistribution
+
+
+class DependencyGraph(object):
+ """
+ Represents a dependency graph between distributions.
+
+ The dependency relationships are stored in an ``adjacency_list`` that maps
+ distributions to a list of ``(other, label)`` tuples where ``other``
+ is a distribution and the edge is labeled with ``label`` (i.e. the version
+ specifier, if such was provided). Also, for more efficient traversal, for
+ every distribution ``x``, a list of predecessors is kept in
+ ``reverse_list[x]``. An edge from distribution ``a`` to
+ distribution ``b`` means that ``a`` depends on ``b``. If any missing
+ dependencies are found, they are stored in ``missing``, which is a
+ dictionary that maps distributions to a list of requirements that were not
+ provided by any other distributions.
+ """
+
+ def __init__(self):
+ self.adjacency_list = {}
+ self.reverse_list = {}
+ self.missing = {}
+
+ def add_distribution(self, distribution):
+ """Add the *distribution* to the graph.
+
+ :type distribution: :class:`distutils2.database.InstalledDistribution`
+ or :class:`distutils2.database.EggInfoDistribution`
+ """
+ self.adjacency_list[distribution] = []
+ self.reverse_list[distribution] = []
+ #self.missing[distribution] = []
+
+ def add_edge(self, x, y, label=None):
+ """Add an edge from distribution *x* to distribution *y* with the given
+ *label*.
+
+ :type x: :class:`distutils2.database.InstalledDistribution` or
+ :class:`distutils2.database.EggInfoDistribution`
+ :type y: :class:`distutils2.database.InstalledDistribution` or
+ :class:`distutils2.database.EggInfoDistribution`
+ :type label: ``str`` or ``None``
+ """
+ self.adjacency_list[x].append((y, label))
+ # multiple edges are allowed, so be careful
+ if x not in self.reverse_list[y]:
+ self.reverse_list[y].append(x)
+
+ def add_missing(self, distribution, requirement):
+ """
+ Add a missing *requirement* for the given *distribution*.
+
+ :type distribution: :class:`distutils2.database.InstalledDistribution`
+ or :class:`distutils2.database.EggInfoDistribution`
+ :type requirement: ``str``
+ """
+ logger.debug('%s missing %r', distribution, requirement)
+ self.missing.setdefault(distribution, []).append(requirement)
+
+ def _repr_dist(self, dist):
+ return '%s %s' % (dist.name, dist.version)
+
+ def repr_node(self, dist, level=1):
+ """Prints only a subgraph"""
+ output = [self._repr_dist(dist)]
+ for other, label in self.adjacency_list[dist]:
+ dist = self._repr_dist(other)
+ if label is not None:
+ dist = '%s [%s]' % (dist, label)
+ output.append(' ' * level + str(dist))
+ suboutput = self.repr_node(other, level + 1)
+ subs = suboutput.split('\n')
+ output.extend(subs[1:])
+ return '\n'.join(output)
+
+ def to_dot(self, f, skip_disconnected=True):
+ """Writes a DOT output for the graph to the provided file *f*.
+
+ If *skip_disconnected* is set to ``True``, then all distributions
+ that are not dependent on any other distribution are skipped.
+
+ :type f: has to support ``file``-like operations
+ :type skip_disconnected: ``bool``
+ """
+ disconnected = []
+
+ f.write("digraph dependencies {\n")
+ for dist, adjs in self.adjacency_list.items():
+ if len(adjs) == 0 and not skip_disconnected:
+ disconnected.append(dist)
+ for other, label in adjs:
+                if label is not None:
+ f.write('"%s" -> "%s" [label="%s"]\n' %
+ (dist.name, other.name, label))
+ else:
+ f.write('"%s" -> "%s"\n' % (dist.name, other.name))
+ if not skip_disconnected and len(disconnected) > 0:
+ f.write('subgraph disconnected {\n')
+ f.write('label = "Disconnected"\n')
+ f.write('bgcolor = red\n')
+
+ for dist in disconnected:
+ f.write('"%s"' % dist.name)
+ f.write('\n')
+ f.write('}\n')
+ f.write('}\n')
+
+ def topological_sort(self):
+ """
+ Perform a topological sort of the graph.
+ :return: A tuple, the first element of which is a topologically sorted
+ list of distributions, and the second element of which is a
+ list of distributions that cannot be sorted because they have
+ circular dependencies and so form a cycle.
+ """
+ result = []
+ # Make a shallow copy of the adjacency list
+ alist = {}
+ for k, v in self.adjacency_list.items():
+ alist[k] = v[:]
+ while True:
+ # See what we can remove in this run
+ to_remove = []
+            for k, v in list(alist.items()):
+ if not v:
+ to_remove.append(k)
+ del alist[k]
+ if not to_remove:
+ # What's left in alist (if anything) is a cycle.
+ break
+ # Remove from the adjacency list of others
+ for k, v in alist.items():
+ alist[k] = [(d, r) for d, r in v if d not in to_remove]
+ logger.debug('Moving to result: %s',
+ ['%s (%s)' % (d.name, d.version) for d in to_remove])
+ result.extend(to_remove)
+ return result, list(alist.keys())
+
+ def __repr__(self):
+ """Representation of the graph"""
+ output = []
+ for dist, adjs in self.adjacency_list.items():
+ output.append(self.repr_node(dist))
+ return '\n'.join(output)
+
+
+def make_graph(dists, scheme='default'):
+ """Makes a dependency graph from the given distributions.
+
+ :parameter dists: a list of distributions
+ :type dists: list of :class:`distutils2.database.InstalledDistribution` and
+ :class:`distutils2.database.EggInfoDistribution` instances
+ :rtype: a :class:`DependencyGraph` instance
+ """
+ scheme = get_scheme(scheme)
+ graph = DependencyGraph()
+ provided = {} # maps names to lists of (version, dist) tuples
+
+ # first, build the graph and find out what's provided
+ for dist in dists:
+ graph.add_distribution(dist)
+
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+ provided.setdefault(name, []).append((version, dist))
+
+ # now make the edges
+ for dist in dists:
+ requires = (dist.run_requires | dist.meta_requires |
+ dist.build_requires | dist.dev_requires)
+ for req in requires:
+ try:
+ matcher = scheme.matcher(req)
+ except UnsupportedVersionError:
+ # XXX compat-mode if cannot read the version
+ logger.warning('could not read version %r - using name only',
+ req)
+ name = req.split()[0]
+ matcher = scheme.matcher(name)
+
+ name = matcher.key # case-insensitive
+
+ matched = False
+ if name in provided:
+ for version, provider in provided[name]:
+ try:
+ match = matcher.match(version)
+ except UnsupportedVersionError:
+ match = False
+
+ if match:
+ graph.add_edge(dist, provider, req)
+ matched = True
+ break
+ if not matched:
+ graph.add_missing(dist, req)
+ return graph
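+
+# Usage sketch tying the pieces together (illustrative):
+#
+#     dists = list(DistributionPath().get_distributions())
+#     graph = make_graph(dists)
+#     ordered, cyclic = graph.topological_sort()
+#     # 'ordered' lists dependencies before dependents; 'cyclic' holds any
+#     # distributions that participate in dependency cycles.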
+
+
+def get_dependent_dists(dists, dist):
+ """Recursively generate a list of distributions from *dists* that are
+ dependent on *dist*.
+
+ :param dists: a list of distributions
+ :param dist: a distribution, member of *dists* for which we are interested
+ """
+ if dist not in dists:
+ raise DistlibException('given distribution %r is not a member '
+ 'of the list' % dist.name)
+ graph = make_graph(dists)
+
+ dep = [dist] # dependent distributions
+ todo = graph.reverse_list[dist] # list of nodes we should inspect
+
+ while todo:
+ d = todo.pop()
+ dep.append(d)
+ for succ in graph.reverse_list[d]:
+ if succ not in dep:
+ todo.append(succ)
+
+ dep.pop(0) # remove dist from dep, was there to prevent infinite loops
+ return dep
+
+
+def get_required_dists(dists, dist):
+ """Recursively generate a list of distributions from *dists* that are
+ required by *dist*.
+
+ :param dists: a list of distributions
+ :param dist: a distribution, member of *dists* for which we are interested
+ """
+ if dist not in dists:
+ raise DistlibException('given distribution %r is not a member '
+ 'of the list' % dist.name)
+ graph = make_graph(dists)
+
+ req = [] # required distributions
+ todo = graph.adjacency_list[dist] # list of nodes we should inspect
+
+    while todo:
+        d = todo.pop()[0]
+        if d in req:  # guard against cycles and duplicate queue entries
+            continue
+        req.append(d)
+        for pred in graph.adjacency_list[d]:
+            # adjacency entries are (dist, label) tuples; compare the dist
+            if pred[0] not in req:
+                todo.append(pred)
+
+ return req
+
+
+def make_dist(name, version, **kwargs):
+ """
+ A convenience method for making a dist given just a name and version.
+ """
+ summary = kwargs.pop('summary', 'Placeholder for summary')
+ md = Metadata(**kwargs)
+ md.name = name
+ md.version = version
+ md.summary = summary or 'Placeholder for summary'
+ return Distribution(md)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/index.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/index.py
new file mode 100644
index 00000000..7a87cdcf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/index.py
@@ -0,0 +1,516 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import hashlib
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+try:
+ from threading import Thread
+except ImportError:
+ from dummy_threading import Thread
+
+from . import DistlibException
+from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
+ urlparse, build_opener, string_types)
+from .util import cached_property, zip_dir, ServerProxy
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+DEFAULT_REALM = 'pypi'
+
+class PackageIndex(object):
+ """
+ This class represents a package index compatible with PyPI, the Python
+ Package Index.
+ """
+
+ boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
+
+ def __init__(self, url=None):
+ """
+ Initialise an instance.
+
+ :param url: The URL of the index. If not specified, the URL for PyPI is
+ used.
+ """
+ self.url = url or DEFAULT_INDEX
+ self.read_configuration()
+ scheme, netloc, path, params, query, frag = urlparse(self.url)
+ if params or query or frag or scheme not in ('http', 'https'):
+ raise DistlibException('invalid repository: %s' % self.url)
+ self.password_handler = None
+ self.ssl_verifier = None
+ self.gpg = None
+ self.gpg_home = None
+ with open(os.devnull, 'w') as sink:
+ # Use gpg by default rather than gpg2, as gpg2 insists on
+ # prompting for passwords
+ for s in ('gpg', 'gpg2'):
+ try:
+ rc = subprocess.check_call([s, '--version'], stdout=sink,
+ stderr=sink)
+ if rc == 0:
+ self.gpg = s
+ break
+ except OSError:
+ pass
+
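+    # Construction sketch (illustrative): a client for PyPI by default, or
+    # for a private index given its URL:
+    #
+    #     index = PackageIndex()                          # uses DEFAULT_INDEX
+    #     index = PackageIndex('https://pypi.example.com/pypi')
+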
+ def _get_pypirc_command(self):
+ """
+ Get the distutils command for interacting with PyPI configurations.
+ :return: the command.
+ """
+ from distutils.core import Distribution
+ from distutils.config import PyPIRCCommand
+ d = Distribution()
+ return PyPIRCCommand(d)
+
+ def read_configuration(self):
+ """
+        Read the PyPI access configuration as supported by distutils, getting
+        distutils to do the actual work. This populates ``username``, ``password``,
+ ``realm`` and ``url`` attributes from the configuration.
+ """
+ # get distutils to do the work
+ c = self._get_pypirc_command()
+ c.repository = self.url
+ cfg = c._read_pypirc()
+ self.username = cfg.get('username')
+ self.password = cfg.get('password')
+ self.realm = cfg.get('realm', 'pypi')
+ self.url = cfg.get('repository', self.url)
+
+ def save_configuration(self):
+ """
+ Save the PyPI access configuration. You must have set ``username`` and
+ ``password`` attributes before calling this method.
+
+ Again, distutils is used to do the actual work.
+ """
+ self.check_credentials()
+ # get distutils to do the work
+ c = self._get_pypirc_command()
+ c._store_pypirc(self.username, self.password)
+
+ def check_credentials(self):
+ """
+ Check that ``username`` and ``password`` have been set, and raise an
+ exception if not.
+ """
+ if self.username is None or self.password is None:
+ raise DistlibException('username and password must be set')
+ pm = HTTPPasswordMgr()
+ _, netloc, _, _, _, _ = urlparse(self.url)
+ pm.add_password(self.realm, netloc, self.username, self.password)
+ self.password_handler = HTTPBasicAuthHandler(pm)
+
+ def register(self, metadata):
+ """
+ Register a distribution on PyPI, using the provided metadata.
+
+ :param metadata: A :class:`Metadata` instance defining at least a name
+ and version number for the distribution to be
+ registered.
+ :return: The HTTP response received from PyPI upon submission of the
+ request.
+ """
+ self.check_credentials()
+ metadata.validate()
+ d = metadata.todict()
+ d[':action'] = 'verify'
+ request = self.encode_request(d.items(), [])
+ response = self.send_request(request)
+ d[':action'] = 'submit'
+ request = self.encode_request(d.items(), [])
+ return self.send_request(request)
+
+ def _reader(self, name, stream, outbuf):
+ """
+        Thread runner for reading lines from a subprocess into a buffer.
+
+ :param name: The logical name of the stream (used for logging only).
+        :param stream: The stream to read from. This will typically be a pipe
+ connected to the output stream of a subprocess.
+ :param outbuf: The list to append the read lines to.
+ """
+ while True:
+ s = stream.readline()
+ if not s:
+ break
+ s = s.decode('utf-8').rstrip()
+ outbuf.append(s)
+ logger.debug('%s: %s' % (name, s))
+ stream.close()
+
+ def get_sign_command(self, filename, signer, sign_password,
+ keystore=None):
+ """
+ Return a suitable command for signing a file.
+
+ :param filename: The pathname to the file to be signed.
+ :param signer: The identifier of the signer of the file.
+ :param sign_password: The passphrase for the signer's
+ private key used for signing.
+ :param keystore: The path to a directory which contains the keys
+ used in verification. If not specified, the
+ instance's ``gpg_home`` attribute is used instead.
+ :return: The signing command as a list suitable to be
+ passed to :class:`subprocess.Popen`.
+ """
+ cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+ if keystore is None:
+ keystore = self.gpg_home
+ if keystore:
+ cmd.extend(['--homedir', keystore])
+ if sign_password is not None:
+ cmd.extend(['--batch', '--passphrase-fd', '0'])
+ td = tempfile.mkdtemp()
+ sf = os.path.join(td, os.path.basename(filename) + '.asc')
+ cmd.extend(['--detach-sign', '--armor', '--local-user',
+ signer, '--output', sf, filename])
+ logger.debug('invoking: %s', ' '.join(cmd))
+ return cmd, sf
+
+ def run_command(self, cmd, input_data=None):
+ """
+        Run a command in a child process, passing it any input data specified.
+
+ :param cmd: The command to run.
+ :param input_data: If specified, this must be a byte string containing
+ data to be sent to the child process.
+ :return: A tuple consisting of the subprocess' exit code, a list of
+ lines read from the subprocess' ``stdout``, and a list of
+ lines read from the subprocess' ``stderr``.
+ """
+ kwargs = {
+ 'stdout': subprocess.PIPE,
+ 'stderr': subprocess.PIPE,
+ }
+ if input_data is not None:
+ kwargs['stdin'] = subprocess.PIPE
+ stdout = []
+ stderr = []
+ p = subprocess.Popen(cmd, **kwargs)
+ # We don't use communicate() here because we may need to
+ # get clever with interacting with the command
+ t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
+ t1.start()
+ t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
+ t2.start()
+ if input_data is not None:
+ p.stdin.write(input_data)
+ p.stdin.close()
+
+ p.wait()
+ t1.join()
+ t2.join()
+ return p.returncode, stdout, stderr
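+
+    # For example (illustrative), probing for gpg much as __init__ does, but
+    # capturing the output:
+    #
+    #     rc, stdout, stderr = index.run_command(['gpg', '--version'])
+    #     if rc == 0:
+    #         print(stdout[0])    # first line of gpg's version banner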
+
+ def sign_file(self, filename, signer, sign_password, keystore=None):
+ """
+ Sign a file.
+
+ :param filename: The pathname to the file to be signed.
+ :param signer: The identifier of the signer of the file.
+ :param sign_password: The passphrase for the signer's
+ private key used for signing.
+ :param keystore: The path to a directory which contains the keys
+ used in signing. If not specified, the instance's
+ ``gpg_home`` attribute is used instead.
+ :return: The absolute pathname of the file where the signature is
+ stored.
+ """
+ cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
+ keystore)
+ rc, stdout, stderr = self.run_command(cmd,
+ sign_password.encode('utf-8'))
+ if rc != 0:
+ raise DistlibException('sign command failed with error '
+ 'code %s' % rc)
+ return sig_file
+
+ def upload_file(self, metadata, filename, signer=None, sign_password=None,
+ filetype='sdist', pyversion='source', keystore=None):
+ """
+ Upload a release file to the index.
+
+ :param metadata: A :class:`Metadata` instance defining at least a name
+ and version number for the file to be uploaded.
+ :param filename: The pathname of the file to be uploaded.
+ :param signer: The identifier of the signer of the file.
+ :param sign_password: The passphrase for the signer's
+ private key used for signing.
+ :param filetype: The type of the file being uploaded. This is the
+ distutils command which produced that file, e.g.
+ ``sdist`` or ``bdist_wheel``.
+ :param pyversion: The version of Python which the release relates
+ to. For code compatible with any Python, this would
+ be ``source``, otherwise it would be e.g. ``3.2``.
+ :param keystore: The path to a directory which contains the keys
+ used in signing. If not specified, the instance's
+ ``gpg_home`` attribute is used instead.
+ :return: The HTTP response received from PyPI upon submission of the
+ request.
+ """
+ self.check_credentials()
+ if not os.path.exists(filename):
+ raise DistlibException('not found: %s' % filename)
+ metadata.validate()
+ d = metadata.todict()
+ sig_file = None
+ if signer:
+ if not self.gpg:
+ logger.warning('no signing program available - not signed')
+ else:
+ sig_file = self.sign_file(filename, signer, sign_password,
+ keystore)
+ with open(filename, 'rb') as f:
+ file_data = f.read()
+ md5_digest = hashlib.md5(file_data).hexdigest()
+ sha256_digest = hashlib.sha256(file_data).hexdigest()
+ d.update({
+ ':action': 'file_upload',
+ 'protocol_version': '1',
+ 'filetype': filetype,
+ 'pyversion': pyversion,
+ 'md5_digest': md5_digest,
+ 'sha256_digest': sha256_digest,
+ })
+ files = [('content', os.path.basename(filename), file_data)]
+ if sig_file:
+ with open(sig_file, 'rb') as f:
+ sig_data = f.read()
+ files.append(('gpg_signature', os.path.basename(sig_file),
+ sig_data))
+ shutil.rmtree(os.path.dirname(sig_file))
+ request = self.encode_request(d.items(), files)
+ return self.send_request(request)
+
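+    # A minimal sketch of the upload flow (illustrative only; the index URL,
+    # credentials, file paths and the Metadata constructor arguments are
+    # hypothetical and may differ in your setup):
+    #
+    #   from distlib.index import PackageIndex
+    #   from distlib.metadata import Metadata
+    #
+    #   index = PackageIndex('https://test.pypi.org/legacy/')
+    #   index.username, index.password = 'user', 'secret'
+    #   md = Metadata(path='dist/foo-1.0/PKG-INFO')
+    #   resp = index.upload_file(md, 'dist/foo-1.0.tar.gz',
+    #                            signer='Dev Key', sign_password='passphrase')
+    #   print(resp.getcode())
+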
+ def upload_documentation(self, metadata, doc_dir):
+ """
+ Upload documentation to the index.
+
+ :param metadata: A :class:`Metadata` instance defining at least a name
+ and version number for the documentation to be
+ uploaded.
+ :param doc_dir: The pathname of the directory which contains the
+ documentation. This should be the directory that
+ contains the ``index.html`` for the documentation.
+ :return: The HTTP response received from PyPI upon submission of the
+ request.
+ """
+ self.check_credentials()
+ if not os.path.isdir(doc_dir):
+ raise DistlibException('not a directory: %r' % doc_dir)
+ fn = os.path.join(doc_dir, 'index.html')
+ if not os.path.exists(fn):
+ raise DistlibException('not found: %r' % fn)
+ metadata.validate()
+ name, version = metadata.name, metadata.version
+ zip_data = zip_dir(doc_dir).getvalue()
+ fields = [(':action', 'doc_upload'),
+ ('name', name), ('version', version)]
+ files = [('content', name, zip_data)]
+ request = self.encode_request(fields, files)
+ return self.send_request(request)
+
+ def get_verify_command(self, signature_filename, data_filename,
+ keystore=None):
+ """
+ Return a suitable command for verifying a file.
+
+ :param signature_filename: The pathname to the file containing the
+ signature.
+ :param data_filename: The pathname to the file containing the
+ signed data.
+ :param keystore: The path to a directory which contains the keys
+ used in verification. If not specified, the
+ instance's ``gpg_home`` attribute is used instead.
+ :return: The verifying command as a list suitable to be
+ passed to :class:`subprocess.Popen`.
+ """
+ cmd = [self.gpg, '--status-fd', '2', '--no-tty']
+ if keystore is None:
+ keystore = self.gpg_home
+ if keystore:
+ cmd.extend(['--homedir', keystore])
+ cmd.extend(['--verify', signature_filename, data_filename])
+ logger.debug('invoking: %s', ' '.join(cmd))
+ return cmd
+
+ def verify_signature(self, signature_filename, data_filename,
+ keystore=None):
+ """
+ Verify a signature for a file.
+
+ :param signature_filename: The pathname to the file containing the
+ signature.
+ :param data_filename: The pathname to the file containing the
+ signed data.
+ :param keystore: The path to a directory which contains the keys
+ used in verification. If not specified, the
+ instance's ``gpg_home`` attribute is used instead.
+ :return: True if the signature was verified, else False.
+ """
+        if not self.gpg:
+            raise DistlibException('verification unavailable because gpg '
+                                   'is unavailable')
+ cmd = self.get_verify_command(signature_filename, data_filename,
+ keystore)
+ rc, stdout, stderr = self.run_command(cmd)
+ if rc not in (0, 1):
+ raise DistlibException('verify command failed with error '
+ 'code %s' % rc)
+ return rc == 0
+
+ def download_file(self, url, destfile, digest=None, reporthook=None):
+ """
+        This is a convenience method for downloading a file from a URL.
+        Normally, this will be a file from the index, though currently
+        no check is made for this (i.e. a file can be downloaded from
+        anywhere).
+
+        The method is just like the :func:`urlretrieve` function in the
+        standard library, except that it allows digest computation to be
+        done during download and checks that the downloaded data
+        matches any expected value.
+
+ :param url: The URL of the file to be downloaded (assumed to be
+ available via an HTTP GET request).
+ :param destfile: The pathname where the downloaded file is to be
+ saved.
+ :param digest: If specified, this must be a (hasher, value)
+ tuple, where hasher is the algorithm used (e.g.
+ ``'md5'``) and ``value`` is the expected value.
+ :param reporthook: The same as for :func:`urlretrieve` in the
+ standard library.
+ """
+ if digest is None:
+ digester = None
+ logger.debug('No digest specified')
+ else:
+ if isinstance(digest, (list, tuple)):
+ hasher, digest = digest
+ else:
+ hasher = 'md5'
+ digester = getattr(hashlib, hasher)()
+            logger.debug('Digest specified: %s', digest)
+ # The following code is equivalent to urlretrieve.
+ # We need to do it this way so that we can compute the
+ # digest of the file as we go.
+ with open(destfile, 'wb') as dfp:
+ # addinfourl is not a context manager on 2.x
+ # so we have to use try/finally
+ sfp = self.send_request(Request(url))
+ try:
+ headers = sfp.info()
+ blocksize = 8192
+ size = -1
+ read = 0
+ blocknum = 0
+ if "content-length" in headers:
+ size = int(headers["Content-Length"])
+ if reporthook:
+ reporthook(blocknum, blocksize, size)
+ while True:
+ block = sfp.read(blocksize)
+ if not block:
+ break
+ read += len(block)
+ dfp.write(block)
+ if digester:
+ digester.update(block)
+ blocknum += 1
+ if reporthook:
+ reporthook(blocknum, blocksize, size)
+ finally:
+ sfp.close()
+
+ # check that we got the whole file, if we can
+ if size >= 0 and read < size:
+ raise DistlibException(
+ 'retrieval incomplete: got only %d out of %d bytes'
+ % (read, size))
+ # if we have a digest, it must match.
+ if digester:
+ actual = digester.hexdigest()
+ if digest != actual:
+ raise DistlibException('%s digest mismatch for %s: expected '
+ '%s, got %s' % (hasher, destfile,
+ digest, actual))
+ logger.debug('Digest verified: %s', digest)
+
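+    # A minimal usage sketch (hypothetical URL, path and digest value):
+    #
+    #   index.download_file(
+    #       'https://files.example.com/foo-1.0.tar.gz',
+    #       '/tmp/foo-1.0.tar.gz',
+    #       digest=('sha256', 'deadbeef...'),   # (hasher, expected hexdigest)
+    #       reporthook=lambda blocks, bs, total: None)
+    #
+    # Passing a bare string as `digest` implies md5, per the fallback above.
+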
+ def send_request(self, req):
+ """
+ Send a standard library :class:`Request` to PyPI and return its
+ response.
+
+ :param req: The request to send.
+ :return: The HTTP response from PyPI (a standard library HTTPResponse).
+ """
+ handlers = []
+ if self.password_handler:
+ handlers.append(self.password_handler)
+ if self.ssl_verifier:
+ handlers.append(self.ssl_verifier)
+ opener = build_opener(*handlers)
+ return opener.open(req)
+
+ def encode_request(self, fields, files):
+ """
+ Encode fields and files for posting to an HTTP server.
+
+ :param fields: The fields to send as a list of (fieldname, value)
+ tuples.
+        :param files: The files to send as a list of (fieldname, filename,
+                      file_bytes) tuples.
+ """
+ # Adapted from packaging, which in turn was adapted from
+ # http://code.activestate.com/recipes/146306
+
+ parts = []
+ boundary = self.boundary
+ for k, values in fields:
+ if not isinstance(values, (list, tuple)):
+ values = [values]
+
+ for v in values:
+ parts.extend((
+ b'--' + boundary,
+ ('Content-Disposition: form-data; name="%s"' %
+ k).encode('utf-8'),
+ b'',
+ v.encode('utf-8')))
+ for key, filename, value in files:
+ parts.extend((
+ b'--' + boundary,
+ ('Content-Disposition: form-data; name="%s"; filename="%s"' %
+ (key, filename)).encode('utf-8'),
+ b'',
+ value))
+
+ parts.extend((b'--' + boundary + b'--', b''))
+
+ body = b'\r\n'.join(parts)
+ ct = b'multipart/form-data; boundary=' + boundary
+ headers = {
+ 'Content-type': ct,
+ 'Content-length': str(len(body))
+ }
+ return Request(self.url, body, headers)
+
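+    # For illustration, a call such as (hypothetical values):
+    #
+    #   req = index.encode_request([('name', 'foo'), ('version', '1.0')],
+    #                              [('content', 'foo-1.0.tar.gz', b'...')])
+    #
+    # produces a Request whose body uses standard multipart/form-data
+    # framing: each part is '--<boundary>', a Content-Disposition header, a
+    # blank line and the value, all joined with CRLF and terminated by
+    # '--<boundary>--'.
+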
+ def search(self, terms, operator=None):
+ if isinstance(terms, string_types):
+ terms = {'name': terms}
+ rpc_proxy = ServerProxy(self.url, timeout=3.0)
+ try:
+ return rpc_proxy.search(terms, operator or 'and')
+ finally:
+ rpc_proxy('close')()
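+
+    # A minimal usage sketch (hypothetical terms; note that PyPI has
+    # deprecated XML-RPC search, so this may fail against the live index):
+    #
+    #   hits = index.search('requests')                        # name search
+    #   hits = index.search({'summary': 'HTTP'}, operator='or')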
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py
new file mode 100644
index 00000000..a7ed9469
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py
@@ -0,0 +1,1295 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2015 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+
+import gzip
+from io import BytesIO
+import json
+import logging
+import os
+import posixpath
+import re
+try:
+ import threading
+except ImportError: # pragma: no cover
+ import dummy_threading as threading
+import zlib
+
+from . import DistlibException
+from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
+ queue, quote, unescape, string_types, build_opener,
+ HTTPRedirectHandler as BaseRedirectHandler, text_type,
+ Request, HTTPError, URLError)
+from .database import Distribution, DistributionPath, make_dist
+from .metadata import Metadata, MetadataInvalidError
+from .util import (cached_property, parse_credentials, ensure_slash,
+ split_filename, get_project_data, parse_requirement,
+ parse_name_and_version, ServerProxy, normalize_name)
+from .version import get_scheme, UnsupportedVersionError
+from .wheel import Wheel, is_compatible
+
+logger = logging.getLogger(__name__)
+
+HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
+CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
+HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
+DEFAULT_INDEX = 'https://pypi.org/pypi'
+
+def get_all_distribution_names(url=None):
+ """
+ Return all distribution names known by an index.
+ :param url: The URL of the index.
+ :return: A list of all known distribution names.
+ """
+ if url is None:
+ url = DEFAULT_INDEX
+ client = ServerProxy(url, timeout=3.0)
+ try:
+ return client.list_packages()
+ finally:
+ client('close')()
+
+class RedirectHandler(BaseRedirectHandler):
+ """
+ A class to work around a bug in some Python 3.2.x releases.
+ """
+ # There's a bug in the base version for some 3.2.x
+ # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
+ # returns e.g. /abc, it bails because it says the scheme ''
+ # is bogus, when actually it should use the request's
+ # URL for the scheme. See Python issue #13696.
+ def http_error_302(self, req, fp, code, msg, headers):
+ # Some servers (incorrectly) return multiple Location headers
+ # (so probably same goes for URI). Use first header.
+ newurl = None
+ for key in ('location', 'uri'):
+ if key in headers:
+ newurl = headers[key]
+ break
+ if newurl is None: # pragma: no cover
+ return
+ urlparts = urlparse(newurl)
+ if urlparts.scheme == '':
+ newurl = urljoin(req.get_full_url(), newurl)
+ if hasattr(headers, 'replace_header'):
+ headers.replace_header(key, newurl)
+ else:
+ headers[key] = newurl
+ return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
+ headers)
+
+ http_error_301 = http_error_303 = http_error_307 = http_error_302
+
+class Locator(object):
+ """
+ A base class for locators - things that locate distributions.
+ """
+ source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
+ binary_extensions = ('.egg', '.exe', '.whl')
+ excluded_extensions = ('.pdf',)
+
+ # A list of tags indicating which wheels you want to match. The default
+ # value of None matches against the tags compatible with the running
+ # Python. If you want to match other values, set wheel_tags on a locator
+ # instance to a list of tuples (pyver, abi, arch) which you want to match.
+ wheel_tags = None
+
+ downloadable_extensions = source_extensions + ('.whl',)
+
+ def __init__(self, scheme='default'):
+ """
+ Initialise an instance.
+ :param scheme: Because locators look for most recent versions, they
+ need to know the version scheme to use. This specifies
+ the current PEP-recommended scheme - use ``'legacy'``
+ if you need to support existing distributions on PyPI.
+ """
+ self._cache = {}
+ self.scheme = scheme
+ # Because of bugs in some of the handlers on some of the platforms,
+ # we use our own opener rather than just using urlopen.
+ self.opener = build_opener(RedirectHandler())
+ # If get_project() is called from locate(), the matcher instance
+ # is set from the requirement passed to locate(). See issue #18 for
+ # why this can be useful to know.
+ self.matcher = None
+ self.errors = queue.Queue()
+
+ def get_errors(self):
+ """
+ Return any errors which have occurred.
+ """
+ result = []
+ while not self.errors.empty(): # pragma: no cover
+ try:
+ e = self.errors.get(False)
+ result.append(e)
+            except queue.Empty:
+ continue
+ self.errors.task_done()
+ return result
+
+ def clear_errors(self):
+ """
+ Clear any errors which may have been logged.
+ """
+ # Just get the errors and throw them away
+ self.get_errors()
+
+ def clear_cache(self):
+ self._cache.clear()
+
+ def _get_scheme(self):
+ return self._scheme
+
+ def _set_scheme(self, value):
+ self._scheme = value
+
+ scheme = property(_get_scheme, _set_scheme)
+
+ def _get_project(self, name):
+ """
+ For a given project, get a dictionary mapping available versions to Distribution
+ instances.
+
+ This should be implemented in subclasses.
+
+ If called from a locate() request, self.matcher will be set to a
+ matcher for the requirement to satisfy, otherwise it will be None.
+ """
+ raise NotImplementedError('Please implement in the subclass')
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ raise NotImplementedError('Please implement in the subclass')
+
+ def get_project(self, name):
+ """
+ For a given project, get a dictionary mapping available versions to Distribution
+ instances.
+
+ This calls _get_project to do all the work, and just implements a caching layer on top.
+ """
+ if self._cache is None: # pragma: no cover
+ result = self._get_project(name)
+ elif name in self._cache:
+ result = self._cache[name]
+ else:
+ self.clear_errors()
+ result = self._get_project(name)
+ self._cache[name] = result
+ return result
+
+ def score_url(self, url):
+        Give a URL a score which can be used to choose preferred URLs
+ Give an url a score which can be used to choose preferred URLs
+ for a given project release.
+ """
+ t = urlparse(url)
+ basename = posixpath.basename(t.path)
+ compatible = True
+ is_wheel = basename.endswith('.whl')
+ is_downloadable = basename.endswith(self.downloadable_extensions)
+ if is_wheel:
+ compatible = is_compatible(Wheel(basename), self.wheel_tags)
+ return (t.scheme == 'https', 'pypi.org' in t.netloc,
+ is_downloadable, is_wheel, compatible, basename)
+
+ def prefer_url(self, url1, url2):
+ """
+ Choose one of two URLs where both are candidates for distribution
+ archives for the same version of a distribution (for example,
+ .tar.gz vs. zip).
+
+ The current implementation favours https:// URLs over http://, archives
+ from PyPI over those from other locations, wheel compatibility (if a
+ wheel) and then the archive name.
+ """
+ result = url2
+ if url1:
+ s1 = self.score_url(url1)
+ s2 = self.score_url(url2)
+ if s1 > s2:
+ result = url1
+ if result != url2:
+ logger.debug('Not replacing %r with %r', url1, url2)
+ else:
+ logger.debug('Replacing %r with %r', url1, url2)
+ return result
+
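+    # For example (hypothetical URLs, assuming `loc` is any concrete Locator
+    # instance), score tuples compare elementwise, so an https URL on
+    # pypi.org outranks an http mirror of the same archive:
+    #
+    #   s1 = loc.score_url('https://pypi.org/packages/foo-1.0.tar.gz')
+    #   s2 = loc.score_url('http://mirror.example.com/foo-1.0.tar.gz')
+    #   assert s1 > s2   # https and pypi.org win the first two comparisons
+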
+ def split_filename(self, filename, project_name):
+ """
+        Attempt to split a filename into project name, version and Python version.
+ """
+ return split_filename(filename, project_name)
+
+ def convert_url_to_download_info(self, url, project_name):
+ """
+ See if a URL is a candidate for a download URL for a project (the URL
+ has typically been scraped from an HTML page).
+
+ If it is, a dictionary is returned with keys "name", "version",
+ "filename" and "url"; otherwise, None is returned.
+ """
+ def same_project(name1, name2):
+ return normalize_name(name1) == normalize_name(name2)
+
+ result = None
+ scheme, netloc, path, params, query, frag = urlparse(url)
+ if frag.lower().startswith('egg='): # pragma: no cover
+ logger.debug('%s: version hint in fragment: %r',
+ project_name, frag)
+ m = HASHER_HASH.match(frag)
+ if m:
+ algo, digest = m.groups()
+ else:
+ algo, digest = None, None
+ origpath = path
+ if path and path[-1] == '/': # pragma: no cover
+ path = path[:-1]
+ if path.endswith('.whl'):
+ try:
+ wheel = Wheel(path)
+ if not is_compatible(wheel, self.wheel_tags):
+ logger.debug('Wheel not compatible: %s', path)
+ else:
+ if project_name is None:
+ include = True
+ else:
+ include = same_project(wheel.name, project_name)
+ if include:
+ result = {
+ 'name': wheel.name,
+ 'version': wheel.version,
+ 'filename': wheel.filename,
+ 'url': urlunparse((scheme, netloc, origpath,
+ params, query, '')),
+ 'python-version': ', '.join(
+ ['.'.join(list(v[2:])) for v in wheel.pyver]),
+ }
+ except Exception as e: # pragma: no cover
+ logger.warning('invalid path for wheel: %s', path)
+ elif not path.endswith(self.downloadable_extensions): # pragma: no cover
+ logger.debug('Not downloadable: %s', path)
+ else: # downloadable extension
+ path = filename = posixpath.basename(path)
+ for ext in self.downloadable_extensions:
+ if path.endswith(ext):
+ path = path[:-len(ext)]
+ t = self.split_filename(path, project_name)
+ if not t: # pragma: no cover
+ logger.debug('No match for project/version: %s', path)
+ else:
+ name, version, pyver = t
+ if not project_name or same_project(project_name, name):
+ result = {
+ 'name': name,
+ 'version': version,
+ 'filename': filename,
+ 'url': urlunparse((scheme, netloc, origpath,
+ params, query, '')),
+ #'packagetype': 'sdist',
+ }
+ if pyver: # pragma: no cover
+ result['python-version'] = pyver
+ break
+ if result and algo:
+ result['%s_digest' % algo] = digest
+ return result
+
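+    # A hypothetical example of the result for a matching sdist URL
+    # (assuming `loc` is a Locator instance):
+    #
+    #   loc.convert_url_to_download_info(
+    #       'https://example.com/dist/foo-1.0.tar.gz#sha256=0123abcd', 'foo')
+    #   # -> {'name': 'foo', 'version': '1.0',
+    #   #     'filename': 'foo-1.0.tar.gz',
+    #   #     'url': 'https://example.com/dist/foo-1.0.tar.gz',
+    #   #     'sha256_digest': '0123abcd'}
+    #
+    # The fragment is parsed by HASHER_HASH and surfaces as an
+    # '<algo>_digest' key; the fragment itself is dropped from the URL.
+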
+ def _get_digest(self, info):
+ """
+ Get a digest from a dictionary by looking at keys of the form
+ 'algo_digest'.
+
+ Returns a 2-tuple (algo, digest) if found, else None. Currently
+ looks only for SHA256, then MD5.
+ """
+ result = None
+ for algo in ('sha256', 'md5'):
+ key = '%s_digest' % algo
+ if key in info:
+ result = (algo, info[key])
+ break
+ return result
+
+ def _update_version_data(self, result, info):
+ """
+ Update a result dictionary (the final result from _get_project) with a
+ dictionary for a specific version, which typically holds information
+ gleaned from a filename or URL for an archive for the distribution.
+ """
+ name = info.pop('name')
+ version = info.pop('version')
+ if version in result:
+ dist = result[version]
+ md = dist.metadata
+ else:
+ dist = make_dist(name, version, scheme=self.scheme)
+ md = dist.metadata
+ dist.digest = digest = self._get_digest(info)
+ url = info['url']
+ result['digests'][url] = digest
+ if md.source_url != info['url']:
+ md.source_url = self.prefer_url(md.source_url, url)
+ result['urls'].setdefault(version, set()).add(url)
+ dist.locator = self
+ result[version] = dist
+
+ def locate(self, requirement, prereleases=False):
+ """
+ Find the most recent distribution which matches the given
+ requirement.
+
+ :param requirement: A requirement of the form 'foo (1.0)' or perhaps
+ 'foo (>= 1.0, < 2.0, != 1.3)'
+ :param prereleases: If ``True``, allow pre-release versions
+ to be located. Otherwise, pre-release versions
+ are not returned.
+ :return: A :class:`Distribution` instance, or ``None`` if no such
+ distribution could be located.
+ """
+ result = None
+ r = parse_requirement(requirement)
+ if r is None: # pragma: no cover
+ raise DistlibException('Not a valid requirement: %r' % requirement)
+ scheme = get_scheme(self.scheme)
+ self.matcher = matcher = scheme.matcher(r.requirement)
+ logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
+ versions = self.get_project(r.name)
+ if len(versions) > 2: # urls and digests keys are present
+ # sometimes, versions are invalid
+ slist = []
+ vcls = matcher.version_class
+ for k in versions:
+ if k in ('urls', 'digests'):
+ continue
+ try:
+ if not matcher.match(k):
+ logger.debug('%s did not match %r', matcher, k)
+ else:
+ if prereleases or not vcls(k).is_prerelease:
+ slist.append(k)
+ else:
+ logger.debug('skipping pre-release '
+ 'version %s of %s', k, matcher.name)
+ except Exception: # pragma: no cover
+ logger.warning('error matching %s with %r', matcher, k)
+ pass # slist.append(k)
+ if len(slist) > 1:
+ slist = sorted(slist, key=scheme.key)
+ if slist:
+ logger.debug('sorted list: %s', slist)
+ version = slist[-1]
+ result = versions[version]
+ if result:
+ if r.extras:
+ result.extras = r.extras
+ result.download_urls = versions.get('urls', {}).get(version, set())
+ d = {}
+ sd = versions.get('digests', {})
+ for url in result.download_urls:
+ if url in sd: # pragma: no cover
+ d[url] = sd[url]
+ result.digests = d
+ self.matcher = None
+ return result
+
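+    # The mapping returned by get_project() holds Distribution instances
+    # keyed by version, plus two bookkeeping keys -- which is why locate()
+    # above checks ``len(versions) > 2``. Shape (hypothetical values):
+    #
+    #   {
+    #       '1.0': <Distribution foo 1.0>,
+    #       '1.1': <Distribution foo 1.1>,
+    #       'urls': {'1.0': {url, ...}, '1.1': {url, ...}},
+    #       'digests': {url: ('sha256', '...'), ...},
+    #   }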
+
+class PyPIRPCLocator(Locator):
+ """
+ This locator uses XML-RPC to locate distributions. It therefore
+ cannot be used with simple mirrors (that only mirror file content).
+ """
+ def __init__(self, url, **kwargs):
+ """
+ Initialise an instance.
+
+ :param url: The URL to use for XML-RPC.
+ :param kwargs: Passed to the superclass constructor.
+ """
+ super(PyPIRPCLocator, self).__init__(**kwargs)
+ self.base_url = url
+ self.client = ServerProxy(url, timeout=3.0)
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ return set(self.client.list_packages())
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ versions = self.client.package_releases(name, True)
+ for v in versions:
+ urls = self.client.release_urls(name, v)
+ data = self.client.release_data(name, v)
+ metadata = Metadata(scheme=self.scheme)
+ metadata.name = data['name']
+ metadata.version = data['version']
+ metadata.license = data.get('license')
+ metadata.keywords = data.get('keywords', [])
+ metadata.summary = data.get('summary')
+ dist = Distribution(metadata)
+ if urls:
+ info = urls[0]
+ metadata.source_url = info['url']
+ dist.digest = self._get_digest(info)
+ dist.locator = self
+ result[v] = dist
+ for info in urls:
+ url = info['url']
+ digest = self._get_digest(info)
+ result['urls'].setdefault(v, set()).add(url)
+ result['digests'][url] = digest
+ return result
+
+class PyPIJSONLocator(Locator):
+ """
+ This locator uses PyPI's JSON interface. It's very limited in functionality
+ and probably not worth using.
+ """
+ def __init__(self, url, **kwargs):
+ super(PyPIJSONLocator, self).__init__(**kwargs)
+ self.base_url = ensure_slash(url)
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ raise NotImplementedError('Not available from this locator')
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ url = urljoin(self.base_url, '%s/json' % quote(name))
+ try:
+ resp = self.opener.open(url)
+ data = resp.read().decode() # for now
+ d = json.loads(data)
+ md = Metadata(scheme=self.scheme)
+ data = d['info']
+ md.name = data['name']
+ md.version = data['version']
+ md.license = data.get('license')
+ md.keywords = data.get('keywords', [])
+ md.summary = data.get('summary')
+ dist = Distribution(md)
+ dist.locator = self
+ urls = d['urls']
+ result[md.version] = dist
+ for info in d['urls']:
+ url = info['url']
+ dist.download_urls.add(url)
+ dist.digests[url] = self._get_digest(info)
+ result['urls'].setdefault(md.version, set()).add(url)
+ result['digests'][url] = self._get_digest(info)
+ # Now get other releases
+ for version, infos in d['releases'].items():
+ if version == md.version:
+ continue # already done
+ omd = Metadata(scheme=self.scheme)
+ omd.name = md.name
+ omd.version = version
+ odist = Distribution(omd)
+ odist.locator = self
+ result[version] = odist
+ for info in infos:
+ url = info['url']
+ odist.download_urls.add(url)
+ odist.digests[url] = self._get_digest(info)
+ result['urls'].setdefault(version, set()).add(url)
+ result['digests'][url] = self._get_digest(info)
+# for info in urls:
+# md.source_url = info['url']
+# dist.digest = self._get_digest(info)
+# dist.locator = self
+# for info in urls:
+# url = info['url']
+# result['urls'].setdefault(md.version, set()).add(url)
+# result['digests'][url] = self._get_digest(info)
+ except Exception as e:
+ self.errors.put(text_type(e))
+ logger.exception('JSON fetch failed: %s', e)
+ return result
+
+
+class Page(object):
+ """
+ This class represents a scraped HTML page.
+ """
+ # The following slightly hairy-looking regex just looks for the contents of
+ # an anchor link, which has an attribute "href" either immediately preceded
+ # or immediately followed by a "rel" attribute. The attribute values can be
+ # declared with double quotes, single quotes or no quotes - which leads to
+ # the length of the expression.
+ _href = re.compile("""
+(rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)?
+href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*))
+(\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))?
+""", re.I | re.S | re.X)
+ _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)
+
+ def __init__(self, data, url):
+ """
+ Initialise an instance with the Unicode page contents and the URL they
+ came from.
+ """
+ self.data = data
+ self.base_url = self.url = url
+ m = self._base.search(self.data)
+ if m:
+ self.base_url = m.group(1)
+
+ _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
+
+ @cached_property
+ def links(self):
+ """
+ Return the URLs of all the links on a page together with information
+ about their "rel" attribute, for determining which ones to treat as
+ downloads and which ones to queue for further scraping.
+ """
+ def clean(url):
+            "Tidy up a URL."
+ scheme, netloc, path, params, query, frag = urlparse(url)
+ return urlunparse((scheme, netloc, quote(path),
+ params, query, frag))
+
+ result = set()
+ for match in self._href.finditer(self.data):
+ d = match.groupdict('')
+ rel = (d['rel1'] or d['rel2'] or d['rel3'] or
+ d['rel4'] or d['rel5'] or d['rel6'])
+ url = d['url1'] or d['url2'] or d['url3']
+ url = urljoin(self.base_url, url)
+ url = unescape(url)
+            url = self._clean_re.sub(lambda m: '%%%02x' % ord(m.group(0)), url)
+ result.add((url, rel))
+ # We sort the result, hoping to bring the most recent versions
+ # to the front
+ result = sorted(result, key=lambda t: t[0], reverse=True)
+ return result
+
+
+class SimpleScrapingLocator(Locator):
+ """
+ A locator which scrapes HTML pages to locate downloads for a distribution.
+ This runs multiple threads to do the I/O; performance is at least as good
+ as pip's PackageFinder, which works in an analogous fashion.
+ """
+
+ # These are used to deal with various Content-Encoding schemes.
+ decoders = {
+ 'deflate': zlib.decompress,
+        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
+ 'none': lambda b: b,
+ }
+
+ def __init__(self, url, timeout=None, num_workers=10, **kwargs):
+ """
+ Initialise an instance.
+ :param url: The root URL to use for scraping.
+ :param timeout: The timeout, in seconds, to be applied to requests.
+ This defaults to ``None`` (no timeout specified).
+        :param num_workers: The number of worker threads you want to do I/O.
+                            This defaults to 10.
+ :param kwargs: Passed to the superclass.
+ """
+ super(SimpleScrapingLocator, self).__init__(**kwargs)
+ self.base_url = ensure_slash(url)
+ self.timeout = timeout
+ self._page_cache = {}
+ self._seen = set()
+ self._to_fetch = queue.Queue()
+ self._bad_hosts = set()
+ self.skip_externals = False
+ self.num_workers = num_workers
+ self._lock = threading.RLock()
+ # See issue #45: we need to be resilient when the locator is used
+ # in a thread, e.g. with concurrent.futures. We can't use self._lock
+ # as it is for coordinating our internal threads - the ones created
+ # in _prepare_threads.
+ self._gplock = threading.RLock()
+ self.platform_check = False # See issue #112
+
+ def _prepare_threads(self):
+ """
+ Threads are created only when get_project is called, and terminate
+ before it returns. They are there primarily to parallelise I/O (i.e.
+ fetching web pages).
+ """
+ self._threads = []
+ for i in range(self.num_workers):
+ t = threading.Thread(target=self._fetch)
+            t.daemon = True
+ t.start()
+ self._threads.append(t)
+
+ def _wait_threads(self):
+ """
+ Tell all the threads to terminate (by sending a sentinel value) and
+ wait for them to do so.
+ """
+ # Note that you need two loops, since you can't say which
+ # thread will get each sentinel
+ for t in self._threads:
+ self._to_fetch.put(None) # sentinel
+ for t in self._threads:
+ t.join()
+ self._threads = []
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ with self._gplock:
+ self.result = result
+ self.project_name = name
+ url = urljoin(self.base_url, '%s/' % quote(name))
+ self._seen.clear()
+ self._page_cache.clear()
+ self._prepare_threads()
+ try:
+ logger.debug('Queueing %s', url)
+ self._to_fetch.put(url)
+ self._to_fetch.join()
+ finally:
+ self._wait_threads()
+ del self.result
+ return result
+
+ platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
+ r'win(32|_amd64)|macosx_?\d+)\b', re.I)
+
+ def _is_platform_dependent(self, url):
+ """
+        Does a URL refer to a platform-specific download?
+ """
+ return self.platform_dependent.search(url)
+
+ def _process_download(self, url):
+ """
+        See if a URL is a suitable download for a project.
+
+ If it is, register information in the result dictionary (for
+ _get_project) about the specific version it's for.
+
+ Note that the return value isn't actually used other than as a boolean
+ value.
+ """
+ if self.platform_check and self._is_platform_dependent(url):
+ info = None
+ else:
+ info = self.convert_url_to_download_info(url, self.project_name)
+ logger.debug('process_download: %s -> %s', url, info)
+ if info:
+ with self._lock: # needed because self.result is shared
+ self._update_version_data(self.result, info)
+ return info
+
+ def _should_queue(self, link, referrer, rel):
+ """
+ Determine whether a link URL from a referring page and with a
+ particular "rel" attribute should be queued for scraping.
+ """
+ scheme, netloc, path, _, _, _ = urlparse(link)
+ if path.endswith(self.source_extensions + self.binary_extensions +
+ self.excluded_extensions):
+ result = False
+ elif self.skip_externals and not link.startswith(self.base_url):
+ result = False
+ elif not referrer.startswith(self.base_url):
+ result = False
+ elif rel not in ('homepage', 'download'):
+ result = False
+ elif scheme not in ('http', 'https', 'ftp'):
+ result = False
+ elif self._is_platform_dependent(link):
+ result = False
+ else:
+ host = netloc.split(':', 1)[0]
+ if host.lower() == 'localhost':
+ result = False
+ else:
+ result = True
+ logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
+ referrer, result)
+ return result
+
+ def _fetch(self):
+ """
+ Get a URL to fetch from the work queue, get the HTML page, examine its
+ links for download candidates and candidates for further scraping.
+
+ This is a handy method to run in a thread.
+ """
+ while True:
+ url = self._to_fetch.get()
+ try:
+ if url:
+ page = self.get_page(url)
+ if page is None: # e.g. after an error
+ continue
+ for link, rel in page.links:
+ if link not in self._seen:
+ try:
+ self._seen.add(link)
+ if (not self._process_download(link) and
+ self._should_queue(link, url, rel)):
+ logger.debug('Queueing %s from %s', link, url)
+ self._to_fetch.put(link)
+ except MetadataInvalidError: # e.g. invalid versions
+ pass
+ except Exception as e: # pragma: no cover
+ self.errors.put(text_type(e))
+ finally:
+ # always do this, to avoid hangs :-)
+ self._to_fetch.task_done()
+ if not url:
+ #logger.debug('Sentinel seen, quitting.')
+ break
+
+ def get_page(self, url):
+ """
+        Get the HTML for a URL, possibly from an in-memory cache.
+
+ XXX TODO Note: this cache is never actually cleared. It's assumed that
+ the data won't get stale over the lifetime of a locator instance (not
+ necessarily true for the default_locator).
+ """
+ # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
+ scheme, netloc, path, _, _, _ = urlparse(url)
+ if scheme == 'file' and os.path.isdir(url2pathname(path)):
+ url = urljoin(ensure_slash(url), 'index.html')
+
+ if url in self._page_cache:
+ result = self._page_cache[url]
+ logger.debug('Returning %s from cache: %s', url, result)
+ else:
+ host = netloc.split(':', 1)[0]
+ result = None
+ if host in self._bad_hosts:
+ logger.debug('Skipping %s due to bad host %s', url, host)
+ else:
+ req = Request(url, headers={'Accept-encoding': 'identity'})
+ try:
+ logger.debug('Fetching %s', url)
+ resp = self.opener.open(req, timeout=self.timeout)
+ logger.debug('Fetched %s', url)
+ headers = resp.info()
+ content_type = headers.get('Content-Type', '')
+ if HTML_CONTENT_TYPE.match(content_type):
+ final_url = resp.geturl()
+ data = resp.read()
+ encoding = headers.get('Content-Encoding')
+ if encoding:
+ decoder = self.decoders[encoding] # fail if not found
+ data = decoder(data)
+ encoding = 'utf-8'
+ m = CHARSET.search(content_type)
+ if m:
+ encoding = m.group(1)
+ try:
+ data = data.decode(encoding)
+ except UnicodeError: # pragma: no cover
+ data = data.decode('latin-1') # fallback
+ result = Page(data, final_url)
+ self._page_cache[final_url] = result
+ except HTTPError as e:
+ if e.code != 404:
+ logger.exception('Fetch failed: %s: %s', url, e)
+ except URLError as e: # pragma: no cover
+ logger.exception('Fetch failed: %s: %s', url, e)
+ with self._lock:
+ self._bad_hosts.add(host)
+ except Exception as e: # pragma: no cover
+ logger.exception('Fetch failed: %s: %s', url, e)
+ finally:
+ self._page_cache[url] = result # even if None (failure)
+ return result
+
+ _distname_re = re.compile('<a href=[^>]*>([^<]+)<')
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ result = set()
+ page = self.get_page(self.base_url)
+ if not page:
+ raise DistlibException('Unable to get %s' % self.base_url)
+ for match in self._distname_re.finditer(page.data):
+ result.add(match.group(1))
+ return result
+
+class DirectoryLocator(Locator):
+ """
+ This class locates distributions in a directory tree.
+ """
+
+ def __init__(self, path, **kwargs):
+ """
+ Initialise an instance.
+ :param path: The root of the directory tree to search.
+ :param kwargs: Passed to the superclass constructor,
+ except for:
+ * recursive - if True (the default), subdirectories are
+ recursed into. If False, only the top-level directory
+ is searched,
+                         is searched.
+ self.recursive = kwargs.pop('recursive', True)
+ super(DirectoryLocator, self).__init__(**kwargs)
+ path = os.path.abspath(path)
+ if not os.path.isdir(path): # pragma: no cover
+ raise DistlibException('Not a directory: %r' % path)
+ self.base_dir = path
+
+ def should_include(self, filename, parent):
+ """
+ Should a filename be considered as a candidate for a distribution
+ archive? As well as the filename, the directory which contains it
+ is provided, though not used by the current implementation.
+ """
+ return filename.endswith(self.downloadable_extensions)
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ for root, dirs, files in os.walk(self.base_dir):
+ for fn in files:
+ if self.should_include(fn, root):
+ fn = os.path.join(root, fn)
+ url = urlunparse(('file', '',
+ pathname2url(os.path.abspath(fn)),
+ '', '', ''))
+ info = self.convert_url_to_download_info(url, name)
+ if info:
+ self._update_version_data(result, info)
+ if not self.recursive:
+ break
+ return result
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ result = set()
+ for root, dirs, files in os.walk(self.base_dir):
+ for fn in files:
+ if self.should_include(fn, root):
+ fn = os.path.join(root, fn)
+ url = urlunparse(('file', '',
+ pathname2url(os.path.abspath(fn)),
+ '', '', ''))
+ info = self.convert_url_to_download_info(url, None)
+ if info:
+ result.add(info['name'])
+ if not self.recursive:
+ break
+ return result
+
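+# A minimal usage sketch (hypothetical path and requirement):
+#
+#   locator = DirectoryLocator('/srv/wheelhouse', recursive=False)
+#   dist = locator.locate('foo (>= 1.0)')
+#   if dist is not None:
+#       print(dist.name_and_version, dist.source_url)
+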
+class JSONLocator(Locator):
+ """
+ This locator uses special extended metadata (not available on PyPI) and is
+ the basis of performant dependency resolution in distlib. Other locators
+ require archive downloads before dependencies can be determined! As you
+ might imagine, that can be slow.
+ """
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ raise NotImplementedError('Not available from this locator')
+
+ def _get_project(self, name):
+ result = {'urls': {}, 'digests': {}}
+ data = get_project_data(name)
+ if data:
+ for info in data.get('files', []):
+ if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
+ continue
+ # We don't store summary in project metadata as it makes
+ # the data bigger for no benefit during dependency
+ # resolution
+ dist = make_dist(data['name'], info['version'],
+ summary=data.get('summary',
+ 'Placeholder for summary'),
+ scheme=self.scheme)
+ md = dist.metadata
+ md.source_url = info['url']
+ # TODO SHA256 digest
+ if 'digest' in info and info['digest']:
+ dist.digest = ('md5', info['digest'])
+ md.dependencies = info.get('requirements', {})
+ dist.exports = info.get('exports', {})
+ result[dist.version] = dist
+ result['urls'].setdefault(dist.version, set()).add(info['url'])
+ return result
+
+class DistPathLocator(Locator):
+ """
+ This locator finds installed distributions in a path. It can be useful for
+ adding to an :class:`AggregatingLocator`.
+ """
+ def __init__(self, distpath, **kwargs):
+ """
+ Initialise an instance.
+
+ :param distpath: A :class:`DistributionPath` instance to search.
+ """
+ super(DistPathLocator, self).__init__(**kwargs)
+ assert isinstance(distpath, DistributionPath)
+ self.distpath = distpath
+
+ def _get_project(self, name):
+ dist = self.distpath.get_distribution(name)
+ if dist is None:
+ result = {'urls': {}, 'digests': {}}
+ else:
+ result = {
+ dist.version: dist,
+ 'urls': {dist.version: set([dist.source_url])},
+ 'digests': {dist.version: set([None])}
+ }
+ return result
+
+
+class AggregatingLocator(Locator):
+ """
+ This class allows you to chain and/or merge a list of locators.
+ """
+ def __init__(self, *locators, **kwargs):
+ """
+ Initialise an instance.
+
+ :param locators: The list of locators to search.
+ :param kwargs: Passed to the superclass constructor,
+ except for:
+ * merge - if False (the default), the first successful
+ search from any of the locators is returned. If True,
+ the results from all locators are merged (this can be
+ slow).
+ """
+ self.merge = kwargs.pop('merge', False)
+ self.locators = locators
+ super(AggregatingLocator, self).__init__(**kwargs)
+
+ def clear_cache(self):
+ super(AggregatingLocator, self).clear_cache()
+ for locator in self.locators:
+ locator.clear_cache()
+
+ def _set_scheme(self, value):
+ self._scheme = value
+ for locator in self.locators:
+ locator.scheme = value
+
+ scheme = property(Locator.scheme.fget, _set_scheme)
+
+ def _get_project(self, name):
+ result = {}
+ for locator in self.locators:
+ d = locator.get_project(name)
+ if d:
+ if self.merge:
+ files = result.get('urls', {})
+ digests = result.get('digests', {})
+ # next line could overwrite result['urls'], result['digests']
+ result.update(d)
+ df = result.get('urls')
+ if files and df:
+ for k, v in files.items():
+ if k in df:
+ df[k] |= v
+ else:
+ df[k] = v
+ dd = result.get('digests')
+ if digests and dd:
+ dd.update(digests)
+ else:
+ # See issue #18. If any dists are found and we're looking
+ # for specific constraints, we only return something if
+ # a match is found. For example, if a DirectoryLocator
+ # returns just foo (1.0) while we're looking for
+ # foo (>= 2.0), we'll pretend there was nothing there so
+ # that subsequent locators can be queried. Otherwise we
+ # would just return foo (1.0) which would then lead to a
+ # failure to find foo (>= 2.0), because other locators
+ # weren't searched. Note that this only matters when
+ # merge=False.
+ if self.matcher is None:
+ found = True
+ else:
+ found = False
+ for k in d:
+ if self.matcher.match(k):
+ found = True
+ break
+ if found:
+ result = d
+ break
+ return result
+
+ def get_distribution_names(self):
+ """
+ Return all the distribution names known to this locator.
+ """
+ result = set()
+ for locator in self.locators:
+ try:
+ result |= locator.get_distribution_names()
+ except NotImplementedError:
+ pass
+ return result
+
+
+# We use a legacy scheme simply because most of the dists on PyPI use legacy
+# versions which don't conform to PEP 426 / PEP 440.
+default_locator = AggregatingLocator(
+ JSONLocator(),
+ SimpleScrapingLocator('https://pypi.org/simple/',
+ timeout=3.0),
+ scheme='legacy')
+
+locate = default_locator.locate
+
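+# A minimal sketch of the module-level convenience function (hypothetical
+# requirement; needs network access to PyPI):
+#
+#   from distlib.locators import locate
+#   dist = locate('requests (>= 2.0)')
+#   if dist:
+#       print(dist.name_and_version, sorted(dist.download_urls))
+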
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'
+ r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')
+
+class DependencyFinder(object):
+ """
+ Locate dependencies for distributions.
+ """
+
+ def __init__(self, locator=None):
+ """
+ Initialise an instance, using the specified locator
+ to locate distributions.
+ """
+ self.locator = locator or default_locator
+ self.scheme = get_scheme(self.locator.scheme)
+
+ def add_distribution(self, dist):
+ """
+ Add a distribution to the finder. This will update internal information
+ about who provides what.
+ :param dist: The distribution to add.
+ """
+ logger.debug('adding distribution %s', dist)
+ name = dist.key
+ self.dists_by_name[name] = dist
+ self.dists[(name, dist.version)] = dist
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Add to provided: %s, %s, %s', name, version, dist)
+ self.provided.setdefault(name, set()).add((version, dist))
+
+ def remove_distribution(self, dist):
+ """
+ Remove a distribution from the finder. This will update internal
+ information about who provides what.
+ :param dist: The distribution to remove.
+ """
+ logger.debug('removing distribution %s', dist)
+ name = dist.key
+ del self.dists_by_name[name]
+ del self.dists[(name, dist.version)]
+ for p in dist.provides:
+ name, version = parse_name_and_version(p)
+ logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
+ s = self.provided[name]
+ s.remove((version, dist))
+ if not s:
+ del self.provided[name]
+
+ def get_matcher(self, reqt):
+ """
+ Get a version matcher for a requirement.
+ :param reqt: The requirement
+ :type reqt: str
+ :return: A version matcher (an instance of
+ :class:`distlib.version.Matcher`).
+ """
+ try:
+ matcher = self.scheme.matcher(reqt)
+ except UnsupportedVersionError: # pragma: no cover
+ # XXX compat-mode if cannot read the version
+ name = reqt.split()[0]
+ matcher = self.scheme.matcher(name)
+ return matcher
+
+ def find_providers(self, reqt):
+ """
+ Find the distributions which can fulfill a requirement.
+
+ :param reqt: The requirement.
+ :type reqt: str
+        :return: A set of distributions which can fulfill the requirement.
+ """
+ matcher = self.get_matcher(reqt)
+ name = matcher.key # case-insensitive
+ result = set()
+ provided = self.provided
+ if name in provided:
+ for version, provider in provided[name]:
+ try:
+ match = matcher.match(version)
+ except UnsupportedVersionError:
+ match = False
+
+ if match:
+ result.add(provider)
+ break
+ return result
+
+ def try_to_replace(self, provider, other, problems):
+ """
+ Attempt to replace one provider with another. This is typically used
+ when resolving dependencies from multiple sources, e.g. A requires
+ (B >= 1.0) while C requires (B >= 1.1).
+
+ For successful replacement, ``provider`` must meet all the requirements
+ which ``other`` fulfills.
+
+ :param provider: The provider we are trying to replace with.
+ :param other: The provider we're trying to replace.
+ :param problems: If False is returned, this will contain what
+ problems prevented replacement. This is currently
+ a tuple of the literal string 'cantreplace',
+ ``provider``, ``other`` and the set of requirements
+ that ``provider`` couldn't fulfill.
+ :return: True if we can replace ``other`` with ``provider``, else
+ False.
+ """
+ rlist = self.reqts[other]
+ unmatched = set()
+ for s in rlist:
+ matcher = self.get_matcher(s)
+ if not matcher.match(provider.version):
+ unmatched.add(s)
+ if unmatched:
+ # can't replace other with provider
+ problems.add(('cantreplace', provider, other,
+ frozenset(unmatched)))
+ result = False
+ else:
+ # can replace other with provider
+ self.remove_distribution(other)
+ del self.reqts[other]
+ for s in rlist:
+ self.reqts.setdefault(provider, set()).add(s)
+ self.add_distribution(provider)
+ result = True
+ return result
+
+ def find(self, requirement, meta_extras=None, prereleases=False):
+ """
+ Find a distribution and all distributions it depends on.
+
+ :param requirement: The requirement specifying the distribution to
+ find, or a Distribution instance.
+ :param meta_extras: A list of meta extras such as :test:, :build: and
+ so on.
+ :param prereleases: If ``True``, allow pre-release versions to be
+ returned - otherwise, don't return prereleases
+ unless they're all that's available.
+
+ Return a set of :class:`Distribution` instances and a set of
+ problems.
+
+ The distributions returned should be such that they have the
+ :attr:`required` attribute set to ``True`` if they were
+ from the ``requirement`` passed to ``find()``, and they have the
+ :attr:`build_time_dependency` attribute set to ``True`` unless they
+ are post-installation dependencies of the ``requirement``.
+
+        Each problem is a tuple consisting of the string
+        ``'unsatisfied'`` and the requirement which couldn't be satisfied
+        by any distribution known to the locator.
+ """
+
+ self.provided = {}
+ self.dists = {}
+ self.dists_by_name = {}
+ self.reqts = {}
+
+ meta_extras = set(meta_extras or [])
+ if ':*:' in meta_extras:
+ meta_extras.remove(':*:')
+ # :meta: and :run: are implicitly included
+ meta_extras |= set([':test:', ':build:', ':dev:'])
+
+ if isinstance(requirement, Distribution):
+ dist = odist = requirement
+ logger.debug('passed %s as requirement', odist)
+ else:
+ dist = odist = self.locator.locate(requirement,
+ prereleases=prereleases)
+ if dist is None:
+ raise DistlibException('Unable to locate %r' % requirement)
+ logger.debug('located %s', odist)
+ dist.requested = True
+ problems = set()
+ todo = set([dist])
+ install_dists = set([odist])
+ while todo:
+ dist = todo.pop()
+ name = dist.key # case-insensitive
+ if name not in self.dists_by_name:
+ self.add_distribution(dist)
+ else:
+ #import pdb; pdb.set_trace()
+ other = self.dists_by_name[name]
+ if other != dist:
+ self.try_to_replace(dist, other, problems)
+
+ ireqts = dist.run_requires | dist.meta_requires
+ sreqts = dist.build_requires
+ ereqts = set()
+ if meta_extras and dist in install_dists:
+ for key in ('test', 'build', 'dev'):
+ e = ':%s:' % key
+ if e in meta_extras:
+ ereqts |= getattr(dist, '%s_requires' % key)
+ all_reqts = ireqts | sreqts | ereqts
+ for r in all_reqts:
+ providers = self.find_providers(r)
+ if not providers:
+ logger.debug('No providers found for %r', r)
+ provider = self.locator.locate(r, prereleases=prereleases)
+ # If no provider is found and we didn't consider
+ # prereleases, consider them now.
+ if provider is None and not prereleases:
+ provider = self.locator.locate(r, prereleases=True)
+ if provider is None:
+ logger.debug('Cannot satisfy %r', r)
+ problems.add(('unsatisfied', r))
+ else:
+ n, v = provider.key, provider.version
+ if (n, v) not in self.dists:
+ todo.add(provider)
+ providers.add(provider)
+ if r in ireqts and dist in install_dists:
+ install_dists.add(provider)
+ logger.debug('Adding %s to install_dists',
+ provider.name_and_version)
+ for p in providers:
+ name = p.key
+ if name not in self.dists_by_name:
+ self.reqts.setdefault(p, set()).add(r)
+ else:
+ other = self.dists_by_name[name]
+ if other != p:
+ # see if other can be replaced by p
+ self.try_to_replace(p, other, problems)
+
+ dists = set(self.dists.values())
+ for dist in dists:
+ dist.build_time_dependency = dist not in install_dists
+ if dist.build_time_dependency:
+ logger.debug('%s is a build-time dependency only.',
+ dist.name_and_version)
+ logger.debug('find done for %s', odist)
+ return dists, problems
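+
+# A minimal usage sketch (hypothetical requirement; uses the default
+# locator, so it needs network access):
+#
+#   finder = DependencyFinder()
+#   dists, problems = finder.find('requests (== 2.0.0)')
+#   for d in sorted(dists, key=lambda d: d.name_and_version):
+#       tag = ' (build-time only)' if d.build_time_dependency else ''
+#       print(d.name_and_version + tag)
+#   for p in problems:
+#       print('problem:', p)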
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py
new file mode 100644
index 00000000..ca0fe442
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2013 Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Class representing the list of files in a distribution.
+
+Equivalent to distutils.filelist, but fixes some problems.
+"""
+import fnmatch
+import logging
+import os
+import re
+import sys
+
+from . import DistlibException
+from .compat import fsdecode
+from .util import convert_path
+
+
+__all__ = ['Manifest']
+
+logger = logging.getLogger(__name__)
+
+# a \ followed by some spaces + EOL
+_COLLAPSE_PATTERN = re.compile(r'\\\s*\n', re.M)
+_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
+
+#
+# Due to the different results returned by fnmatch.translate, we need
+# to do slightly different processing for Python 2.7 and 3.2 ... this needed
+# to be brought in for Python 3.6 onwards.
+#
+_PYTHON_VERSION = sys.version_info[:2]
+
+class Manifest(object):
+    """A list of files built by exploring the filesystem and filtered by
+ applying various patterns to what we find there.
+ """
+
+ def __init__(self, base=None):
+ """
+ Initialise an instance.
+
+ :param base: The base directory to explore under.
+ """
+ self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
+ self.prefix = self.base + os.sep
+ self.allfiles = None
+ self.files = set()
+
+ #
+ # Public API
+ #
+
+ def findall(self):
+ """Find all files under the base and set ``allfiles`` to the absolute
+ pathnames of files found.
+ """
+ from stat import S_ISREG, S_ISDIR, S_ISLNK
+
+ self.allfiles = allfiles = []
+ root = self.base
+ stack = [root]
+ pop = stack.pop
+ push = stack.append
+
+ while stack:
+ root = pop()
+ names = os.listdir(root)
+
+ for name in names:
+ fullname = os.path.join(root, name)
+
+ # Avoid excess stat calls -- just one will do, thank you!
+ stat = os.stat(fullname)
+ mode = stat.st_mode
+ if S_ISREG(mode):
+ allfiles.append(fsdecode(fullname))
+ elif S_ISDIR(mode) and not S_ISLNK(mode):
+ push(fullname)
+
+ def add(self, item):
+ """
+ Add a file to the manifest.
+
+ :param item: The pathname to add. This can be relative to the base.
+ """
+ if not item.startswith(self.prefix):
+ item = os.path.join(self.base, item)
+ self.files.add(os.path.normpath(item))
+
+ def add_many(self, items):
+ """
+ Add a list of files to the manifest.
+
+ :param items: The pathnames to add. These can be relative to the base.
+ """
+ for item in items:
+ self.add(item)
+
+ def sorted(self, wantdirs=False):
+ """
+ Return sorted files in directory order
+ """
+
+ def add_dir(dirs, d):
+ dirs.add(d)
+ logger.debug('add_dir added %s', d)
+ if d != self.base:
+ parent, _ = os.path.split(d)
+ assert parent not in ('', '/')
+ add_dir(dirs, parent)
+
+ result = set(self.files) # make a copy!
+ if wantdirs:
+ dirs = set()
+ for f in result:
+ add_dir(dirs, os.path.dirname(f))
+ result |= dirs
+ return [os.path.join(*path_tuple) for path_tuple in
+ sorted(os.path.split(path) for path in result)]
+
+ def clear(self):
+ """Clear all collected files."""
+ self.files = set()
+ self.allfiles = []
+
+ def process_directive(self, directive):
+ """
+ Process a directive which either adds some files from ``allfiles`` to
+ ``files``, or removes some files from ``files``.
+
+ :param directive: The directive to process. This should be in a format
+ compatible with distutils ``MANIFEST.in`` files:
+
+ http://docs.python.org/distutils/sourcedist.html#commands
+ """
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dirpattern).
+ action, patterns, thedir, dirpattern = self._parse_directive(directive)
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+ if action == 'include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=True):
+ logger.warning('no files found matching %r', pattern)
+
+ elif action == 'exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, anchor=True)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'found matching %r', pattern)
+
+ elif action == 'global-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, anchor=False):
+ logger.warning('no files found matching %r '
+ 'anywhere in distribution', pattern)
+
+ elif action == 'global-exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, anchor=False)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'matching %r found anywhere in '
+ # 'distribution', pattern)
+
+ elif action == 'recursive-include':
+ for pattern in patterns:
+ if not self._include_pattern(pattern, prefix=thedir):
+ logger.warning('no files found matching %r '
+ 'under directory %r', pattern, thedir)
+
+ elif action == 'recursive-exclude':
+ for pattern in patterns:
+ found = self._exclude_pattern(pattern, prefix=thedir)
+ #if not found:
+ # logger.warning('no previously-included files '
+ # 'matching %r found under directory %r',
+ # pattern, thedir)
+
+ elif action == 'graft':
+ if not self._include_pattern(None, prefix=dirpattern):
+ logger.warning('no directories found matching %r',
+ dirpattern)
+
+ elif action == 'prune':
+ if not self._exclude_pattern(None, prefix=dirpattern):
+ logger.warning('no previously-included directories found '
+ 'matching %r', dirpattern)
+ else: # pragma: no cover
+ # This should never happen, as it should be caught in
+ # _parse_template_line
+ raise DistlibException(
+ 'invalid action %r' % action)
+
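+    # A minimal sketch of driving this with MANIFEST.in-style lines
+    # (hypothetical project path and patterns):
+    #
+    #   manifest = Manifest('/path/to/project')
+    #   for line in ('include *.txt',
+    #                'recursive-include docs *.rst',
+    #                'prune build'):
+    #       manifest.process_directive(line)
+    #   print(manifest.sorted())
+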
+ #
+ # Private API
+ #
+
+ def _parse_directive(self, directive):
+ """
+ Validate a directive.
+ :param directive: The directive to validate.
+ :return: A tuple of action, patterns, thedir, dir_patterns
+ """
+ words = directive.split()
+ if len(words) == 1 and words[0] not in ('include', 'exclude',
+ 'global-include',
+ 'global-exclude',
+ 'recursive-include',
+ 'recursive-exclude',
+ 'graft', 'prune'):
+ # no action given, let's use the default 'include'
+ words.insert(0, 'include')
+
+ action = words[0]
+ patterns = thedir = dir_pattern = None
+
+ if action in ('include', 'exclude',
+ 'global-include', 'global-exclude'):
+ if len(words) < 2:
+ raise DistlibException(
+ '%r expects <pattern1> <pattern2> ...' % action)
+
+ patterns = [convert_path(word) for word in words[1:]]
+
+ elif action in ('recursive-include', 'recursive-exclude'):
+ if len(words) < 3:
+ raise DistlibException(
+ '%r expects <dir> <pattern1> <pattern2> ...' % action)
+
+ thedir = convert_path(words[1])
+ patterns = [convert_path(word) for word in words[2:]]
+
+ elif action in ('graft', 'prune'):
+ if len(words) != 2:
+ raise DistlibException(
+ '%r expects a single <dir_pattern>' % action)
+
+ dir_pattern = convert_path(words[1])
+
+ else:
+ raise DistlibException('unknown action %r' % action)
+
+ return action, patterns, thedir, dir_pattern
+
+ def _include_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Select strings (presumably filenames) from 'self.files' that
+ match 'pattern', a Unix-style wildcard (glob) pattern.
+
+ Patterns are not quite the same as implemented by the 'fnmatch'
+ module: '*' and '?' match non-special characters, where "special"
+ is platform-dependent: slash on Unix; colon, slash, and backslash on
+ DOS/Windows; and colon on Mac OS.
+
+ If 'anchor' is true (the default), then the pattern match is more
+ stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
+ 'anchor' is false, both of these will match.
+
+ If 'prefix' is supplied, then only filenames starting with 'prefix'
+ (itself a pattern) and ending with 'pattern', with anything in between
+ them, will match. 'anchor' is ignored in this case.
+
+ If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
+ 'pattern' is assumed to be either a string containing a regex or a
+ regex object -- no translation is done, the regex is just compiled
+ and used as-is.
+
+ Selected strings will be added to self.files.
+
+ Return True if files are found.
+ """
+ # XXX docstring lying about what the special chars are?
+ found = False
+ pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+
+ # delayed loading of allfiles list
+ if self.allfiles is None:
+ self.findall()
+
+ for name in self.allfiles:
+ if pattern_re.search(name):
+ self.files.add(name)
+ found = True
+ return found
+
+ def _exclude_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Remove strings (presumably filenames) from 'files' that match
+ 'pattern'.
+
+        Other parameters are the same as for '_include_pattern()', above.
+        The set 'self.files' is modified in place. Return True if files are
+        found.
+
+        This exists to allow, for example, exclusion of SCM subdirectories
+        when packaging source distributions.
+ """
+ found = False
+ pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
+ for f in list(self.files):
+ if pattern_re.search(f):
+ self.files.remove(f)
+ found = True
+ return found
+
+ def _translate_pattern(self, pattern, anchor=True, prefix=None,
+ is_regex=False):
+ """Translate a shell-like wildcard pattern to a compiled regular
+ expression.
+
+ Return the compiled regex. If 'is_regex' true,
+ then 'pattern' is directly compiled to a regex (if it's a string)
+ or just returned as-is (assumes it's a regex object).
+ """
+ if is_regex:
+ if isinstance(pattern, str):
+ return re.compile(pattern)
+ else:
+ return pattern
+
+ if _PYTHON_VERSION > (3, 2):
+ # ditch start and end characters
+ start, _, end = self._glob_to_re('_').partition('_')
+
+ if pattern:
+ pattern_re = self._glob_to_re(pattern)
+ if _PYTHON_VERSION > (3, 2):
+ assert pattern_re.startswith(start) and pattern_re.endswith(end)
+ else:
+ pattern_re = ''
+
+ base = re.escape(os.path.join(self.base, ''))
+ if prefix is not None:
+ # ditch end of pattern character
+ if _PYTHON_VERSION <= (3, 2):
+ empty_pattern = self._glob_to_re('')
+ prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
+ else:
+ prefix_re = self._glob_to_re(prefix)
+ assert prefix_re.startswith(start) and prefix_re.endswith(end)
+ prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
+ sep = os.sep
+ if os.sep == '\\':
+ sep = r'\\'
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + sep.join((prefix_re,
+ '.*' + pattern_re))
+ else:
+ pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
+ pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
+ pattern_re, end)
+ else: # no prefix -- respect anchor flag
+ if anchor:
+ if _PYTHON_VERSION <= (3, 2):
+ pattern_re = '^' + base + pattern_re
+ else:
+ pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
+
+ return re.compile(pattern_re)
+
+ def _glob_to_re(self, pattern):
+ """Translate a shell-like glob pattern to a regular expression.
+
+ Return a string containing the regex. Differs from
+ 'fnmatch.translate()' in that '*' does not match "special characters"
+ (which are platform-specific).
+ """
+ pattern_re = fnmatch.translate(pattern)
+
+ # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
+ # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
+ # and by extension they shouldn't match such "special characters" under
+ # any OS. So change all non-escaped dots in the RE to match any
+ # character except the special characters (currently: just os.sep).
+ sep = os.sep
+ if os.sep == '\\':
+ # we're using a regex to manipulate a regex, so we need
+ # to escape the backslash twice
+ sep = r'\\\\'
+ escaped = r'\1[^%s]' % sep
+ pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
+ return pattern_re
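+
+
+# --- Illustrative sketch (editor's note, not part of distlib) ---------------
+# Hedged examples of the pattern translation above, assuming a Manifest
+# instance ``m`` rooted at '/tmp/project' on POSIX; the exact regex text
+# varies across Python versions.
+#
+#   >>> m._glob_to_re('*.py')          # '*' will not cross os.sep
+#   '(?s:[^/]*\\.py)\\Z'
+#   >>> regex = m._translate_pattern('*.py', anchor=True)
+#   >>> bool(regex.search('/tmp/project/foo.py'))
+#   True
+#   >>> bool(regex.search('/tmp/project/sub/foo.py'))
+#   False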
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py
new file mode 100644
index 00000000..ee1f3e23
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Parser for the environment markers micro-language defined in PEP 508.
+"""
+
+# Note: In PEP 345, the micro-language was Python compatible, so the ast
+# module could be used to parse it. However, PEP 508 introduced operators such
+# as ~= and === which aren't in Python, necessitating a different approach.
+
+import os
+import sys
+import platform
+import re
+
+from .compat import python_implementation, urlparse, string_types
+from .util import in_venv, parse_marker
+
+__all__ = ['interpret']
+
+def _is_literal(o):
+ if not isinstance(o, string_types) or not o:
+ return False
+ return o[0] in '\'"'
+
+class Evaluator(object):
+ """
+    This class is used to evaluate marker expressions.
+ """
+
+ operations = {
+ '==': lambda x, y: x == y,
+ '===': lambda x, y: x == y,
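+        # Note (editor): '~=' is approximated below as ">=" on the operand
+        # strings; it does not implement full PEP 440 compatible-release
+        # semantics.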
+ '~=': lambda x, y: x == y or x > y,
+ '!=': lambda x, y: x != y,
+ '<': lambda x, y: x < y,
+ '<=': lambda x, y: x == y or x < y,
+ '>': lambda x, y: x > y,
+ '>=': lambda x, y: x == y or x > y,
+ 'and': lambda x, y: x and y,
+ 'or': lambda x, y: x or y,
+ 'in': lambda x, y: x in y,
+ 'not in': lambda x, y: x not in y,
+ }
+
+ def evaluate(self, expr, context):
+ """
+        Evaluate a marker expression returned by the :func:`parse_marker`
+ function in the specified context.
+ """
+ if isinstance(expr, string_types):
+ if expr[0] in '\'"':
+ result = expr[1:-1]
+ else:
+ if expr not in context:
+ raise SyntaxError('unknown variable: %s' % expr)
+ result = context[expr]
+ else:
+ assert isinstance(expr, dict)
+ op = expr['op']
+ if op not in self.operations:
+ raise NotImplementedError('op not implemented: %s' % op)
+ elhs = expr['lhs']
+ erhs = expr['rhs']
+ if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
+ raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))
+
+ lhs = self.evaluate(elhs, context)
+ rhs = self.evaluate(erhs, context)
+ result = self.operations[op](lhs, rhs)
+ return result
+
+def default_context():
+ def format_full_version(info):
+ version = '%s.%s.%s' % (info.major, info.minor, info.micro)
+ kind = info.releaselevel
+ if kind != 'final':
+ version += kind[0] + str(info.serial)
+ return version
+
+ if hasattr(sys, 'implementation'):
+ implementation_version = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ else:
+ implementation_version = '0'
+ implementation_name = ''
+
+ result = {
+ 'implementation_name': implementation_name,
+ 'implementation_version': implementation_version,
+ 'os_name': os.name,
+ 'platform_machine': platform.machine(),
+ 'platform_python_implementation': platform.python_implementation(),
+ 'platform_release': platform.release(),
+ 'platform_system': platform.system(),
+ 'platform_version': platform.version(),
+ 'platform_in_venv': str(in_venv()),
+ 'python_full_version': platform.python_version(),
+ 'python_version': platform.python_version()[:3],
+ 'sys_platform': sys.platform,
+ }
+ return result
+
+DEFAULT_CONTEXT = default_context()
+del default_context
+
+evaluator = Evaluator()
+
+def interpret(marker, execution_context=None):
+ """
+ Interpret a marker and return a result depending on environment.
+
+ :param marker: The marker to interpret.
+ :type marker: str
+ :param execution_context: The context used for name lookup.
+ :type execution_context: mapping
+ """
+ try:
+ expr, rest = parse_marker(marker)
+ except Exception as e:
+ raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
+ if rest and rest[0] != '#':
+ raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
+ context = dict(DEFAULT_CONTEXT)
+ if execution_context:
+ context.update(execution_context)
+ return evaluator.evaluate(expr, context)
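+
+
+# --- Illustrative sketch (editor's note, not part of distlib) ---------------
+# Hedged examples of interpret(); comparisons are plain string comparisons
+# on the context values, and the results shown assume CPython 3.7 on a
+# POSIX system.
+#
+#   >>> interpret('python_version >= "3.0"')
+#   True
+#   >>> interpret('os_name == "posix" and extra == "test"', {'extra': 'test'})
+#   True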
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py
new file mode 100644
index 00000000..2d61378e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py
@@ -0,0 +1,1096 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""Implementation of the Metadata for Python packages PEPs.
+
+Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and the experimental 2.0).
+"""
+from __future__ import unicode_literals
+
+import codecs
+from email import message_from_file
+import json
+import logging
+import re
+
+
+from . import DistlibException, __version__
+from .compat import StringIO, string_types, text_type
+from .markers import interpret
+from .util import extract_by_key, get_extras
+from .version import get_scheme, PEP440_VERSION_RE
+
+logger = logging.getLogger(__name__)
+
+
+class MetadataMissingError(DistlibException):
+ """A required metadata is missing"""
+
+
+class MetadataConflictError(DistlibException):
+ """Attempt to read or write metadata fields that are conflictual."""
+
+
+class MetadataUnrecognizedVersionError(DistlibException):
+ """Unknown metadata version number."""
+
+
+class MetadataInvalidError(DistlibException):
+ """A metadata value is invalid"""
+
+# public API of this module
+__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
+
+# Encoding used for the PKG-INFO files
+PKG_INFO_ENCODING = 'utf-8'
+
+# preferred version. Hopefully will be changed
+# to 1.2 once PEP 345 is supported everywhere
+PKG_INFO_PREFERRED_VERSION = '1.1'
+
+_LINE_PREFIX_1_2 = re.compile('\n \\|')
+_LINE_PREFIX_PRE_1_2 = re.compile('\n ')
+_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'License')
+
+_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'License', 'Classifier', 'Download-URL', 'Obsoletes',
+ 'Provides', 'Requires')
+
+_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
+ 'Download-URL')
+
+_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'Maintainer', 'Maintainer-email', 'License',
+ 'Classifier', 'Download-URL', 'Obsoletes-Dist',
+ 'Project-URL', 'Provides-Dist', 'Requires-Dist',
+ 'Requires-Python', 'Requires-External')
+
+_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
+ 'Obsoletes-Dist', 'Requires-External', 'Maintainer',
+ 'Maintainer-email', 'Project-URL')
+
+_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
+ 'Supported-Platform', 'Summary', 'Description',
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
+ 'Maintainer', 'Maintainer-email', 'License',
+ 'Classifier', 'Download-URL', 'Obsoletes-Dist',
+ 'Project-URL', 'Provides-Dist', 'Requires-Dist',
+ 'Requires-Python', 'Requires-External', 'Private-Version',
+ 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
+ 'Provides-Extra')
+
+_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
+ 'Setup-Requires-Dist', 'Extension')
+
+# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
+# the metadata. Include them in the tuple literal below to allow them
+# (for now).
+_566_FIELDS = _426_FIELDS + ('Description-Content-Type',
+ 'Requires', 'Provides')
+
+_566_MARKERS = ('Description-Content-Type',)
+
+_ALL_FIELDS = set()
+_ALL_FIELDS.update(_241_FIELDS)
+_ALL_FIELDS.update(_314_FIELDS)
+_ALL_FIELDS.update(_345_FIELDS)
+_ALL_FIELDS.update(_426_FIELDS)
+_ALL_FIELDS.update(_566_FIELDS)
+
+EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
+
+
+def _version2fieldlist(version):
+ if version == '1.0':
+ return _241_FIELDS
+ elif version == '1.1':
+ return _314_FIELDS
+ elif version == '1.2':
+ return _345_FIELDS
+ elif version in ('1.3', '2.1'):
+ return _345_FIELDS + _566_FIELDS
+ elif version == '2.0':
+ return _426_FIELDS
+ raise MetadataUnrecognizedVersionError(version)
+
+
+def _best_version(fields):
+ """Detect the best version depending on the fields used."""
+ def _has_marker(keys, markers):
+ for marker in markers:
+ if marker in keys:
+ return True
+ return False
+
+ keys = []
+ for key, value in fields.items():
+ if value in ([], 'UNKNOWN', None):
+ continue
+ keys.append(key)
+
+ possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1']
+
+    # first let's try to see if a field is not part of one of the versions
+ for key in keys:
+ if key not in _241_FIELDS and '1.0' in possible_versions:
+ possible_versions.remove('1.0')
+ logger.debug('Removed 1.0 due to %s', key)
+ if key not in _314_FIELDS and '1.1' in possible_versions:
+ possible_versions.remove('1.1')
+ logger.debug('Removed 1.1 due to %s', key)
+ if key not in _345_FIELDS and '1.2' in possible_versions:
+ possible_versions.remove('1.2')
+ logger.debug('Removed 1.2 due to %s', key)
+ if key not in _566_FIELDS and '1.3' in possible_versions:
+ possible_versions.remove('1.3')
+ logger.debug('Removed 1.3 due to %s', key)
+ if key not in _566_FIELDS and '2.1' in possible_versions:
+ if key != 'Description': # In 2.1, description allowed after headers
+ possible_versions.remove('2.1')
+ logger.debug('Removed 2.1 due to %s', key)
+ if key not in _426_FIELDS and '2.0' in possible_versions:
+ possible_versions.remove('2.0')
+ logger.debug('Removed 2.0 due to %s', key)
+
+    # possible_versions now contains only qualifying versions
+    if len(possible_versions) == 1:
+        return possible_versions[0]  # found!
+ elif len(possible_versions) == 0:
+ logger.debug('Out of options - unknown metadata set: %s', fields)
+ raise MetadataConflictError('Unknown metadata set')
+
+ # let's see if one unique marker is found
+ is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
+ is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
+ is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
+ is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
+ if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1:
+ raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields')
+
+    # we have the choice between 1.0, 1.2 and 2.0
+    # - 1.0 has a broken Summary field but works with all tools
+    # - 1.1 is best avoided
+    # - 1.2 fixes Summary but has seen little adoption
+    # - 2.0 adds more features and is very new
+ if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0:
+ # we couldn't find any specific marker
+ if PKG_INFO_PREFERRED_VERSION in possible_versions:
+ return PKG_INFO_PREFERRED_VERSION
+ if is_1_1:
+ return '1.1'
+ if is_1_2:
+ return '1.2'
+ if is_2_1:
+ return '2.1'
+
+ return '2.0'
+
+_ATTR2FIELD = {
+ 'metadata_version': 'Metadata-Version',
+ 'name': 'Name',
+ 'version': 'Version',
+ 'platform': 'Platform',
+ 'supported_platform': 'Supported-Platform',
+ 'summary': 'Summary',
+ 'description': 'Description',
+ 'keywords': 'Keywords',
+ 'home_page': 'Home-page',
+ 'author': 'Author',
+ 'author_email': 'Author-email',
+ 'maintainer': 'Maintainer',
+ 'maintainer_email': 'Maintainer-email',
+ 'license': 'License',
+ 'classifier': 'Classifier',
+ 'download_url': 'Download-URL',
+ 'obsoletes_dist': 'Obsoletes-Dist',
+ 'provides_dist': 'Provides-Dist',
+ 'requires_dist': 'Requires-Dist',
+ 'setup_requires_dist': 'Setup-Requires-Dist',
+ 'requires_python': 'Requires-Python',
+ 'requires_external': 'Requires-External',
+ 'requires': 'Requires',
+ 'provides': 'Provides',
+ 'obsoletes': 'Obsoletes',
+ 'project_url': 'Project-URL',
+ 'private_version': 'Private-Version',
+ 'obsoleted_by': 'Obsoleted-By',
+ 'extension': 'Extension',
+ 'provides_extra': 'Provides-Extra',
+}
+
+_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
+_VERSIONS_FIELDS = ('Requires-Python',)
+_VERSION_FIELDS = ('Version',)
+_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
+ 'Requires', 'Provides', 'Obsoletes-Dist',
+ 'Provides-Dist', 'Requires-Dist', 'Requires-External',
+ 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
+ 'Provides-Extra', 'Extension')
+_LISTTUPLEFIELDS = ('Project-URL',)
+
+_ELEMENTSFIELD = ('Keywords',)
+
+_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
+
+_MISSING = object()
+
+_FILESAFE = re.compile('[^A-Za-z0-9.]+')
+
+
+def _get_name_and_version(name, version, for_filename=False):
+ """Return the distribution name with version.
+
+ If for_filename is true, return a filename-escaped form."""
+ if for_filename:
+ # For both name and version any runs of non-alphanumeric or '.'
+ # characters are replaced with a single '-'. Additionally any
+ # spaces in the version string become '.'
+ name = _FILESAFE.sub('-', name)
+ version = _FILESAFE.sub('-', version.replace(' ', '.'))
+ return '%s-%s' % (name, version)
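+
+# Illustrative sketch (editor's note; hypothetical values):
+#   _get_name_and_version('my pkg', '1.0 beta', for_filename=True)
+#       -> 'my-pkg-1.0.beta'
+#   _get_name_and_version('my pkg', '1.0 beta')
+#       -> 'my pkg-1.0 beta'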
+
+
+class LegacyMetadata(object):
+ """The legacy metadata of a release.
+
+ Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
+ instantiate the class with one of these arguments (or none):
+    - *path*, the path to a metadata file
+    - *fileobj*, a file-like object containing the metadata
+    - *mapping*, a dict-like object with the metadata
+    - *scheme*, a version scheme name
+ """
+ # TODO document the mapping API and UNKNOWN default key
+
+ def __init__(self, path=None, fileobj=None, mapping=None,
+ scheme='default'):
+ if [path, fileobj, mapping].count(None) < 2:
+ raise TypeError('path, fileobj and mapping are exclusive')
+ self._fields = {}
+ self.requires_files = []
+ self._dependencies = None
+ self.scheme = scheme
+ if path is not None:
+ self.read(path)
+ elif fileobj is not None:
+ self.read_file(fileobj)
+ elif mapping is not None:
+ self.update(mapping)
+ self.set_metadata_version()
+
+ def set_metadata_version(self):
+ self._fields['Metadata-Version'] = _best_version(self._fields)
+
+ def _write_field(self, fileobj, name, value):
+ fileobj.write('%s: %s\n' % (name, value))
+
+ def __getitem__(self, name):
+ return self.get(name)
+
+ def __setitem__(self, name, value):
+ return self.set(name, value)
+
+ def __delitem__(self, name):
+ field_name = self._convert_name(name)
+ try:
+ del self._fields[field_name]
+ except KeyError:
+ raise KeyError(name)
+
+ def __contains__(self, name):
+ return (name in self._fields or
+ self._convert_name(name) in self._fields)
+
+ def _convert_name(self, name):
+ if name in _ALL_FIELDS:
+ return name
+ name = name.replace('-', '_').lower()
+ return _ATTR2FIELD.get(name, name)
+
+ def _default_value(self, name):
+ if name in _LISTFIELDS or name in _ELEMENTSFIELD:
+ return []
+ return 'UNKNOWN'
+
+ def _remove_line_prefix(self, value):
+ if self.metadata_version in ('1.0', '1.1'):
+ return _LINE_PREFIX_PRE_1_2.sub('\n', value)
+ else:
+ return _LINE_PREFIX_1_2.sub('\n', value)
+
+ def __getattr__(self, name):
+ if name in _ATTR2FIELD:
+ return self[name]
+ raise AttributeError(name)
+
+ #
+ # Public API
+ #
+
+ def get_fullname(self, filesafe=False):
+ """Return the distribution name with version.
+
+ If filesafe is true, return a filename-escaped form."""
+ return _get_name_and_version(self['Name'], self['Version'], filesafe)
+
+ def is_field(self, name):
+ """return True if name is a valid metadata key"""
+ name = self._convert_name(name)
+ return name in _ALL_FIELDS
+
+ def is_multi_field(self, name):
+ name = self._convert_name(name)
+ return name in _LISTFIELDS
+
+ def read(self, filepath):
+ """Read the metadata values from a file path."""
+ fp = codecs.open(filepath, 'r', encoding='utf-8')
+ try:
+ self.read_file(fp)
+ finally:
+ fp.close()
+
+ def read_file(self, fileob):
+ """Read the metadata values from a file object."""
+ msg = message_from_file(fileob)
+ self._fields['Metadata-Version'] = msg['metadata-version']
+
+ # When reading, get all the fields we can
+ for field in _ALL_FIELDS:
+ if field not in msg:
+ continue
+ if field in _LISTFIELDS:
+ # we can have multiple lines
+ values = msg.get_all(field)
+ if field in _LISTTUPLEFIELDS and values is not None:
+ values = [tuple(value.split(',')) for value in values]
+ self.set(field, values)
+ else:
+ # single line
+ value = msg[field]
+ if value is not None and value != 'UNKNOWN':
+ self.set(field, value)
+ # logger.debug('Attempting to set metadata for %s', self)
+ # self.set_metadata_version()
+
+ def write(self, filepath, skip_unknown=False):
+ """Write the metadata fields to filepath."""
+ fp = codecs.open(filepath, 'w', encoding='utf-8')
+ try:
+ self.write_file(fp, skip_unknown)
+ finally:
+ fp.close()
+
+ def write_file(self, fileobject, skip_unknown=False):
+ """Write the PKG-INFO format data to a file object."""
+ self.set_metadata_version()
+
+ for field in _version2fieldlist(self['Metadata-Version']):
+ values = self.get(field)
+ if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
+ continue
+ if field in _ELEMENTSFIELD:
+ self._write_field(fileobject, field, ','.join(values))
+ continue
+ if field not in _LISTFIELDS:
+ if field == 'Description':
+ if self.metadata_version in ('1.0', '1.1'):
+ values = values.replace('\n', '\n ')
+ else:
+ values = values.replace('\n', '\n |')
+ values = [values]
+
+ if field in _LISTTUPLEFIELDS:
+ values = [','.join(value) for value in values]
+
+ for value in values:
+ self._write_field(fileobject, field, value)
+
+ def update(self, other=None, **kwargs):
+ """Set metadata values from the given iterable `other` and kwargs.
+
+ Behavior is like `dict.update`: If `other` has a ``keys`` method,
+ they are looped over and ``self[key]`` is assigned ``other[key]``.
+ Else, ``other`` is an iterable of ``(key, value)`` iterables.
+
+ Keys that don't match a metadata field or that have an empty value are
+ dropped.
+ """
+ def _set(key, value):
+ if key in _ATTR2FIELD and value:
+ self.set(self._convert_name(key), value)
+
+ if not other:
+ # other is None or empty container
+ pass
+ elif hasattr(other, 'keys'):
+ for k in other.keys():
+ _set(k, other[k])
+ else:
+ for k, v in other:
+ _set(k, v)
+
+ if kwargs:
+ for k, v in kwargs.items():
+ _set(k, v)
+
+ def set(self, name, value):
+ """Control then set a metadata field."""
+ name = self._convert_name(name)
+
+ if ((name in _ELEMENTSFIELD or name == 'Platform') and
+ not isinstance(value, (list, tuple))):
+ if isinstance(value, string_types):
+ value = [v.strip() for v in value.split(',')]
+ else:
+ value = []
+ elif (name in _LISTFIELDS and
+ not isinstance(value, (list, tuple))):
+ if isinstance(value, string_types):
+ value = [value]
+ else:
+ value = []
+
+ if logger.isEnabledFor(logging.WARNING):
+ project_name = self['Name']
+
+ scheme = get_scheme(self.scheme)
+ if name in _PREDICATE_FIELDS and value is not None:
+ for v in value:
+ # check that the values are valid
+ if not scheme.is_valid_matcher(v.split(';')[0]):
+ logger.warning(
+ "'%s': '%s' is not valid (field '%s')",
+ project_name, v, name)
+ # FIXME this rejects UNKNOWN, is that right?
+ elif name in _VERSIONS_FIELDS and value is not None:
+ if not scheme.is_valid_constraint_list(value):
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
+ project_name, value, name)
+ elif name in _VERSION_FIELDS and value is not None:
+ if not scheme.is_valid_version(value):
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
+ project_name, value, name)
+
+ if name in _UNICODEFIELDS:
+ if name == 'Description':
+ value = self._remove_line_prefix(value)
+
+ self._fields[name] = value
+
+ def get(self, name, default=_MISSING):
+ """Get a metadata field."""
+ name = self._convert_name(name)
+ if name not in self._fields:
+ if default is _MISSING:
+ default = self._default_value(name)
+ return default
+ if name in _UNICODEFIELDS:
+ value = self._fields[name]
+ return value
+ elif name in _LISTFIELDS:
+ value = self._fields[name]
+ if value is None:
+ return []
+ res = []
+ for val in value:
+ if name not in _LISTTUPLEFIELDS:
+ res.append(val)
+ else:
+ # That's for Project-URL
+ res.append((val[0], val[1]))
+ return res
+
+ elif name in _ELEMENTSFIELD:
+ value = self._fields[name]
+ if isinstance(value, string_types):
+ return value.split(',')
+ return self._fields[name]
+
+ def check(self, strict=False):
+ """Check if the metadata is compliant. If strict is True then raise if
+ no Name or Version are provided"""
+ self.set_metadata_version()
+
+ # XXX should check the versions (if the file was loaded)
+ missing, warnings = [], []
+
+ for attr in ('Name', 'Version'): # required by PEP 345
+ if attr not in self:
+ missing.append(attr)
+
+ if strict and missing != []:
+ msg = 'missing required metadata: %s' % ', '.join(missing)
+ raise MetadataMissingError(msg)
+
+ for attr in ('Home-page', 'Author'):
+ if attr not in self:
+ missing.append(attr)
+
+ # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
+ if self['Metadata-Version'] != '1.2':
+ return missing, warnings
+
+ scheme = get_scheme(self.scheme)
+
+ def are_valid_constraints(value):
+ for v in value:
+ if not scheme.is_valid_matcher(v.split(';')[0]):
+ return False
+ return True
+
+ for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
+ (_VERSIONS_FIELDS,
+ scheme.is_valid_constraint_list),
+ (_VERSION_FIELDS,
+ scheme.is_valid_version)):
+ for field in fields:
+ value = self.get(field, None)
+ if value is not None and not controller(value):
+ warnings.append("Wrong value for '%s': %s" % (field, value))
+
+ return missing, warnings
+
+ def todict(self, skip_missing=False):
+ """Return fields as a dict.
+
+ Field names will be converted to use the underscore-lowercase style
+ instead of hyphen-mixed case (i.e. home_page instead of Home-page).
+ """
+ self.set_metadata_version()
+
+ mapping_1_0 = (
+ ('metadata_version', 'Metadata-Version'),
+ ('name', 'Name'),
+ ('version', 'Version'),
+ ('summary', 'Summary'),
+ ('home_page', 'Home-page'),
+ ('author', 'Author'),
+ ('author_email', 'Author-email'),
+ ('license', 'License'),
+ ('description', 'Description'),
+ ('keywords', 'Keywords'),
+ ('platform', 'Platform'),
+ ('classifiers', 'Classifier'),
+ ('download_url', 'Download-URL'),
+ )
+
+ data = {}
+ for key, field_name in mapping_1_0:
+ if not skip_missing or field_name in self._fields:
+ data[key] = self[field_name]
+
+ if self['Metadata-Version'] == '1.2':
+ mapping_1_2 = (
+ ('requires_dist', 'Requires-Dist'),
+ ('requires_python', 'Requires-Python'),
+ ('requires_external', 'Requires-External'),
+ ('provides_dist', 'Provides-Dist'),
+ ('obsoletes_dist', 'Obsoletes-Dist'),
+ ('project_url', 'Project-URL'),
+ ('maintainer', 'Maintainer'),
+ ('maintainer_email', 'Maintainer-email'),
+ )
+ for key, field_name in mapping_1_2:
+ if not skip_missing or field_name in self._fields:
+ if key != 'project_url':
+ data[key] = self[field_name]
+ else:
+ data[key] = [','.join(u) for u in self[field_name]]
+
+ elif self['Metadata-Version'] == '1.1':
+ mapping_1_1 = (
+ ('provides', 'Provides'),
+ ('requires', 'Requires'),
+ ('obsoletes', 'Obsoletes'),
+ )
+ for key, field_name in mapping_1_1:
+ if not skip_missing or field_name in self._fields:
+ data[key] = self[field_name]
+
+ return data
+
+ def add_requirements(self, requirements):
+ if self['Metadata-Version'] == '1.1':
+ # we can't have 1.1 metadata *and* Setuptools requires
+ for field in ('Obsoletes', 'Requires', 'Provides'):
+ if field in self:
+ del self[field]
+ self['Requires-Dist'] += requirements
+
+ # Mapping API
+ # TODO could add iter* variants
+
+ def keys(self):
+ return list(_version2fieldlist(self['Metadata-Version']))
+
+ def __iter__(self):
+ for key in self.keys():
+ yield key
+
+ def values(self):
+ return [self[key] for key in self.keys()]
+
+ def items(self):
+ return [(key, self[key]) for key in self.keys()]
+
+ def __repr__(self):
+ return '<%s %s %s>' % (self.__class__.__name__, self.name,
+ self.version)
+
+
+METADATA_FILENAME = 'pydist.json'
+WHEEL_METADATA_FILENAME = 'metadata.json'
+LEGACY_METADATA_FILENAME = 'METADATA'
+
+
+class Metadata(object):
+ """
+ The metadata of a release. This implementation uses 2.0 (JSON)
+ metadata where possible. If not possible, it wraps a LegacyMetadata
+ instance which handles the key-value metadata format.
+ """
+
+ METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
+
+ NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
+
+ VERSION_MATCHER = PEP440_VERSION_RE
+
+ SUMMARY_MATCHER = re.compile('.{1,2047}')
+
+ METADATA_VERSION = '2.0'
+
+ GENERATOR = 'distlib (%s)' % __version__
+
+ MANDATORY_KEYS = {
+ 'name': (),
+ 'version': (),
+ 'summary': ('legacy',),
+ }
+
+ INDEX_KEYS = ('name version license summary description author '
+ 'author_email keywords platform home_page classifiers '
+ 'download_url')
+
+ DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
+ 'dev_requires provides meta_requires obsoleted_by '
+ 'supports_environments')
+
+ SYNTAX_VALIDATORS = {
+ 'metadata_version': (METADATA_VERSION_MATCHER, ()),
+ 'name': (NAME_MATCHER, ('legacy',)),
+ 'version': (VERSION_MATCHER, ('legacy',)),
+ 'summary': (SUMMARY_MATCHER, ('legacy',)),
+ }
+
+ __slots__ = ('_legacy', '_data', 'scheme')
+
+ def __init__(self, path=None, fileobj=None, mapping=None,
+ scheme='default'):
+ if [path, fileobj, mapping].count(None) < 2:
+ raise TypeError('path, fileobj and mapping are exclusive')
+ self._legacy = None
+ self._data = None
+ self.scheme = scheme
+ if mapping is not None:
+ try:
+ self._validate_mapping(mapping, scheme)
+ self._data = mapping
+ except MetadataUnrecognizedVersionError:
+ self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
+ self.validate()
+ else:
+ data = None
+ if path:
+ with open(path, 'rb') as f:
+ data = f.read()
+ elif fileobj:
+ data = fileobj.read()
+ if data is None:
+                # Initialised with no args - content to be added later
+ self._data = {
+ 'metadata_version': self.METADATA_VERSION,
+ 'generator': self.GENERATOR,
+ }
+ else:
+ if not isinstance(data, text_type):
+ data = data.decode('utf-8')
+ try:
+ self._data = json.loads(data)
+ self._validate_mapping(self._data, scheme)
+ except ValueError:
+ # Note: MetadataUnrecognizedVersionError does not
+ # inherit from ValueError (it's a DistlibException,
+ # which should not inherit from ValueError).
+                    # The ValueError comes from json.loads - if that
+                    # succeeds and we then get a validation error, we want
+                    # that error to propagate.
+ self._legacy = LegacyMetadata(fileobj=StringIO(data),
+ scheme=scheme)
+ self.validate()
+
+ common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))
+
+ none_list = (None, list)
+ none_dict = (None, dict)
+
+ mapped_keys = {
+ 'run_requires': ('Requires-Dist', list),
+ 'build_requires': ('Setup-Requires-Dist', list),
+ 'dev_requires': none_list,
+ 'test_requires': none_list,
+ 'meta_requires': none_list,
+ 'extras': ('Provides-Extra', list),
+ 'modules': none_list,
+ 'namespaces': none_list,
+ 'exports': none_dict,
+ 'commands': none_dict,
+ 'classifiers': ('Classifier', list),
+ 'source_url': ('Download-URL', None),
+ 'metadata_version': ('Metadata-Version', None),
+ }
+
+ del none_list, none_dict
+
+ def __getattribute__(self, key):
+ common = object.__getattribute__(self, 'common_keys')
+ mapped = object.__getattribute__(self, 'mapped_keys')
+ if key in mapped:
+ lk, maker = mapped[key]
+ if self._legacy:
+ if lk is None:
+ result = None if maker is None else maker()
+ else:
+ result = self._legacy.get(lk)
+ else:
+ value = None if maker is None else maker()
+ if key not in ('commands', 'exports', 'modules', 'namespaces',
+ 'classifiers'):
+ result = self._data.get(key, value)
+ else:
+ # special cases for PEP 459
+ sentinel = object()
+ result = sentinel
+ d = self._data.get('extensions')
+ if d:
+ if key == 'commands':
+ result = d.get('python.commands', value)
+ elif key == 'classifiers':
+ d = d.get('python.details')
+ if d:
+ result = d.get(key, value)
+ else:
+ d = d.get('python.exports')
+ if not d:
+ d = self._data.get('python.exports')
+ if d:
+ result = d.get(key, value)
+ if result is sentinel:
+ result = value
+ elif key not in common:
+ result = object.__getattribute__(self, key)
+ elif self._legacy:
+ result = self._legacy.get(key)
+ else:
+ result = self._data.get(key)
+ return result
+
+ def _validate_value(self, key, value, scheme=None):
+ if key in self.SYNTAX_VALIDATORS:
+ pattern, exclusions = self.SYNTAX_VALIDATORS[key]
+ if (scheme or self.scheme) not in exclusions:
+ m = pattern.match(value)
+ if not m:
+ raise MetadataInvalidError("'%s' is an invalid value for "
+ "the '%s' property" % (value,
+ key))
+
+ def __setattr__(self, key, value):
+ self._validate_value(key, value)
+ common = object.__getattribute__(self, 'common_keys')
+ mapped = object.__getattribute__(self, 'mapped_keys')
+ if key in mapped:
+ lk, _ = mapped[key]
+ if self._legacy:
+ if lk is None:
+ raise NotImplementedError
+ self._legacy[lk] = value
+ elif key not in ('commands', 'exports', 'modules', 'namespaces',
+ 'classifiers'):
+ self._data[key] = value
+ else:
+ # special cases for PEP 459
+ d = self._data.setdefault('extensions', {})
+ if key == 'commands':
+ d['python.commands'] = value
+ elif key == 'classifiers':
+ d = d.setdefault('python.details', {})
+ d[key] = value
+ else:
+ d = d.setdefault('python.exports', {})
+ d[key] = value
+ elif key not in common:
+ object.__setattr__(self, key, value)
+ else:
+ if key == 'keywords':
+ if isinstance(value, string_types):
+ value = value.strip()
+ if value:
+ value = value.split()
+ else:
+ value = []
+ if self._legacy:
+ self._legacy[key] = value
+ else:
+ self._data[key] = value
+
+ @property
+ def name_and_version(self):
+ return _get_name_and_version(self.name, self.version, True)
+
+ @property
+ def provides(self):
+ if self._legacy:
+ result = self._legacy['Provides-Dist']
+ else:
+ result = self._data.setdefault('provides', [])
+ s = '%s (%s)' % (self.name, self.version)
+ if s not in result:
+ result.append(s)
+ return result
+
+ @provides.setter
+ def provides(self, value):
+ if self._legacy:
+ self._legacy['Provides-Dist'] = value
+ else:
+ self._data['provides'] = value
+
+ def get_requirements(self, reqts, extras=None, env=None):
+ """
+ Base method to get dependencies, given a set of extras
+ to satisfy and an optional environment context.
+        :param reqts: A list of requirement entries, each of which may be
+                      conditional on extras and/or environment markers.
+ :param extras: A list of optional components being requested.
+ :param env: An optional environment for marker evaluation.
+ """
+ if self._legacy:
+ result = reqts
+ else:
+ result = []
+ extras = get_extras(extras or [], self.extras)
+ for d in reqts:
+ if 'extra' not in d and 'environment' not in d:
+ # unconditional
+ include = True
+ else:
+ if 'extra' not in d:
+ # Not extra-dependent - only environment-dependent
+ include = True
+ else:
+ include = d.get('extra') in extras
+ if include:
+ # Not excluded because of extras, check environment
+ marker = d.get('environment')
+ if marker:
+ include = interpret(marker, env)
+ if include:
+ result.extend(d['requires'])
+ for key in ('build', 'dev', 'test'):
+ e = ':%s:' % key
+ if e in extras:
+ extras.remove(e)
+ # A recursive call, but it should terminate since 'test'
+ # has been removed from the extras
+ reqts = self._data.get('%s_requires' % key, [])
+ result.extend(self.get_requirements(reqts, extras=extras,
+ env=env))
+ return result
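+
+    # Illustrative sketch (editor's note; hypothetical, non-legacy data,
+    # with a 'test' extra declared in the metadata):
+    #   reqts = [{'requires': ['requests (>=2.0)']},
+    #            {'extra': 'test', 'requires': ['pytest']}]
+    #   md.get_requirements(reqts)                  -> ['requests (>=2.0)']
+    #   md.get_requirements(reqts, extras=['test']) -> ['requests (>=2.0)',
+    #                                                   'pytest']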
+
+ @property
+ def dictionary(self):
+ if self._legacy:
+ return self._from_legacy()
+ return self._data
+
+ @property
+ def dependencies(self):
+ if self._legacy:
+ raise NotImplementedError
+ else:
+ return extract_by_key(self._data, self.DEPENDENCY_KEYS)
+
+ @dependencies.setter
+ def dependencies(self, value):
+ if self._legacy:
+ raise NotImplementedError
+ else:
+ self._data.update(value)
+
+ def _validate_mapping(self, mapping, scheme):
+ if mapping.get('metadata_version') != self.METADATA_VERSION:
+ raise MetadataUnrecognizedVersionError()
+ missing = []
+ for key, exclusions in self.MANDATORY_KEYS.items():
+ if key not in mapping:
+ if scheme not in exclusions:
+ missing.append(key)
+ if missing:
+ msg = 'Missing metadata items: %s' % ', '.join(missing)
+ raise MetadataMissingError(msg)
+ for k, v in mapping.items():
+ self._validate_value(k, v, scheme)
+
+ def validate(self):
+ if self._legacy:
+ missing, warnings = self._legacy.check(True)
+ if missing or warnings:
+ logger.warning('Metadata: missing: %s, warnings: %s',
+ missing, warnings)
+ else:
+ self._validate_mapping(self._data, self.scheme)
+
+ def todict(self):
+ if self._legacy:
+ return self._legacy.todict(True)
+ else:
+ result = extract_by_key(self._data, self.INDEX_KEYS)
+ return result
+
+ def _from_legacy(self):
+ assert self._legacy and not self._data
+ result = {
+ 'metadata_version': self.METADATA_VERSION,
+ 'generator': self.GENERATOR,
+ }
+ lmd = self._legacy.todict(True) # skip missing ones
+ for k in ('name', 'version', 'license', 'summary', 'description',
+ 'classifier'):
+ if k in lmd:
+ if k == 'classifier':
+ nk = 'classifiers'
+ else:
+ nk = k
+ result[nk] = lmd[k]
+ kw = lmd.get('Keywords', [])
+ if kw == ['']:
+ kw = []
+ result['keywords'] = kw
+ keys = (('requires_dist', 'run_requires'),
+ ('setup_requires_dist', 'build_requires'))
+ for ok, nk in keys:
+ if ok in lmd and lmd[ok]:
+ result[nk] = [{'requires': lmd[ok]}]
+ result['provides'] = self.provides
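+        # Note (editor): the dicts below are unused placeholders; conversion
+        # of author/maintainer contact details is not implemented here.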
+ author = {}
+ maintainer = {}
+ return result
+
+ LEGACY_MAPPING = {
+ 'name': 'Name',
+ 'version': 'Version',
+ 'license': 'License',
+ 'summary': 'Summary',
+ 'description': 'Description',
+ 'classifiers': 'Classifier',
+ }
+
+ def _to_legacy(self):
+ def process_entries(entries):
+ reqts = set()
+ for e in entries:
+ extra = e.get('extra')
+ env = e.get('environment')
+ rlist = e['requires']
+ for r in rlist:
+ if not env and not extra:
+ reqts.add(r)
+ else:
+ marker = ''
+ if extra:
+ marker = 'extra == "%s"' % extra
+ if env:
+ if marker:
+ marker = '(%s) and %s' % (env, marker)
+ else:
+ marker = env
+ reqts.add(';'.join((r, marker)))
+ return reqts
+
+ assert self._data and not self._legacy
+ result = LegacyMetadata()
+ nmd = self._data
+ for nk, ok in self.LEGACY_MAPPING.items():
+ if nk in nmd:
+ result[ok] = nmd[nk]
+ r1 = process_entries(self.run_requires + self.meta_requires)
+ r2 = process_entries(self.build_requires + self.dev_requires)
+ if self.extras:
+ result['Provides-Extra'] = sorted(self.extras)
+ result['Requires-Dist'] = sorted(r1)
+ result['Setup-Requires-Dist'] = sorted(r2)
+ # TODO: other fields such as contacts
+ return result
+
+ def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
+ if [path, fileobj].count(None) != 1:
+ raise ValueError('Exactly one of path and fileobj is needed')
+ self.validate()
+ if legacy:
+ if self._legacy:
+ legacy_md = self._legacy
+ else:
+ legacy_md = self._to_legacy()
+ if path:
+ legacy_md.write(path, skip_unknown=skip_unknown)
+ else:
+ legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
+ else:
+ if self._legacy:
+ d = self._from_legacy()
+ else:
+ d = self._data
+ if fileobj:
+ json.dump(d, fileobj, ensure_ascii=True, indent=2,
+ sort_keys=True)
+ else:
+ with codecs.open(path, 'w', 'utf-8') as f:
+ json.dump(d, f, ensure_ascii=True, indent=2,
+ sort_keys=True)
+
+ def add_requirements(self, requirements):
+ if self._legacy:
+ self._legacy.add_requirements(requirements)
+ else:
+ run_requires = self._data.setdefault('run_requires', [])
+ always = None
+ for entry in run_requires:
+ if 'environment' not in entry and 'extra' not in entry:
+ always = entry
+ break
+ if always is None:
+ always = { 'requires': requirements }
+ run_requires.insert(0, always)
+ else:
+ rset = set(always['requires']) | set(requirements)
+ always['requires'] = sorted(rset)
+
+ def __repr__(self):
+ name = self.name or '(no name)'
+ version = self.version or 'no version'
+ return '<%s %s %s (%s)>' % (self.__class__.__name__,
+ self.metadata_version, name, version)
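+
+
+# --- Illustrative sketch (editor's note, not part of distlib) ---------------
+# Minimal usage under stated assumptions: a PKG-INFO style file exists at
+# the hypothetical path below, so Metadata falls back to LegacyMetadata.
+#
+#   >>> md = Metadata(path='/tmp/example/PKG-INFO')
+#   >>> md.name, md.version
+#   ('example', '1.0')
+#   >>> md.write(path='/tmp/example/pydist.json')  # emits the 2.0 JSON form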
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py
new file mode 100644
index 00000000..18840167
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py
@@ -0,0 +1,355 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import bisect
+import io
+import logging
+import os
+import pkgutil
+import shutil
+import sys
+import types
+import zipimport
+
+from . import DistlibException
+from .util import cached_property, get_cache_base, path_to_cache_dir, Cache
+
+logger = logging.getLogger(__name__)
+
+
+cache = None # created when needed
+
+
+class ResourceCache(Cache):
+ def __init__(self, base=None):
+ if base is None:
+ # Use native string to avoid issues on 2.x: see Python #20140.
+ base = os.path.join(get_cache_base(), str('resource-cache'))
+ super(ResourceCache, self).__init__(base)
+
+ def is_stale(self, resource, path):
+ """
+ Is the cache stale for the given resource?
+
+ :param resource: The :class:`Resource` being cached.
+ :param path: The path of the resource in the cache.
+ :return: True if the cache is stale.
+ """
+ # Cache invalidation is a hard problem :-)
+ return True
+
+ def get(self, resource):
+ """
+        Get a resource into the cache.
+
+ :param resource: A :class:`Resource` instance.
+ :return: The pathname of the resource in the cache.
+ """
+ prefix, path = resource.finder.get_cache_info(resource)
+ if prefix is None:
+ result = path
+ else:
+ result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
+ dirname = os.path.dirname(result)
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ if not os.path.exists(result):
+ stale = True
+ else:
+ stale = self.is_stale(resource, path)
+ if stale:
+ # write the bytes of the resource to the cache location
+ with open(result, 'wb') as f:
+ f.write(resource.bytes)
+ return result
+
+
+class ResourceBase(object):
+ def __init__(self, finder, name):
+ self.finder = finder
+ self.name = name
+
+
+class Resource(ResourceBase):
+ """
+ A class representing an in-package resource, such as a data file. This is
+ not normally instantiated by user code, but rather by a
+ :class:`ResourceFinder` which manages the resource.
+ """
+ is_container = False # Backwards compatibility
+
+ def as_stream(self):
+ """
+ Get the resource as a stream.
+
+ This is not a property to make it obvious that it returns a new stream
+ each time.
+ """
+ return self.finder.get_stream(self)
+
+ @cached_property
+ def file_path(self):
+ global cache
+ if cache is None:
+ cache = ResourceCache()
+ return cache.get(self)
+
+ @cached_property
+ def bytes(self):
+ return self.finder.get_bytes(self)
+
+ @cached_property
+ def size(self):
+ return self.finder.get_size(self)
+
+
+class ResourceContainer(ResourceBase):
+ is_container = True # Backwards compatibility
+
+ @cached_property
+ def resources(self):
+ return self.finder.get_resources(self)
+
+
+class ResourceFinder(object):
+ """
+ Resource finder for file system resources.
+ """
+
+ if sys.platform.startswith('java'):
+ skipped_extensions = ('.pyc', '.pyo', '.class')
+ else:
+ skipped_extensions = ('.pyc', '.pyo')
+
+ def __init__(self, module):
+ self.module = module
+ self.loader = getattr(module, '__loader__', None)
+ self.base = os.path.dirname(getattr(module, '__file__', ''))
+
+ def _adjust_path(self, path):
+ return os.path.realpath(path)
+
+ def _make_path(self, resource_name):
+ # Issue #50: need to preserve type of path on Python 2.x
+ # like os.path._get_sep
+ if isinstance(resource_name, bytes): # should only happen on 2.x
+ sep = b'/'
+ else:
+ sep = '/'
+ parts = resource_name.split(sep)
+ parts.insert(0, self.base)
+ result = os.path.join(*parts)
+ return self._adjust_path(result)
+
+ def _find(self, path):
+ return os.path.exists(path)
+
+ def get_cache_info(self, resource):
+ return None, resource.path
+
+ def find(self, resource_name):
+ path = self._make_path(resource_name)
+ if not self._find(path):
+ result = None
+ else:
+ if self._is_directory(path):
+ result = ResourceContainer(self, resource_name)
+ else:
+ result = Resource(self, resource_name)
+ result.path = path
+ return result
+
+ def get_stream(self, resource):
+ return open(resource.path, 'rb')
+
+ def get_bytes(self, resource):
+ with open(resource.path, 'rb') as f:
+ return f.read()
+
+ def get_size(self, resource):
+ return os.path.getsize(resource.path)
+
+ def get_resources(self, resource):
+ def allowed(f):
+ return (f != '__pycache__' and not
+ f.endswith(self.skipped_extensions))
+ return set([f for f in os.listdir(resource.path) if allowed(f)])
+
+ def is_container(self, resource):
+ return self._is_directory(resource.path)
+
+ _is_directory = staticmethod(os.path.isdir)
+
+ def iterator(self, resource_name):
+ resource = self.find(resource_name)
+ if resource is not None:
+ todo = [resource]
+ while todo:
+ resource = todo.pop(0)
+ yield resource
+ if resource.is_container:
+ rname = resource.name
+ for name in resource.resources:
+ if not rname:
+ new_name = name
+ else:
+ new_name = '/'.join([rname, name])
+ child = self.find(new_name)
+ if child.is_container:
+ todo.append(child)
+ else:
+ yield child
+
+
+class ZipResourceFinder(ResourceFinder):
+ """
+ Resource finder for resources in .zip files.
+ """
+ def __init__(self, module):
+ super(ZipResourceFinder, self).__init__(module)
+ archive = self.loader.archive
+ self.prefix_len = 1 + len(archive)
+ # PyPy doesn't have a _files attr on zipimporter, and you can't set one
+ if hasattr(self.loader, '_files'):
+ self._files = self.loader._files
+ else:
+ self._files = zipimport._zip_directory_cache[archive]
+ self.index = sorted(self._files)
+
+ def _adjust_path(self, path):
+ return path
+
+ def _find(self, path):
+ path = path[self.prefix_len:]
+ if path in self._files:
+ result = True
+ else:
+ if path and path[-1] != os.sep:
+ path = path + os.sep
+ i = bisect.bisect(self.index, path)
+ try:
+ result = self.index[i].startswith(path)
+ except IndexError:
+ result = False
+ if not result:
+ logger.debug('_find failed: %r %r', path, self.loader.prefix)
+ else:
+ logger.debug('_find worked: %r %r', path, self.loader.prefix)
+ return result
+
+ def get_cache_info(self, resource):
+ prefix = self.loader.archive
+ path = resource.path[1 + len(prefix):]
+ return prefix, path
+
+ def get_bytes(self, resource):
+ return self.loader.get_data(resource.path)
+
+ def get_stream(self, resource):
+ return io.BytesIO(self.get_bytes(resource))
+
+ def get_size(self, resource):
+ path = resource.path[self.prefix_len:]
+ return self._files[path][3]
+
+ def get_resources(self, resource):
+ path = resource.path[self.prefix_len:]
+ if path and path[-1] != os.sep:
+ path += os.sep
+ plen = len(path)
+ result = set()
+ i = bisect.bisect(self.index, path)
+ while i < len(self.index):
+ if not self.index[i].startswith(path):
+ break
+ s = self.index[i][plen:]
+ result.add(s.split(os.sep, 1)[0]) # only immediate children
+ i += 1
+ return result
+
+ def _is_directory(self, path):
+ path = path[self.prefix_len:]
+ if path and path[-1] != os.sep:
+ path += os.sep
+ i = bisect.bisect(self.index, path)
+ try:
+ result = self.index[i].startswith(path)
+ except IndexError:
+ result = False
+ return result
+
+_finder_registry = {
+ type(None): ResourceFinder,
+ zipimport.zipimporter: ZipResourceFinder
+}
+
+try:
+ # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
+ try:
+ import _frozen_importlib_external as _fi
+ except ImportError:
+ import _frozen_importlib as _fi
+ _finder_registry[_fi.SourceFileLoader] = ResourceFinder
+ _finder_registry[_fi.FileFinder] = ResourceFinder
+ del _fi
+except (ImportError, AttributeError):
+ pass
+
+
+def register_finder(loader, finder_maker):
+ _finder_registry[type(loader)] = finder_maker
+
+_finder_cache = {}
+
+
+def finder(package):
+ """
+ Return a resource finder for a package.
+ :param package: The name of the package.
+ :return: A :class:`ResourceFinder` instance for the package.
+ """
+ if package in _finder_cache:
+ result = _finder_cache[package]
+ else:
+ if package not in sys.modules:
+ __import__(package)
+ module = sys.modules[package]
+ path = getattr(module, '__path__', None)
+ if path is None:
+ raise DistlibException('You cannot get a finder for a module, '
+ 'only for a package')
+ loader = getattr(module, '__loader__', None)
+ finder_maker = _finder_registry.get(type(loader))
+ if finder_maker is None:
+ raise DistlibException('Unable to locate finder for %r' % package)
+ result = finder_maker(module)
+ _finder_cache[package] = result
+ return result
+
+
+_dummy_module = types.ModuleType(str('__dummy__'))
+
+
+def finder_for_path(path):
+ """
+ Return a resource finder for a path, which should represent a container.
+
+ :param path: The path.
+ :return: A :class:`ResourceFinder` instance for the path.
+ """
+ result = None
+ # calls any path hooks, gets importer into cache
+ pkgutil.get_importer(path)
+ loader = sys.path_importer_cache.get(path)
+ finder = _finder_registry.get(type(loader))
+ if finder:
+ module = _dummy_module
+ module.__file__ = os.path.join(path, '')
+ module.__loader__ = loader
+ result = finder(module)
+ return result
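+
+
+# --- Illustrative sketch (editor's note, not part of distlib) ---------------
+# Typical use: locate a data file inside an installed package. The package
+# and resource names below are hypothetical.
+#
+#   >>> f = finder('mypackage')
+#   >>> r = f.find('data/config.json')
+#   >>> with r.as_stream() as s:    # a new stream on each call
+#   ...     payload = s.read()
+#   >>> r.size == len(payload)
+#   True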
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py
new file mode 100644
index 00000000..5965e241
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py
@@ -0,0 +1,403 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2015 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from io import BytesIO
+import logging
+import os
+import re
+import struct
+import sys
+
+from .compat import sysconfig, detect_encoding, ZipFile
+from .resources import finder
+from .util import (FileOperator, get_export_entry, convert_path,
+ get_executable, in_venv)
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_MANIFEST = '''
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity version="1.0.0.0"
+ processorArchitecture="X86"
+ name="%s"
+ type="win32"/>
+
+ <!-- Identify the application security requirements. -->
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+</assembly>'''.strip()
+
+# regular expression which matches a Python shebang on a script's first line
+FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
+SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
+import re
+import sys
+from %(module)s import %(import_name)s
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(%(func)s())
+'''
+
+
+def _enquote_executable(executable):
+ if ' ' in executable:
+        # make sure we quote only the executable in the env case,
+        # for example /usr/bin/env "/dir with spaces/bin/jython"
+        # instead of "/usr/bin/env /dir with spaces/bin/jython";
+        # otherwise the whole string would be quoted.
+ if executable.startswith('/usr/bin/env '):
+ env, _executable = executable.split(' ', 1)
+ if ' ' in _executable and not _executable.startswith('"'):
+ executable = '%s "%s"' % (env, _executable)
+ else:
+ if not executable.startswith('"'):
+ executable = '"%s"' % executable
+ return executable
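+
+# Illustrative sketch (editor's note; hypothetical paths):
+#   _enquote_executable('/opt/py dir/bin/python')
+#       -> '"/opt/py dir/bin/python"'
+#   _enquote_executable('/usr/bin/env /opt/py dir/bin/jython')
+#       -> '/usr/bin/env "/opt/py dir/bin/jython"'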
+
+
+class ScriptMaker(object):
+ """
+ A class to copy or create scripts from source scripts or callable
+ specifications.
+ """
+ script_template = SCRIPT_TEMPLATE
+
+ executable = None # for shebangs
+
+ def __init__(self, source_dir, target_dir, add_launchers=True,
+ dry_run=False, fileop=None):
+ self.source_dir = source_dir
+ self.target_dir = target_dir
+ self.add_launchers = add_launchers
+ self.force = False
+ self.clobber = False
+ # It only makes sense to set mode bits on POSIX.
+ self.set_mode = (os.name == 'posix') or (os.name == 'java' and
+ os._name == 'posix')
+ self.variants = set(('', 'X.Y'))
+ self._fileop = fileop or FileOperator(dry_run)
+
+ self._is_nt = os.name == 'nt' or (
+ os.name == 'java' and os._name == 'nt')
+
+ def _get_alternate_executable(self, executable, options):
+ if options.get('gui', False) and self._is_nt: # pragma: no cover
+ dn, fn = os.path.split(executable)
+ fn = fn.replace('python', 'pythonw')
+ executable = os.path.join(dn, fn)
+ return executable
+
+ if sys.platform.startswith('java'): # pragma: no cover
+ def _is_shell(self, executable):
+ """
+ Determine if the specified executable is a script
+ (contains a #! line)
+ """
+ try:
+ with open(executable) as fp:
+ return fp.read(2) == '#!'
+ except (OSError, IOError):
+ logger.warning('Failed to open %s', executable)
+ return False
+
+ def _fix_jython_executable(self, executable):
+ if self._is_shell(executable):
+                # The workaround for Jython is not needed on Linux systems.
+ import java
+
+ if java.lang.System.getProperty('os.name') == 'Linux':
+ return executable
+ elif executable.lower().endswith('jython.exe'):
+ # Use wrapper exe for Jython on Windows
+ return executable
+ return '/usr/bin/env %s' % executable
+
+ def _build_shebang(self, executable, post_interp):
+ """
+        Build a shebang line. In the simple case (on Windows, or when the
+        executable contains no spaces and the line is not too long) use a
+        plain '#!' formulation for the shebang. Otherwise, use /bin/sh as
+        the executable, with a contrived shebang which allows the script to
+        run either under Python or sh, using suitable quoting. Thanks to
+        Harald Nordgren for his input.
+
+ See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
+ https://hg.mozilla.org/mozilla-central/file/tip/mach
+ """
+ if os.name != 'posix':
+ simple_shebang = True
+ else:
+ # Add 3 for '#!' prefix and newline suffix.
+ shebang_length = len(executable) + len(post_interp) + 3
+ if sys.platform == 'darwin':
+ max_shebang_length = 512
+ else:
+ max_shebang_length = 127
+ simple_shebang = ((b' ' not in executable) and
+ (shebang_length <= max_shebang_length))
+
+ if simple_shebang:
+ result = b'#!' + executable + post_interp + b'\n'
+ else:
+ result = b'#!/bin/sh\n'
+ result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
+ result += b"' '''"
+ return result
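+
+    # Illustrative sketch (editor's note): for a long or space-containing
+    # POSIX interpreter path, the contrived form built above looks like:
+    #
+    #   #!/bin/sh
+    #   '''exec' /path with spaces/python "$0" "$@"
+    #   ' '''
+    #
+    # sh concatenates '''exec' into the word "exec" and re-executes the
+    # script under the real interpreter; Python instead sees those lines
+    # as a harmless triple-quoted string.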
+
+ def _get_shebang(self, encoding, post_interp=b'', options=None):
+ enquote = True
+ if self.executable:
+ executable = self.executable
+ enquote = False # assume this will be taken care of
+ elif not sysconfig.is_python_build():
+ executable = get_executable()
+ elif in_venv(): # pragma: no cover
+ executable = os.path.join(sysconfig.get_path('scripts'),
+ 'python%s' % sysconfig.get_config_var('EXE'))
+ else: # pragma: no cover
+ executable = os.path.join(
+ sysconfig.get_config_var('BINDIR'),
+ 'python%s%s' % (sysconfig.get_config_var('VERSION'),
+ sysconfig.get_config_var('EXE')))
+ if options:
+ executable = self._get_alternate_executable(executable, options)
+
+ if sys.platform.startswith('java'): # pragma: no cover
+ executable = self._fix_jython_executable(executable)
+ # Normalise case for Windows
+ executable = os.path.normcase(executable)
+ # If the user didn't specify an executable, it may be necessary to
+ # cater for executable paths with spaces (not uncommon on Windows)
+ if enquote:
+ executable = _enquote_executable(executable)
+ # Issue #51: don't use fsencode, since we later try to
+ # check that the shebang is decodable using utf-8.
+ executable = executable.encode('utf-8')
+ # in case of IronPython, play safe and enable frames support
+        if (sys.platform == 'cli' and b'-X:Frames' not in post_interp
+            and b'-X:FullFrames' not in post_interp): # pragma: no cover
+ post_interp += b' -X:Frames'
+ shebang = self._build_shebang(executable, post_interp)
+        # The Python parser reads a script as UTF-8 until it finds a
+        # #coding:xxx cookie. Since the shebang must be the first line
+        # of the file, the cookie cannot come before it, so the shebang
+        # has to be decodable from UTF-8.
+ try:
+ shebang.decode('utf-8')
+ except UnicodeDecodeError: # pragma: no cover
+ raise ValueError(
+ 'The shebang (%r) is not decodable from utf-8' % shebang)
+ # If the script is encoded to a custom encoding (use a
+ # #coding:xxx cookie), the shebang has to be decodable from
+ # the script encoding too.
+ if encoding != 'utf-8':
+ try:
+ shebang.decode(encoding)
+ except UnicodeDecodeError: # pragma: no cover
+ raise ValueError(
+ 'The shebang (%r) is not decodable '
+ 'from the script encoding (%r)' % (shebang, encoding))
+ return shebang
+
+ def _get_script_text(self, entry):
+ return self.script_template % dict(module=entry.prefix,
+ import_name=entry.suffix.split('.')[0],
+ func=entry.suffix)
+
+ manifest = _DEFAULT_MANIFEST
+
+ def get_manifest(self, exename):
+ base = os.path.basename(exename)
+ return self.manifest % base
+
+ def _write_script(self, names, shebang, script_bytes, filenames, ext):
+ use_launcher = self.add_launchers and self._is_nt
+ linesep = os.linesep.encode('utf-8')
+ if not shebang.endswith(linesep):
+ shebang += linesep
+ if not use_launcher:
+ script_bytes = shebang + script_bytes
+ else: # pragma: no cover
+ if ext == 'py':
+ launcher = self._get_launcher('t')
+ else:
+ launcher = self._get_launcher('w')
+ stream = BytesIO()
+ with ZipFile(stream, 'w') as zf:
+ zf.writestr('__main__.py', script_bytes)
+ zip_data = stream.getvalue()
+ script_bytes = launcher + shebang + zip_data
+ for name in names:
+ outname = os.path.join(self.target_dir, name)
+ if use_launcher: # pragma: no cover
+ n, e = os.path.splitext(outname)
+ if e.startswith('.py'):
+ outname = n
+ outname = '%s.exe' % outname
+ try:
+ self._fileop.write_binary_file(outname, script_bytes)
+ except Exception:
+ # Failed writing an executable - it might be in use.
+ logger.warning('Failed to write executable - trying to '
+ 'use .deleteme logic')
+ dfname = '%s.deleteme' % outname
+ if os.path.exists(dfname):
+ os.remove(dfname) # Not allowed to fail here
+ os.rename(outname, dfname) # nor here
+ self._fileop.write_binary_file(outname, script_bytes)
+ logger.debug('Able to replace executable using '
+ '.deleteme logic')
+ try:
+ os.remove(dfname)
+ except Exception:
+ pass # still in use - ignore error
+ else:
+ if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover
+ outname = '%s.%s' % (outname, ext)
+ if os.path.exists(outname) and not self.clobber:
+ logger.warning('Skipping existing file %s', outname)
+ continue
+ self._fileop.write_binary_file(outname, script_bytes)
+ if self.set_mode:
+ self._fileop.set_executable_mode([outname])
+ filenames.append(outname)
+
+ def _make_script(self, entry, filenames, options=None):
+ post_interp = b''
+ if options:
+ args = options.get('interpreter_args', [])
+ if args:
+ args = ' %s' % ' '.join(args)
+ post_interp = args.encode('utf-8')
+ shebang = self._get_shebang('utf-8', post_interp, options=options)
+ script = self._get_script_text(entry).encode('utf-8')
+ name = entry.name
+ scriptnames = set()
+ if '' in self.variants:
+ scriptnames.add(name)
+ if 'X' in self.variants:
+ scriptnames.add('%s%s' % (name, sys.version[0]))
+ if 'X.Y' in self.variants:
+ scriptnames.add('%s-%s' % (name, sys.version[:3]))
+ if options and options.get('gui', False):
+ ext = 'pyw'
+ else:
+ ext = 'py'
+ self._write_script(scriptnames, shebang, script, filenames, ext)
+
+ def _copy_script(self, script, filenames):
+ adjust = False
+ script = os.path.join(self.source_dir, convert_path(script))
+ outname = os.path.join(self.target_dir, os.path.basename(script))
+ if not self.force and not self._fileop.newer(script, outname):
+ logger.debug('not copying %s (up-to-date)', script)
+ return
+
+ # Always open the file, but ignore failures in dry-run mode --
+ # that way, we'll get accurate feedback if we can read the
+ # script.
+ try:
+ f = open(script, 'rb')
+ except IOError: # pragma: no cover
+ if not self.dry_run:
+ raise
+ f = None
+ else:
+ first_line = f.readline()
+ if not first_line: # pragma: no cover
+ logger.warning('%s: %s is an empty file (skipping)',
+ self.get_command_name(), script)
+ return
+
+ match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
+ if match:
+ adjust = True
+ post_interp = match.group(1) or b''
+
+ if not adjust:
+ if f:
+ f.close()
+ self._fileop.copy_file(script, outname)
+ if self.set_mode:
+ self._fileop.set_executable_mode([outname])
+ filenames.append(outname)
+ else:
+ logger.info('copying and adjusting %s -> %s', script,
+ self.target_dir)
+ if not self._fileop.dry_run:
+ encoding, lines = detect_encoding(f.readline)
+ f.seek(0)
+ shebang = self._get_shebang(encoding, post_interp)
+ if b'pythonw' in first_line: # pragma: no cover
+ ext = 'pyw'
+ else:
+ ext = 'py'
+ n = os.path.basename(outname)
+ self._write_script([n], shebang, f.read(), filenames, ext)
+ if f:
+ f.close()
+
+ @property
+ def dry_run(self):
+ return self._fileop.dry_run
+
+ @dry_run.setter
+ def dry_run(self, value):
+ self._fileop.dry_run = value
+
+ if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover
+ # Executable launcher support.
+ # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
+
+ def _get_launcher(self, kind):
+ if struct.calcsize('P') == 8: # 64-bit
+ bits = '64'
+ else:
+ bits = '32'
+ name = '%s%s.exe' % (kind, bits)
+ # Issue 31: don't hardcode an absolute package name, but
+ # determine it relative to the current package
+ distlib_package = __name__.rsplit('.', 1)[0]
+ result = finder(distlib_package).find(name).bytes
+ return result
+
+ # Public API follows
+
+ def make(self, specification, options=None):
+ """
+ Make a script.
+
+ :param specification: The specification, which is either a valid export
+ entry specification (to make a script from a
+ callable) or a filename (to make a script by
+ copying from a source location).
+ :param options: A dictionary of options controlling script generation.
+ :return: A list of all absolute pathnames written to.
+ """
+ filenames = []
+ entry = get_export_entry(specification)
+ if entry is None:
+ self._copy_script(specification, filenames)
+ else:
+ self._make_script(entry, filenames, options=options)
+ return filenames
+
+ def make_multiple(self, specifications, options=None):
+ """
+ Take a list of specifications and make scripts from them.
+
+ :param specifications: A list of specifications.
+ :param options: A dictionary of options controlling script generation.
+ :return: A list of all absolute pathnames written to.
+ """
+ filenames = []
+ for specification in specifications:
+ filenames.extend(self.make(specification, options))
+ return filenames
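+
+
+# A minimal usage sketch (hypothetical names and paths; by default the
+# maker would also write a 'foo-X.Y' variant, suppressed here). An export
+# entry specification yields a generated wrapper script, while a plain
+# filename is copied and its shebang adjusted:
+#
+#     >>> maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')
+#     >>> maker.variants = {''}
+#     >>> maker.make('foo = mypkg.cli:main')
+#     ['/tmp/bin/foo']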
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe
new file mode 100644
index 00000000..5d5bce1f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe
new file mode 100644
index 00000000..039ce441
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/util.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/util.py
new file mode 100644
index 00000000..e851146c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/util.py
@@ -0,0 +1,1760 @@
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+import codecs
+from collections import deque
+import contextlib
+import csv
+from glob import iglob as std_iglob
+import io
+import json
+import logging
+import os
+import py_compile
+import re
+import socket
+try:
+ import ssl
+except ImportError: # pragma: no cover
+ ssl = None
+import subprocess
+import sys
+import tarfile
+import tempfile
+import textwrap
+
+try:
+ import threading
+except ImportError: # pragma: no cover
+ import dummy_threading as threading
+import time
+
+from . import DistlibException
+from .compat import (string_types, text_type, shutil, raw_input, StringIO,
+ cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
+ splittype, HTTPHandler, BaseConfigurator, valid_ident,
+ Container, configparser, URLError, ZipFile, fsdecode,
+ unquote, urlparse)
+
+logger = logging.getLogger(__name__)
+
+#
+# Requirement parsing code as per PEP 508
+#
+
+IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
+VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
+COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
+MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
+OR = re.compile(r'^or\b\s*')
+AND = re.compile(r'^and\b\s*')
+NON_SPACE = re.compile(r'(\S+)\s*')
+STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
+
+
+def parse_marker(marker_string):
+ """
+ Parse a marker string and return a tuple containing a marker expression
+ and any remaining unparsed text.
+
+ The expression is a dictionary with keys "op", "lhs" and "rhs" for
+ non-terminals in the expression grammar, or a string. A string contained
+ in quotes is to be interpreted as a literal string, and a string not
+ contained in quotes is a variable (such as os_name).
+ """
+ def marker_var(remaining):
+ # either identifier, or literal string
+ m = IDENTIFIER.match(remaining)
+ if m:
+ result = m.groups()[0]
+ remaining = remaining[m.end():]
+ elif not remaining:
+ raise SyntaxError('unexpected end of input')
+ else:
+ q = remaining[0]
+ if q not in '\'"':
+ raise SyntaxError('invalid expression: %s' % remaining)
+ oq = '\'"'.replace(q, '')
+ remaining = remaining[1:]
+ parts = [q]
+ while remaining:
+ # either a string chunk, or oq, or q to terminate
+ if remaining[0] == q:
+ break
+ elif remaining[0] == oq:
+ parts.append(oq)
+ remaining = remaining[1:]
+ else:
+ m = STRING_CHUNK.match(remaining)
+ if not m:
+ raise SyntaxError('error in string literal: %s' % remaining)
+ parts.append(m.groups()[0])
+ remaining = remaining[m.end():]
+ else:
+ s = ''.join(parts)
+ raise SyntaxError('unterminated string: %s' % s)
+ parts.append(q)
+ result = ''.join(parts)
+ remaining = remaining[1:].lstrip() # skip past closing quote
+ return result, remaining
+
+ def marker_expr(remaining):
+ if remaining and remaining[0] == '(':
+ result, remaining = marker(remaining[1:].lstrip())
+ if remaining[0] != ')':
+ raise SyntaxError('unterminated parenthesis: %s' % remaining)
+ remaining = remaining[1:].lstrip()
+ else:
+ lhs, remaining = marker_var(remaining)
+ while remaining:
+ m = MARKER_OP.match(remaining)
+ if not m:
+ break
+ op = m.groups()[0]
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_var(remaining)
+ lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
+ result = lhs
+ return result, remaining
+
+ def marker_and(remaining):
+ lhs, remaining = marker_expr(remaining)
+ while remaining:
+ m = AND.match(remaining)
+ if not m:
+ break
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_expr(remaining)
+ lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
+ return lhs, remaining
+
+ def marker(remaining):
+ lhs, remaining = marker_and(remaining)
+ while remaining:
+ m = OR.match(remaining)
+ if not m:
+ break
+ remaining = remaining[m.end():]
+ rhs, remaining = marker_and(remaining)
+ lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
+ return lhs, remaining
+
+ return marker(marker_string)
+
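+# A usage sketch (illustrative): the parser returns the expression tree
+# together with any unparsed remainder:
+#
+#     >>> parse_marker('python_version >= "3.6" and os_name == "posix"')
+#     ({'op': 'and',
+#       'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'},
+#       'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}, '')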
+
+def parse_requirement(req):
+ """
+ Parse a requirement passed in as a string. Return a Container
+ whose attributes contain the various parts of the requirement.
+ """
+ remaining = req.strip()
+ if not remaining or remaining.startswith('#'):
+ return None
+ m = IDENTIFIER.match(remaining)
+ if not m:
+ raise SyntaxError('name expected: %s' % remaining)
+ distname = m.groups()[0]
+ remaining = remaining[m.end():]
+ extras = mark_expr = versions = uri = None
+ if remaining and remaining[0] == '[':
+ i = remaining.find(']', 1)
+ if i < 0:
+ raise SyntaxError('unterminated extra: %s' % remaining)
+ s = remaining[1:i]
+ remaining = remaining[i + 1:].lstrip()
+ extras = []
+ while s:
+ m = IDENTIFIER.match(s)
+ if not m:
+ raise SyntaxError('malformed extra: %s' % s)
+ extras.append(m.groups()[0])
+ s = s[m.end():]
+ if not s:
+ break
+ if s[0] != ',':
+ raise SyntaxError('comma expected in extras: %s' % s)
+ s = s[1:].lstrip()
+ if not extras:
+ extras = None
+ if remaining:
+ if remaining[0] == '@':
+ # it's a URI
+ remaining = remaining[1:].lstrip()
+ m = NON_SPACE.match(remaining)
+ if not m:
+ raise SyntaxError('invalid URI: %s' % remaining)
+ uri = m.groups()[0]
+ t = urlparse(uri)
+ # there are issues with Python and URL parsing, so this test
+ # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
+ # always parse invalid URLs correctly - it should raise
+ # exceptions for malformed URLs
+ if not (t.scheme and t.netloc):
+ raise SyntaxError('Invalid URL: %s' % uri)
+ remaining = remaining[m.end():].lstrip()
+ else:
+
+ def get_versions(ver_remaining):
+ """
+ Return a list of operator, version tuples if any are
+ specified, else None.
+ """
+ m = COMPARE_OP.match(ver_remaining)
+ versions = None
+ if m:
+ versions = []
+ while True:
+ op = m.groups()[0]
+ ver_remaining = ver_remaining[m.end():]
+ m = VERSION_IDENTIFIER.match(ver_remaining)
+ if not m:
+ raise SyntaxError('invalid version: %s' % ver_remaining)
+ v = m.groups()[0]
+ versions.append((op, v))
+ ver_remaining = ver_remaining[m.end():]
+ if not ver_remaining or ver_remaining[0] != ',':
+ break
+ ver_remaining = ver_remaining[1:].lstrip()
+ m = COMPARE_OP.match(ver_remaining)
+ if not m:
+ raise SyntaxError('invalid constraint: %s' % ver_remaining)
+ if not versions:
+ versions = None
+ return versions, ver_remaining
+
+ if remaining[0] != '(':
+ versions, remaining = get_versions(remaining)
+ else:
+ i = remaining.find(')', 1)
+ if i < 0:
+ raise SyntaxError('unterminated parenthesis: %s' % remaining)
+ s = remaining[1:i]
+ remaining = remaining[i + 1:].lstrip()
+ # As a special diversion from PEP 508, allow a version number
+ # a.b.c in parentheses as a synonym for ~= a.b.c (because this
+ # is allowed in earlier PEPs)
+ if COMPARE_OP.match(s):
+ versions, _ = get_versions(s)
+ else:
+ m = VERSION_IDENTIFIER.match(s)
+ if not m:
+ raise SyntaxError('invalid constraint: %s' % s)
+ v = m.groups()[0]
+ s = s[m.end():].lstrip()
+ if s:
+ raise SyntaxError('invalid constraint: %s' % s)
+ versions = [('~=', v)]
+
+ if remaining:
+ if remaining[0] != ';':
+ raise SyntaxError('invalid requirement: %s' % remaining)
+ remaining = remaining[1:].lstrip()
+
+ mark_expr, remaining = parse_marker(remaining)
+
+ if remaining and remaining[0] != '#':
+ raise SyntaxError('unexpected trailing data: %s' % remaining)
+
+ if not versions:
+ rs = distname
+ else:
+ rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
+ return Container(name=distname, extras=extras, constraints=versions,
+ marker=mark_expr, url=uri, requirement=rs)
+
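+# A usage sketch (illustrative values):
+#
+#     >>> r = parse_requirement('foo[bar,baz] >= 1.0, < 2.0; os_name == "posix"')
+#     >>> r.name, r.extras
+#     ('foo', ['bar', 'baz'])
+#     >>> r.constraints
+#     [('>=', '1.0'), ('<', '2.0')]
+#     >>> r.requirement
+#     'foo >= 1.0, < 2.0'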
+
+def get_resources_dests(resources_root, rules):
+ """Find destinations for resources files"""
+
+ def get_rel_path(root, path):
+ # normalizes and returns a lstripped-/-separated path
+ root = root.replace(os.path.sep, '/')
+ path = path.replace(os.path.sep, '/')
+ assert path.startswith(root)
+ return path[len(root):].lstrip('/')
+
+ destinations = {}
+ for base, suffix, dest in rules:
+ prefix = os.path.join(resources_root, base)
+ for abs_base in iglob(prefix):
+ abs_glob = os.path.join(abs_base, suffix)
+ for abs_path in iglob(abs_glob):
+ resource_file = get_rel_path(resources_root, abs_path)
+ if dest is None: # remove the entry if it was here
+ destinations.pop(resource_file, None)
+ else:
+ rel_path = get_rel_path(abs_base, abs_path)
+ rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
+ destinations[resource_file] = rel_dest + '/' + rel_path
+ return destinations
+
+
+def in_venv():
+ if hasattr(sys, 'real_prefix'):
+ # virtualenv venvs
+ result = True
+ else:
+ # PEP 405 venvs
+ result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
+ return result
+
+
+def get_executable():
+ # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
+ # changes to the stub launcher mean that sys.executable always points
+ # to the stub on OS X
+ # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
+ #                                  in os.environ):
+ #     result = os.environ['__PYVENV_LAUNCHER__']
+ # else:
+ #     result = sys.executable
+ # return result
+ result = os.path.normcase(sys.executable)
+ if not isinstance(result, text_type):
+ result = fsdecode(result)
+ return result
+
+
+def proceed(prompt, allowed_chars, error_prompt=None, default=None):
+ p = prompt
+ while True:
+ s = raw_input(p)
+ p = prompt
+ if not s and default:
+ s = default
+ if s:
+ c = s[0].lower()
+ if c in allowed_chars:
+ break
+ if error_prompt:
+ p = '%c: %s\n%s' % (c, error_prompt, prompt)
+ return c
+
+
+def extract_by_key(d, keys):
+ if isinstance(keys, string_types):
+ keys = keys.split()
+ result = {}
+ for key in keys:
+ if key in d:
+ result[key] = d[key]
+ return result
+
+def read_exports(stream):
+ if sys.version_info[0] >= 3:
+ # needs to be a text stream
+ stream = codecs.getreader('utf-8')(stream)
+ # Try to load as JSON, falling back on legacy format
+ data = stream.read()
+ stream = StringIO(data)
+ try:
+ jdata = json.load(stream)
+ result = jdata['extensions']['python.exports']['exports']
+ for group, entries in result.items():
+ for k, v in entries.items():
+ s = '%s = %s' % (k, v)
+ entry = get_export_entry(s)
+ assert entry is not None
+ entries[k] = entry
+ return result
+ except Exception:
+ stream.seek(0, 0)
+
+ def read_stream(cp, stream):
+ if hasattr(cp, 'read_file'):
+ cp.read_file(stream)
+ else:
+ cp.readfp(stream)
+
+ cp = configparser.ConfigParser()
+ try:
+ read_stream(cp, stream)
+ except configparser.MissingSectionHeaderError:
+ stream.close()
+ data = textwrap.dedent(data)
+ stream = StringIO(data)
+ read_stream(cp, stream)
+
+ result = {}
+ for key in cp.sections():
+ result[key] = entries = {}
+ for name, value in cp.items(key):
+ s = '%s = %s' % (name, value)
+ entry = get_export_entry(s)
+ assert entry is not None
+ #entry.dist = self
+ entries[name] = entry
+ return result
+
+
+def write_exports(exports, stream):
+ if sys.version_info[0] >= 3:
+ # needs to be a text stream
+ stream = codecs.getwriter('utf-8')(stream)
+ cp = configparser.ConfigParser()
+ for k, v in exports.items():
+ # TODO check k, v for valid values
+ cp.add_section(k)
+ for entry in v.values():
+ if entry.suffix is None:
+ s = entry.prefix
+ else:
+ s = '%s:%s' % (entry.prefix, entry.suffix)
+ if entry.flags:
+ s = '%s [%s]' % (s, ', '.join(entry.flags))
+ cp.set(k, entry.name, s)
+ cp.write(stream)
+
+
+@contextlib.contextmanager
+def tempdir():
+ td = tempfile.mkdtemp()
+ try:
+ yield td
+ finally:
+ shutil.rmtree(td)
+
+@contextlib.contextmanager
+def chdir(d):
+ cwd = os.getcwd()
+ try:
+ os.chdir(d)
+ yield
+ finally:
+ os.chdir(cwd)
+
+
+@contextlib.contextmanager
+def socket_timeout(seconds=15):
+ cto = socket.getdefaulttimeout()
+ try:
+ socket.setdefaulttimeout(seconds)
+ yield
+ finally:
+ socket.setdefaulttimeout(cto)
+
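+# Usage sketches for the context managers above (paths are illustrative):
+#
+#     >>> with tempdir() as td:            # created, then removed on exit
+#     ...     path = os.path.join(td, 'scratch.txt')
+#     >>> with chdir('/tmp'):              # cwd restored on exit
+#     ...     pass
+#     >>> with socket_timeout(5):          # default timeout restored on exit
+#     ...     pass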
+
+class cached_property(object):
+ def __init__(self, func):
+ self.func = func
+ #for attr in ('__name__', '__module__', '__doc__'):
+ # setattr(self, attr, getattr(func, attr, None))
+
+ def __get__(self, obj, cls=None):
+ if obj is None:
+ return self
+ value = self.func(obj)
+ object.__setattr__(obj, self.func.__name__, value)
+ #obj.__dict__[self.func.__name__] = value = self.func(obj)
+ return value
+
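+# A usage sketch: the value is computed once, then stored on the instance,
+# so later accesses bypass the descriptor entirely:
+#
+#     >>> class Example(object):
+#     ...     @cached_property
+#     ...     def value(self):
+#     ...         print('computing')
+#     ...         return 42
+#     >>> e = Example()
+#     >>> e.value
+#     computing
+#     42
+#     >>> e.value    # cached: no recomputation
+#     42
+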
+def convert_path(pathname):
+ """Return 'pathname' as a name that will work on the native filesystem.
+
+ The path is split on '/' and put back together again using the current
+ directory separator. Needed because filenames in the setup script are
+ always supplied in Unix style, and have to be converted to the local
+ convention before we can actually use them in the filesystem. Raises
+ ValueError on non-Unix-ish systems if 'pathname' either starts or
+ ends with a slash.
+ """
+ if os.sep == '/':
+ return pathname
+ if not pathname:
+ return pathname
+ if pathname[0] == '/':
+ raise ValueError("path '%s' cannot be absolute" % pathname)
+ if pathname[-1] == '/':
+ raise ValueError("path '%s' cannot end with '/'" % pathname)
+
+ paths = pathname.split('/')
+ while os.curdir in paths:
+ paths.remove(os.curdir)
+ if not paths:
+ return os.curdir
+ return os.path.join(*paths)
+
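+# For illustration, on a Windows host:
+#
+#     >>> convert_path('pkg/data/config.ini')
+#     'pkg\\data\\config.ini'
+#
+# On POSIX (os.sep == '/') the path is returned unchanged.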
+
+class FileOperator(object):
+ def __init__(self, dry_run=False):
+ self.dry_run = dry_run
+ self.ensured = set()
+ self._init_record()
+
+ def _init_record(self):
+ self.record = False
+ self.files_written = set()
+ self.dirs_created = set()
+
+ def record_as_written(self, path):
+ if self.record:
+ self.files_written.add(path)
+
+ def newer(self, source, target):
+ """Tell if the target is newer than the source.
+
+ Returns true if 'source' exists and is more recently modified than
+ 'target', or if 'source' exists and 'target' doesn't.
+
+ Returns false if both exist and 'target' is the same age or younger
+ than 'source'. Raises DistlibException if 'source' does not exist.
+
+ Note that this test is not very accurate: files created in the same
+ second will have the same "age".
+ """
+ if not os.path.exists(source):
+ raise DistlibException("file '%r' does not exist" %
+ os.path.abspath(source))
+ if not os.path.exists(target):
+ return True
+
+ return os.stat(source).st_mtime > os.stat(target).st_mtime
+
+ def copy_file(self, infile, outfile, check=True):
+ """Copy a file respecting dry-run and force flags.
+ """
+ self.ensure_dir(os.path.dirname(outfile))
+ logger.info('Copying %s to %s', infile, outfile)
+ if not self.dry_run:
+ msg = None
+ if check:
+ if os.path.islink(outfile):
+ msg = '%s is a symlink' % outfile
+ elif os.path.exists(outfile) and not os.path.isfile(outfile):
+ msg = '%s is a non-regular file' % outfile
+ if msg:
+ raise ValueError(msg + ' which would be overwritten')
+ shutil.copyfile(infile, outfile)
+ self.record_as_written(outfile)
+
+ def copy_stream(self, instream, outfile, encoding=None):
+ assert not os.path.isdir(outfile)
+ self.ensure_dir(os.path.dirname(outfile))
+ logger.info('Copying stream %s to %s', instream, outfile)
+ if not self.dry_run:
+ if encoding is None:
+ outstream = open(outfile, 'wb')
+ else:
+ outstream = codecs.open(outfile, 'w', encoding=encoding)
+ try:
+ shutil.copyfileobj(instream, outstream)
+ finally:
+ outstream.close()
+ self.record_as_written(outfile)
+
+ def write_binary_file(self, path, data):
+ self.ensure_dir(os.path.dirname(path))
+ if not self.dry_run:
+ if os.path.exists(path):
+ os.remove(path)
+ with open(path, 'wb') as f:
+ f.write(data)
+ self.record_as_written(path)
+
+ def write_text_file(self, path, data, encoding):
+ self.write_binary_file(path, data.encode(encoding))
+
+ def set_mode(self, bits, mask, files):
+ if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
+ # Set the executable bits (owner, group, and world) on
+ # all the files specified.
+ for f in files:
+ if self.dry_run:
+ logger.info("changing mode of %s", f)
+ else:
+ mode = (os.stat(f).st_mode | bits) & mask
+ logger.info("changing mode of %s to %o", f, mode)
+ os.chmod(f, mode)
+
+ set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
+
+ def ensure_dir(self, path):
+ path = os.path.abspath(path)
+ if path not in self.ensured and not os.path.exists(path):
+ self.ensured.add(path)
+ d, f = os.path.split(path)
+ self.ensure_dir(d)
+ logger.info('Creating %s' % path)
+ if not self.dry_run:
+ os.mkdir(path)
+ if self.record:
+ self.dirs_created.add(path)
+
+ def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
+ dpath = cache_from_source(path, not optimize)
+ logger.info('Byte-compiling %s to %s', path, dpath)
+ if not self.dry_run:
+ if force or self.newer(path, dpath):
+ if not prefix:
+ diagpath = None
+ else:
+ assert path.startswith(prefix)
+ diagpath = path[len(prefix):]
+ compile_kwargs = {}
+ if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
+ compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
+ py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # doraise=True
+ self.record_as_written(dpath)
+ return dpath
+
+ def ensure_removed(self, path):
+ if os.path.exists(path):
+ if os.path.isdir(path) and not os.path.islink(path):
+ logger.debug('Removing directory tree at %s', path)
+ if not self.dry_run:
+ shutil.rmtree(path)
+ if self.record:
+ if path in self.dirs_created:
+ self.dirs_created.remove(path)
+ else:
+ if os.path.islink(path):
+ s = 'link'
+ else:
+ s = 'file'
+ logger.debug('Removing %s %s', s, path)
+ if not self.dry_run:
+ os.remove(path)
+ if self.record:
+ if path in self.files_written:
+ self.files_written.remove(path)
+
+ def is_writable(self, path):
+ result = False
+ while not result:
+ if os.path.exists(path):
+ result = os.access(path, os.W_OK)
+ break
+ parent = os.path.dirname(path)
+ if parent == path:
+ break
+ path = parent
+ return result
+
+ def commit(self):
+ """
+ Commit recorded changes, turn off recording, return
+ changes.
+ """
+ assert self.record
+ result = self.files_written, self.dirs_created
+ self._init_record()
+ return result
+
+ def rollback(self):
+ if not self.dry_run:
+ for f in list(self.files_written):
+ if os.path.exists(f):
+ os.remove(f)
+ # dirs should all be empty now, except perhaps for
+ # __pycache__ subdirs
+ # reverse so that subdirs appear before their parents
+ dirs = sorted(self.dirs_created, reverse=True)
+ for d in dirs:
+ flist = os.listdir(d)
+ if flist:
+ assert flist == ['__pycache__']
+ sd = os.path.join(d, flist[0])
+ os.rmdir(sd)
+ os.rmdir(d) # should fail if non-empty
+ self._init_record()
+
+def resolve(module_name, dotted_path):
+ if module_name in sys.modules:
+ mod = sys.modules[module_name]
+ else:
+ mod = __import__(module_name)
+ if dotted_path is None:
+ result = mod
+ else:
+ parts = dotted_path.split('.')
+ result = getattr(mod, parts.pop(0))
+ for p in parts:
+ result = getattr(result, p)
+ return result
+
+
+class ExportEntry(object):
+ def __init__(self, name, prefix, suffix, flags):
+ self.name = name
+ self.prefix = prefix
+ self.suffix = suffix
+ self.flags = flags
+
+ @cached_property
+ def value(self):
+ return resolve(self.prefix, self.suffix)
+
+ def __repr__(self): # pragma: no cover
+ return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
+ self.suffix, self.flags)
+
+ def __eq__(self, other):
+ if not isinstance(other, ExportEntry):
+ result = False
+ else:
+ result = (self.name == other.name and
+ self.prefix == other.prefix and
+ self.suffix == other.suffix and
+ self.flags == other.flags)
+ return result
+
+ __hash__ = object.__hash__
+
+
+ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
+ \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
+ \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
+ ''', re.VERBOSE)
+
+def get_export_entry(specification):
+ m = ENTRY_RE.search(specification)
+ if not m:
+ result = None
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ else:
+ d = m.groupdict()
+ name = d['name']
+ path = d['callable']
+ colons = path.count(':')
+ if colons == 0:
+ prefix, suffix = path, None
+ else:
+ if colons != 1:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ prefix, suffix = path.split(':')
+ flags = d['flags']
+ if flags is None:
+ if '[' in specification or ']' in specification:
+ raise DistlibException("Invalid specification "
+ "'%s'" % specification)
+ flags = []
+ else:
+ flags = [f.strip() for f in flags.split(',')]
+ result = ExportEntry(name, prefix, suffix, flags)
+ return result
+
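+# A usage sketch (illustrative names):
+#
+#     >>> e = get_export_entry('foo = mypkg.cli:main [flag1, flag2]')
+#     >>> e.name, e.prefix, e.suffix, e.flags
+#     ('foo', 'mypkg.cli', 'main', ['flag1', 'flag2'])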
+
+def get_cache_base(suffix=None):
+ """
+ Return the default base location for distlib caches. If the directory does
+ not exist, it is created. Use the suffix provided for the base directory,
+ and default to '.distlib' if it isn't provided.
+
+ On Windows, if LOCALAPPDATA is defined in the environment, then it is
+ assumed to be a directory, and will be the parent directory of the result.
+ On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
+ directory - using os.path.expanduser('~') - will be the parent directory of
+ the result.
+
+ The result is just the directory '.distlib' in the parent directory as
+ determined above, or with the name specified with ``suffix``.
+ """
+ if suffix is None:
+ suffix = '.distlib'
+ if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
+ result = os.path.expandvars('$localappdata')
+ else:
+ # Assume posix, or old Windows
+ result = os.path.expanduser('~')
+ # we use 'isdir' instead of 'exists', because we want to
+ # fail if there's a file with that name
+ if os.path.isdir(result):
+ usable = os.access(result, os.W_OK)
+ if not usable:
+ logger.warning('Directory exists but is not writable: %s', result)
+ else:
+ try:
+ os.makedirs(result)
+ usable = True
+ except OSError:
+ logger.warning('Unable to create %s', result, exc_info=True)
+ usable = False
+ if not usable:
+ result = tempfile.mkdtemp()
+ logger.warning('Default location unusable, using %s', result)
+ return os.path.join(result, suffix)
+
+
+def path_to_cache_dir(path):
+ """
+ Convert an absolute path to a directory name for use in a cache.
+
+ The algorithm used is:
+
+ #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
+ #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
+ #. ``'.cache'`` is appended.
+ """
+ d, p = os.path.splitdrive(os.path.abspath(path))
+ if d:
+ d = d.replace(':', '---')
+ p = p.replace(os.sep, '--')
+ return d + p + '.cache'
+
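+# For example, on POSIX:
+#
+#     >>> path_to_cache_dir('/home/user/project')
+#     '--home--user--project.cache'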
+
+def ensure_slash(s):
+ if not s.endswith('/'):
+ return s + '/'
+ return s
+
+
+def parse_credentials(netloc):
+ username = password = None
+ if '@' in netloc:
+ prefix, netloc = netloc.rsplit('@', 1)
+ if ':' not in prefix:
+ username = prefix
+ else:
+ username, password = prefix.split(':', 1)
+ if username:
+ username = unquote(username)
+ if password:
+ password = unquote(password)
+ return username, password, netloc
+
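+# A usage sketch:
+#
+#     >>> parse_credentials('user:secret@example.com')
+#     ('user', 'secret', 'example.com')
+#     >>> parse_credentials('example.com')
+#     (None, None, 'example.com')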
+
+def get_process_umask():
+ result = os.umask(0o22)
+ os.umask(result)
+ return result
+
+def is_string_sequence(seq):
+ result = True
+ i = None
+ for i, s in enumerate(seq):
+ if not isinstance(s, string_types):
+ result = False
+ break
+ assert i is not None
+ return result
+
+PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
+ '([a-z0-9_.+-]+)', re.I)
+PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
+
+
+def split_filename(filename, project_name=None):
+ """
+ Extract name, version, python version from a filename (no extension)
+
+ Return name, version, pyver or None
+ """
+ result = None
+ pyver = None
+ filename = unquote(filename).replace(' ', '-')
+ m = PYTHON_VERSION.search(filename)
+ if m:
+ pyver = m.group(1)
+ filename = filename[:m.start()]
+ if project_name and len(filename) > len(project_name) + 1:
+ m = re.match(re.escape(project_name) + r'\b', filename)
+ if m:
+ n = m.end()
+ result = filename[:n], filename[n + 1:], pyver
+ if result is None:
+ m = PROJECT_NAME_AND_VERSION.match(filename)
+ if m:
+ result = m.group(1), m.group(3), pyver
+ return result
+
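+# A usage sketch (hypothetical distribution name):
+#
+#     >>> split_filename('mypkg-1.0.2-py3.7', project_name='mypkg')
+#     ('mypkg', '1.0.2', '3.7')
+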
+# Allow spaces in name because of legacy dists like "Twisted Core"
+NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
+ r'\(\s*(?P<ver>[^\s)]+)\)$')
+
+def parse_name_and_version(p):
+ """
+ A utility method used to get name and version from a string.
+
+ From e.g. a Provides-Dist value.
+
+ :param p: A value in a form 'foo (1.0)'
+ :return: The name and version as a tuple.
+ """
+ m = NAME_VERSION_RE.match(p)
+ if not m:
+ raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
+ d = m.groupdict()
+ return d['name'].strip().lower(), d['ver']
+
+def get_extras(requested, available):
+ result = set()
+ requested = set(requested or [])
+ available = set(available or [])
+ if '*' in requested:
+ requested.remove('*')
+ result |= available
+ for r in requested:
+ if r == '-':
+ result.add(r)
+ elif r.startswith('-'):
+ unwanted = r[1:]
+ if unwanted not in available:
+ logger.warning('undeclared extra: %s' % unwanted)
+ if unwanted in result:
+ result.remove(unwanted)
+ else:
+ if r not in available:
+ logger.warning('undeclared extra: %s' % r)
+ result.add(r)
+ return result
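+
+# A usage sketch: '*' expands to all available extras, and a '-' prefix
+# removes one again:
+#
+#     >>> sorted(get_extras(['*', '-tests'], ['docs', 'tests']))
+#     ['docs']
+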
+#
+# Extended metadata functionality
+#
+
+def _get_external_data(url):
+ result = {}
+ try:
+ # urlopen might fail if it runs into redirections,
+ # because of Python issue #13696. Fixed in locators
+ # using a custom redirect handler.
+ resp = urlopen(url)
+ headers = resp.info()
+ ct = headers.get('Content-Type')
+ if not ct.startswith('application/json'):
+ logger.debug('Unexpected response for JSON request: %s', ct)
+ else:
+ reader = codecs.getreader('utf-8')(resp)
+ #data = reader.read().decode('utf-8')
+ #result = json.loads(data)
+ result = json.load(reader)
+ except Exception as e:
+ logger.exception('Failed to get external data for %s: %s', url, e)
+ return result
+
+_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'
+
+def get_project_data(name):
+ url = '%s/%s/project.json' % (name[0].upper(), name)
+ url = urljoin(_external_data_base_url, url)
+ result = _get_external_data(url)
+ return result
+
+def get_package_data(name, version):
+ url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
+ url = urljoin(_external_data_base_url, url)
+ return _get_external_data(url)
+
+
+class Cache(object):
+ """
+ A class implementing a cache for resources that need to live in the file
+ system, e.g. shared libraries. This class was moved from resources to this
+ module because it could be used by other modules, e.g. the wheel module.
+ """
+
+ def __init__(self, base):
+ """
+ Initialise an instance.
+
+ :param base: The base directory where the cache should be located.
+ """
+ # we use 'isdir' instead of 'exists', because we want to
+ # fail if there's a file with that name
+ if not os.path.isdir(base): # pragma: no cover
+ os.makedirs(base)
+ if (os.stat(base).st_mode & 0o77) != 0:
+ logger.warning('Directory \'%s\' is not private', base)
+ self.base = os.path.abspath(os.path.normpath(base))
+
+ def prefix_to_dir(self, prefix):
+ """
+ Converts a resource prefix to a directory name in the cache.
+ """
+ return path_to_cache_dir(prefix)
+
+ def clear(self):
+ """
+ Clear the cache.
+ """
+ not_removed = []
+ for fn in os.listdir(self.base):
+ fn = os.path.join(self.base, fn)
+ try:
+ if os.path.islink(fn) or os.path.isfile(fn):
+ os.remove(fn)
+ elif os.path.isdir(fn):
+ shutil.rmtree(fn)
+ except Exception:
+ not_removed.append(fn)
+ return not_removed
+
+
+class EventMixin(object):
+ """
+ A very simple publish/subscribe system.
+ """
+ def __init__(self):
+ self._subscribers = {}
+
+ def add(self, event, subscriber, append=True):
+ """
+ Add a subscriber for an event.
+
+ :param event: The name of an event.
+ :param subscriber: The subscriber to be added (and called when the
+ event is published).
+ :param append: Whether to append or prepend the subscriber to an
+ existing subscriber list for the event.
+ """
+ subs = self._subscribers
+ if event not in subs:
+ subs[event] = deque([subscriber])
+ else:
+ sq = subs[event]
+ if append:
+ sq.append(subscriber)
+ else:
+ sq.appendleft(subscriber)
+
+ def remove(self, event, subscriber):
+ """
+ Remove a subscriber for an event.
+
+ :param event: The name of an event.
+ :param subscriber: The subscriber to be removed.
+ """
+ subs = self._subscribers
+ if event not in subs:
+ raise ValueError('No subscribers: %r' % event)
+ subs[event].remove(subscriber)
+
+ def get_subscribers(self, event):
+ """
+ Return an iterator for the subscribers for an event.
+ :param event: The event to return subscribers for.
+ """
+ return iter(self._subscribers.get(event, ()))
+
+ def publish(self, event, *args, **kwargs):
+ """
+ Publish an event and return a list of values returned by its
+ subscribers.
+
+ :param event: The event to publish.
+ :param args: The positional arguments to pass to the event's
+ subscribers.
+ :param kwargs: The keyword arguments to pass to the event's
+ subscribers.
+ """
+ result = []
+ for subscriber in self.get_subscribers(event):
+ try:
+ value = subscriber(event, *args, **kwargs)
+ except Exception:
+ logger.exception('Exception during event publication')
+ value = None
+ result.append(value)
+ logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
+ event, args, kwargs, result)
+ return result
+
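+# A minimal pub/sub sketch (hypothetical event and subscriber):
+#
+#     >>> class Source(EventMixin):
+#     ...     pass
+#     >>> def on_ping(event, *args, **kwargs):
+#     ...     return 'pong'
+#     >>> s = Source()
+#     >>> s.add('ping', on_ping)
+#     >>> s.publish('ping')
+#     ['pong']
+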
+#
+# Simple sequencing
+#
+class Sequencer(object):
+ def __init__(self):
+ self._preds = {}
+ self._succs = {}
+ self._nodes = set() # nodes with no preds/succs
+
+ def add_node(self, node):
+ self._nodes.add(node)
+
+ def remove_node(self, node, edges=False):
+ if node in self._nodes:
+ self._nodes.remove(node)
+ if edges:
+ for p in set(self._preds.get(node, ())):
+ self.remove(p, node)
+ for s in set(self._succs.get(node, ())):
+ self.remove(node, s)
+ # Remove empties
+ for k, v in list(self._preds.items()):
+ if not v:
+ del self._preds[k]
+ for k, v in list(self._succs.items()):
+ if not v:
+ del self._succs[k]
+
+ def add(self, pred, succ):
+ assert pred != succ
+ self._preds.setdefault(succ, set()).add(pred)
+ self._succs.setdefault(pred, set()).add(succ)
+
+ def remove(self, pred, succ):
+ assert pred != succ
+ try:
+ preds = self._preds[succ]
+ succs = self._succs[pred]
+ except KeyError: # pragma: no cover
+ raise ValueError('%r not a successor of anything' % succ)
+ try:
+ preds.remove(pred)
+ succs.remove(succ)
+ except KeyError: # pragma: no cover
+ raise ValueError('%r not a successor of %r' % (succ, pred))
+
+ def is_step(self, step):
+ return (step in self._preds or step in self._succs or
+ step in self._nodes)
+
+ def get_steps(self, final):
+ if not self.is_step(final):
+ raise ValueError('Unknown: %r' % final)
+ result = []
+ todo = []
+ seen = set()
+ todo.append(final)
+ while todo:
+ step = todo.pop(0)
+ if step in seen:
+ # if a step was already seen,
+ # move it to the end (so it will appear earlier
+ # when reversed on return) ... but not for the
+ # final step, as that would be confusing for
+ # users
+ if step != final:
+ result.remove(step)
+ result.append(step)
+ else:
+ seen.add(step)
+ result.append(step)
+ preds = self._preds.get(step, ())
+ todo.extend(preds)
+ return reversed(result)
+
+ @property
+ def strong_connections(self):
+ #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
+ index_counter = [0]
+ stack = []
+ lowlinks = {}
+ index = {}
+ result = []
+
+ graph = self._succs
+
+ def strongconnect(node):
+ # set the depth index for this node to the smallest unused index
+ index[node] = index_counter[0]
+ lowlinks[node] = index_counter[0]
+ index_counter[0] += 1
+ stack.append(node)
+
+ # Consider successors
+ try:
+ successors = graph[node]
+ except Exception:
+ successors = []
+ for successor in successors:
+ if successor not in lowlinks:
+ # Successor has not yet been visited
+ strongconnect(successor)
+ lowlinks[node] = min(lowlinks[node],lowlinks[successor])
+ elif successor in stack:
+ # the successor is in the stack and hence in the current
+ # strongly connected component (SCC)
+ lowlinks[node] = min(lowlinks[node],index[successor])
+
+ # If `node` is a root node, pop the stack and generate an SCC
+ if lowlinks[node] == index[node]:
+ connected_component = []
+
+ while True:
+ successor = stack.pop()
+ connected_component.append(successor)
+ if successor == node: break
+ component = tuple(connected_component)
+ # storing the result
+ result.append(component)
+
+ for node in graph:
+ if node not in lowlinks:
+ strongconnect(node)
+
+ return result
+
+ @property
+ def dot(self):
+ result = ['digraph G {']
+ for succ in self._preds:
+ preds = self._preds[succ]
+ for pred in preds:
+ result.append(' %s -> %s;' % (pred, succ))
+ for node in self._nodes:
+ result.append(' %s;' % node)
+ result.append('}')
+ return '\n'.join(result)
+
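+# A usage sketch: record that 'build' precedes 'test' and 'test' precedes
+# 'release', then ask for the steps leading to 'release':
+#
+#     >>> seq = Sequencer()
+#     >>> seq.add('build', 'test')
+#     >>> seq.add('test', 'release')
+#     >>> list(seq.get_steps('release'))
+#     ['build', 'test', 'release']
+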
+#
+# Unarchiving functionality for zip, tar, tgz, tbz, whl
+#
+
+ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
+ '.tgz', '.tbz', '.whl')
+
+def unarchive(archive_filename, dest_dir, format=None, check=True):
+
+ def check_path(path):
+ if not isinstance(path, text_type):
+ path = path.decode('utf-8')
+ p = os.path.abspath(os.path.join(dest_dir, path))
+ if not p.startswith(dest_dir) or p[plen] != os.sep:
+ raise ValueError('path outside destination: %r' % p)
+
+ dest_dir = os.path.abspath(dest_dir)
+ plen = len(dest_dir)
+ archive = None
+ if format is None:
+ if archive_filename.endswith(('.zip', '.whl')):
+ format = 'zip'
+ elif archive_filename.endswith(('.tar.gz', '.tgz')):
+ format = 'tgz'
+ mode = 'r:gz'
+ elif archive_filename.endswith(('.tar.bz2', '.tbz')):
+ format = 'tbz'
+ mode = 'r:bz2'
+ elif archive_filename.endswith('.tar'):
+ format = 'tar'
+ mode = 'r'
+ else: # pragma: no cover
+ raise ValueError('Unknown format for %r' % archive_filename)
+ try:
+ if format == 'zip':
+ archive = ZipFile(archive_filename, 'r')
+ if check:
+ names = archive.namelist()
+ for name in names:
+ check_path(name)
+ else:
+ archive = tarfile.open(archive_filename, mode)
+ if check:
+ names = archive.getnames()
+ for name in names:
+ check_path(name)
+ if format != 'zip' and sys.version_info[0] < 3:
+ # See Python issue 17153. If the dest path contains Unicode,
+ # tarfile extraction fails on Python 2.x if a member path name
+ # contains non-ASCII characters - it leads to an implicit
+ # bytes -> unicode conversion using ASCII to decode.
+ for tarinfo in archive.getmembers():
+ if not isinstance(tarinfo.name, text_type):
+ tarinfo.name = tarinfo.name.decode('utf-8')
+ archive.extractall(dest_dir)
+
+ finally:
+ if archive:
+ archive.close()
+
+
+def zip_dir(directory):
+ """zip a directory tree into a BytesIO object"""
+ result = io.BytesIO()
+ dlen = len(directory)
+ with ZipFile(result, "w") as zf:
+ for root, dirs, files in os.walk(directory):
+ for name in files:
+ full = os.path.join(root, name)
+ rel = root[dlen:]
+ dest = os.path.join(rel, name)
+ zf.write(full, dest)
+ return result
+
+#
+# Simple progress bar
+#
+
+UNITS = ('', 'K', 'M', 'G','T','P')
+
+
+class Progress(object):
+ unknown = 'UNKNOWN'
+
+ def __init__(self, minval=0, maxval=100):
+ assert maxval is None or maxval >= minval
+ self.min = self.cur = minval
+ self.max = maxval
+ self.started = None
+ self.elapsed = 0
+ self.done = False
+
+ def update(self, curval):
+ assert self.min <= curval
+ assert self.max is None or curval <= self.max
+ self.cur = curval
+ now = time.time()
+ if self.started is None:
+ self.started = now
+ else:
+ self.elapsed = now - self.started
+
+ def increment(self, incr):
+ assert incr >= 0
+ self.update(self.cur + incr)
+
+ def start(self):
+ self.update(self.min)
+ return self
+
+ def stop(self):
+ if self.max is not None:
+ self.update(self.max)
+ self.done = True
+
+ @property
+ def maximum(self):
+ return self.unknown if self.max is None else self.max
+
+ @property
+ def percentage(self):
+ if self.done:
+ result = '100 %'
+ elif self.max is None:
+ result = ' ?? %'
+ else:
+ v = 100.0 * (self.cur - self.min) / (self.max - self.min)
+ result = '%3d %%' % v
+ return result
+
+ def format_duration(self, duration):
+ if ((duration <= 0) and self.max is None) or self.cur == self.min:
+ result = '??:??:??'
+ else:
+ result = time.strftime('%H:%M:%S', time.gmtime(duration))
+ return result
+
+ @property
+ def ETA(self):
+ if self.done:
+ prefix = 'Done'
+ t = self.elapsed
+ else:
+ prefix = 'ETA '
+ if self.max is None:
+ t = -1
+ elif self.elapsed == 0 or (self.cur == self.min):
+ t = 0
+ else:
+ t = float(self.max - self.min)
+ t /= self.cur - self.min
+ t = (t - 1) * self.elapsed
+ return '%s: %s' % (prefix, self.format_duration(t))
+
+ @property
+ def speed(self):
+ if self.elapsed == 0:
+ result = 0.0
+ else:
+ result = (self.cur - self.min) / self.elapsed
+ for unit in UNITS:
+ if result < 1000:
+ break
+ result /= 1000.0
+ return '%d %sB/s' % (result, unit)
+
+#
+# Glob functionality
+#
+
+RICH_GLOB = re.compile(r'\{([^}]*)\}')
+_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
+_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
+
+
+def iglob(path_glob):
+ """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
+ if _CHECK_RECURSIVE_GLOB.search(path_glob):
+ msg = """invalid glob %r: recursive glob "**" must be used alone"""
+ raise ValueError(msg % path_glob)
+ if _CHECK_MISMATCH_SET.search(path_glob):
+ msg = """invalid glob %r: mismatching set marker '{' or '}'"""
+ raise ValueError(msg % path_glob)
+ return _iglob(path_glob)
+
+
+def _iglob(path_glob):
+ rich_path_glob = RICH_GLOB.split(path_glob, 1)
+ if len(rich_path_glob) > 1:
+ assert len(rich_path_glob) == 3, rich_path_glob
+ prefix, group, suffix = rich_path_glob
+ for item in group.split(','):
+ for path in _iglob(''.join((prefix, item, suffix))):
+ yield path
+ else:
+ if '**' not in path_glob:
+ for item in std_iglob(path_glob):
+ yield item
+ else:
+ prefix, radical = path_glob.split('**', 1)
+ if prefix == '':
+ prefix = '.'
+ if radical == '':
+ radical = '*'
+ else:
+ # we support both
+ radical = radical.lstrip('/')
+ radical = radical.lstrip('\\')
+ for path, dir, files in os.walk(prefix):
+ path = os.path.normpath(path)
+ for fn in _iglob(os.path.join(path, radical)):
+ yield fn
+
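+# Usage sketches (paths are hypothetical; results depend on the
+# filesystem):
+#
+#     list(iglob('src/**/*.py'))         # recursive match under src/
+#     list(iglob('{docs,tests}/*.rst'))  # brace alternation
+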
+if ssl:
+ from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
+ CertificateError)
+
+
+#
+# HTTPSConnection which verifies certificates/matches domains
+#
+
+ class HTTPSConnection(httplib.HTTPSConnection):
+ ca_certs = None # set this to the path to the certs file (.pem)
+ check_domain = True # only used if ca_certs is not None
+
+ # noinspection PyPropertyAccess
+ def connect(self):
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+ if getattr(self, '_tunnel_host', False):
+ self.sock = sock
+ self._tunnel()
+
+ if not hasattr(ssl, 'SSLContext'):
+ # For 2.x
+ if self.ca_certs:
+ cert_reqs = ssl.CERT_REQUIRED
+ else:
+ cert_reqs = ssl.CERT_NONE
+ self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
+ cert_reqs=cert_reqs,
+ ssl_version=ssl.PROTOCOL_SSLv23,
+ ca_certs=self.ca_certs)
+ else: # pragma: no cover
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ context.options |= ssl.OP_NO_SSLv2
+ if self.cert_file:
+ context.load_cert_chain(self.cert_file, self.key_file)
+ kwargs = {}
+ if self.ca_certs:
+ context.verify_mode = ssl.CERT_REQUIRED
+ context.load_verify_locations(cafile=self.ca_certs)
+ if getattr(ssl, 'HAS_SNI', False):
+ kwargs['server_hostname'] = self.host
+ self.sock = context.wrap_socket(sock, **kwargs)
+ if self.ca_certs and self.check_domain:
+ try:
+ match_hostname(self.sock.getpeercert(), self.host)
+ logger.debug('Host verified: %s', self.host)
+ except CertificateError: # pragma: no cover
+ self.sock.shutdown(socket.SHUT_RDWR)
+ self.sock.close()
+ raise
+
+ class HTTPSHandler(BaseHTTPSHandler):
+ def __init__(self, ca_certs, check_domain=True):
+ BaseHTTPSHandler.__init__(self)
+ self.ca_certs = ca_certs
+ self.check_domain = check_domain
+
+ def _conn_maker(self, *args, **kwargs):
+ """
+ This is called to create a connection instance. Normally you'd
+ pass a connection class to do_open, but it doesn't actually check for
+ a class, and just expects a callable. As long as we behave just as a
+ constructor would have, we should be OK. If it ever changes so that
+ we *must* pass a class, we'll create an UnsafeHTTPSConnection class
+ which just sets check_domain to False in the class definition, and
+ choose which one to pass to do_open.
+ """
+ result = HTTPSConnection(*args, **kwargs)
+ if self.ca_certs:
+ result.ca_certs = self.ca_certs
+ result.check_domain = self.check_domain
+ return result
+
+ def https_open(self, req):
+ try:
+ return self.do_open(self._conn_maker, req)
+ except URLError as e:
+ if 'certificate verify failed' in str(e.reason):
+ raise CertificateError('Unable to verify server certificate '
+ 'for %s' % req.host)
+ else:
+ raise
+
+ #
+ # To guard against mixing HTTP traffic with HTTPS (examples: a
+ # man-in-the-middle proxy using HTTP listening on port 443, or an index
+ # mistakenly serving HTML containing an http://xyz link when it should be
+ # https://xyz), you can use the following handler class, which does not
+ # allow HTTP traffic.
+ #
+ # It works by inheriting from HTTPHandler - so build_opener won't add a
+ # handler for HTTP itself.
+ #
+ class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
+ def http_open(self, req):
+ raise URLError('Unexpected HTTP request on what should be a secure '
+ 'connection: %s' % req)
+
+#
+# XML-RPC with timeouts
+#
+
+_ver_info = sys.version_info[:2]
+
+if _ver_info == (2, 6):
+ class HTTP(httplib.HTTP):
+ def __init__(self, host='', port=None, **kwargs):
+ if port == 0: # 0 means use port 0, not the default port
+ port = None
+ self._setup(self._connection_class(host, port, **kwargs))
+
+
+ if ssl:
+ class HTTPS(httplib.HTTPS):
+ def __init__(self, host='', port=None, **kwargs):
+ if port == 0: # 0 means use port 0, not the default port
+ port = None
+ self._setup(self._connection_class(host, port, **kwargs))
+
+
+class Transport(xmlrpclib.Transport):
+ def __init__(self, timeout, use_datetime=0):
+ self.timeout = timeout
+ xmlrpclib.Transport.__init__(self, use_datetime)
+
+ def make_connection(self, host):
+ h, eh, x509 = self.get_host_info(host)
+ if _ver_info == (2, 6):
+ result = HTTP(h, timeout=self.timeout)
+ else:
+ if not self._connection or host != self._connection[0]:
+ self._extra_headers = eh
+ self._connection = host, httplib.HTTPConnection(h)
+ result = self._connection[1]
+ return result
+
+if ssl:
+ class SafeTransport(xmlrpclib.SafeTransport):
+ def __init__(self, timeout, use_datetime=0):
+ self.timeout = timeout
+ xmlrpclib.SafeTransport.__init__(self, use_datetime)
+
+ def make_connection(self, host):
+ h, eh, kwargs = self.get_host_info(host)
+ if not kwargs:
+ kwargs = {}
+ kwargs['timeout'] = self.timeout
+ if _ver_info == (2, 6):
+ result = HTTPS(host, None, **kwargs)
+ else:
+ if not self._connection or host != self._connection[0]:
+ self._extra_headers = eh
+ self._connection = host, httplib.HTTPSConnection(h, None,
+ **kwargs)
+ result = self._connection[1]
+ return result
+
+
+class ServerProxy(xmlrpclib.ServerProxy):
+ def __init__(self, uri, **kwargs):
+ self.timeout = timeout = kwargs.pop('timeout', None)
+ # The above classes only come into play if a timeout
+ # is specified
+ if timeout is not None:
+ scheme, _ = splittype(uri)
+ use_datetime = kwargs.get('use_datetime', 0)
+ if scheme == 'https':
+ tcls = SafeTransport
+ else:
+ tcls = Transport
+ kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
+ self.transport = t
+ xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
+
+#
+# CSV functionality. This is provided because on 2.x, the csv module can't
+# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
+#
+
+def _csv_open(fn, mode, **kwargs):
+ if sys.version_info[0] < 3:
+ mode += 'b'
+ else:
+ kwargs['newline'] = ''
+ # Python 3 determines encoding from locale. Force 'utf-8'
+ # file encoding to match other forced utf-8 encoding
+ kwargs['encoding'] = 'utf-8'
+ return open(fn, mode, **kwargs)
+
+
+class CSVBase(object):
+ defaults = {
+ 'delimiter': str(','), # The strs are used because we need native
+ 'quotechar': str('"'), # str in the csv API (2.x won't take
+ 'lineterminator': str('\n') # Unicode)
+ }
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ self.stream.close()
+
+
+class CSVReader(CSVBase):
+ def __init__(self, **kwargs):
+ if 'stream' in kwargs:
+ stream = kwargs['stream']
+ if sys.version_info[0] >= 3:
+ # needs to be a text stream
+ stream = codecs.getreader('utf-8')(stream)
+ self.stream = stream
+ else:
+ self.stream = _csv_open(kwargs['path'], 'r')
+ self.reader = csv.reader(self.stream, **self.defaults)
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ result = next(self.reader)
+ if sys.version_info[0] < 3:
+ for i, item in enumerate(result):
+ if not isinstance(item, text_type):
+ result[i] = item.decode('utf-8')
+ return result
+
+ __next__ = next
+
+class CSVWriter(CSVBase):
+ def __init__(self, fn, **kwargs):
+ self.stream = _csv_open(fn, 'w')
+ self.writer = csv.writer(self.stream, **self.defaults)
+
+ def writerow(self, row):
+ if sys.version_info[0] < 3:
+ r = []
+ for item in row:
+ if isinstance(item, text_type):
+ item = item.encode('utf-8')
+ r.append(item)
+ row = r
+ self.writer.writerow(row)
+
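+# A usage sketch for RECORD-style data (hypothetical path and values):
+#
+#     with CSVWriter('RECORD') as writer:
+#         writer.writerow(['mypkg/__init__.py', 'sha256=abc123', '42'])
+#     with CSVReader(path='RECORD') as reader:
+#         rows = list(reader)
+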
+#
+# Configurator functionality
+#
+
+class Configurator(BaseConfigurator):
+
+ value_converters = dict(BaseConfigurator.value_converters)
+ value_converters['inc'] = 'inc_convert'
+
+ def __init__(self, config, base=None):
+ super(Configurator, self).__init__(config)
+ self.base = base or os.getcwd()
+
+ def configure_custom(self, config):
+ def convert(o):
+ if isinstance(o, (list, tuple)):
+ result = type(o)([convert(i) for i in o])
+ elif isinstance(o, dict):
+ if '()' in o:
+ result = self.configure_custom(o)
+ else:
+ result = {}
+ for k in o:
+ result[k] = convert(o[k])
+ else:
+ result = self.convert(o)
+ return result
+
+ c = config.pop('()')
+ if not callable(c):
+ c = self.resolve(c)
+ props = config.pop('.', None)
+ # Check for valid identifiers
+ args = config.pop('[]', ())
+ if args:
+ args = tuple([convert(o) for o in args])
+ items = [(k, convert(config[k])) for k in config if valid_ident(k)]
+ kwargs = dict(items)
+ result = c(*args, **kwargs)
+ if props:
+ for n, v in props.items():
+ setattr(result, n, convert(v))
+ return result
+
+ def __getitem__(self, key):
+ result = self.config[key]
+ if isinstance(result, dict) and '()' in result:
+ self.config[key] = result = self.configure_custom(result)
+ return result
+
+ def inc_convert(self, value):
+ """Default converter for the inc:// protocol."""
+ if not os.path.isabs(value):
+ value = os.path.join(self.base, value)
+ with codecs.open(value, 'r', encoding='utf-8') as f:
+ result = json.load(f)
+ return result
+
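+# A usage sketch: a dict value with a '()' key is instantiated on access
+# (here the callable is the stdlib datetime.date, purely for illustration):
+#
+#     >>> cfg = Configurator({'when': {'()': 'datetime.date',
+#     ...                              '[]': [2020, 1, 1]}})
+#     >>> cfg['when']
+#     datetime.date(2020, 1, 1)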
+
+class SubprocessMixin(object):
+ """
+ Mixin for running subprocesses and capturing their output
+ """
+ def __init__(self, verbose=False, progress=None):
+ self.verbose = verbose
+ self.progress = progress
+
+ def reader(self, stream, context):
+ """
+ Read lines from a subprocess's output stream and either pass them to a
+ progress callable (if specified) or write progress information to
+ sys.stderr.
+ """
+ progress = self.progress
+ verbose = self.verbose
+ while True:
+ s = stream.readline()
+ if not s:
+ break
+ if progress is not None:
+ progress(s, context)
+ else:
+ if not verbose:
+ sys.stderr.write('.')
+ else:
+ sys.stderr.write(s.decode('utf-8'))
+ sys.stderr.flush()
+ stream.close()
+
+ def run_command(self, cmd, **kwargs):
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, **kwargs)
+ t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
+ t1.start()
+ t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
+ t2.start()
+ p.wait()
+ t1.join()
+ t2.join()
+ if self.progress is not None:
+ self.progress('done.', 'main')
+ elif self.verbose:
+ sys.stderr.write('done.\n')
+ return p
+
+
+def normalize_name(name):
+ """Normalize a python package name a la PEP 503"""
+ # https://www.python.org/dev/peps/pep-0503/#normalized-names
+ return re.sub('[-_.]+', '-', name).lower()
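+
+# For example:
+#
+#     >>> normalize_name('Foo_Bar.baz')
+#     'foo-bar-baz'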
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/version.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/version.py
new file mode 100644
index 00000000..3eebe18e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/version.py
@@ -0,0 +1,736 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2012-2017 The Python Software Foundation.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+"""
+Implementation of a flexible versioning scheme providing support for PEP-440,
+setuptools-compatible and semantic versioning.
+"""
+
+import logging
+import re
+
+from .compat import string_types
+from .util import parse_requirement
+
+__all__ = ['NormalizedVersion', 'NormalizedMatcher',
+ 'LegacyVersion', 'LegacyMatcher',
+ 'SemanticVersion', 'SemanticMatcher',
+ 'UnsupportedVersionError', 'get_scheme']
+
+logger = logging.getLogger(__name__)
+
+
+class UnsupportedVersionError(ValueError):
+ """This is an unsupported version."""
+ pass
+
+
+class Version(object):
+ def __init__(self, s):
+ self._string = s = s.strip()
+ self._parts = parts = self.parse(s)
+ assert isinstance(parts, tuple)
+ assert len(parts) > 0
+
+ def parse(self, s):
+ raise NotImplementedError('please implement in a subclass')
+
+ def _check_compatible(self, other):
+ if type(self) != type(other):
+ raise TypeError('cannot compare %r and %r' % (self, other))
+
+ def __eq__(self, other):
+ self._check_compatible(other)
+ return self._parts == other._parts
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __lt__(self, other):
+ self._check_compatible(other)
+ return self._parts < other._parts
+
+ def __gt__(self, other):
+ return not (self.__lt__(other) or self.__eq__(other))
+
+ def __le__(self, other):
+ return self.__lt__(other) or self.__eq__(other)
+
+ def __ge__(self, other):
+ return self.__gt__(other) or self.__eq__(other)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ def __hash__(self):
+ return hash(self._parts)
+
+ def __repr__(self):
+ return "%s('%s')" % (self.__class__.__name__, self._string)
+
+ def __str__(self):
+ return self._string
+
+ @property
+ def is_prerelease(self):
+ raise NotImplementedError('Please implement in subclasses.')
+
+
+class Matcher(object):
+ version_class = None
+
+ # value is either a callable or the name of a method
+ _operators = {
+ '<': lambda v, c, p: v < c,
+ '>': lambda v, c, p: v > c,
+ '<=': lambda v, c, p: v == c or v < c,
+ '>=': lambda v, c, p: v == c or v > c,
+ '==': lambda v, c, p: v == c,
+ '===': lambda v, c, p: v == c,
+ # by default, compatible => >=.
+ '~=': lambda v, c, p: v == c or v > c,
+ '!=': lambda v, c, p: v != c,
+ }
+
+ # this is a method only to support alternative implementations
+ # via overriding
+ def parse_requirement(self, s):
+ return parse_requirement(s)
+
+ def __init__(self, s):
+ if self.version_class is None:
+ raise ValueError('Please specify a version class')
+ self._string = s = s.strip()
+ r = self.parse_requirement(s)
+ if not r:
+ raise ValueError('Not valid: %r' % s)
+ self.name = r.name
+ self.key = self.name.lower() # for case-insensitive comparisons
+ clist = []
+ if r.constraints:
+ # import pdb; pdb.set_trace()
+ for op, s in r.constraints:
+ if s.endswith('.*'):
+ if op not in ('==', '!='):
+ raise ValueError('\'.*\' not allowed for '
+ '%r constraints' % op)
+ # Could be a partial version (e.g. for '2.*') which
+ # won't parse as a version, so keep it as a string
+ vn, prefix = s[:-2], True
+ # Just to check that vn is a valid version
+ self.version_class(vn)
+ else:
+ # Should parse as a version, so we can create an
+ # instance for the comparison
+ vn, prefix = self.version_class(s), False
+ clist.append((op, vn, prefix))
+ self._parts = tuple(clist)
+
+ def match(self, version):
+ """
+ Check if the provided version matches the constraints.
+
+ :param version: The version to match against this instance.
+ :type version: String or :class:`Version` instance.
+ """
+ if isinstance(version, string_types):
+ version = self.version_class(version)
+ for operator, constraint, prefix in self._parts:
+ f = self._operators.get(operator)
+ if isinstance(f, string_types):
+ f = getattr(self, f)
+ if not f:
+ msg = ('%r not implemented '
+ 'for %s' % (operator, self.__class__.__name__))
+ raise NotImplementedError(msg)
+ if not f(version, constraint, prefix):
+ return False
+ return True
+
+ @property
+ def exact_version(self):
+ result = None
+ if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
+ result = self._parts[0][1]
+ return result
+
+ def _check_compatible(self, other):
+ if type(self) != type(other) or self.name != other.name:
+ raise TypeError('cannot compare %s and %s' % (self, other))
+
+ def __eq__(self, other):
+ self._check_compatible(other)
+ return self.key == other.key and self._parts == other._parts
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ # See http://docs.python.org/reference/datamodel#object.__hash__
+ def __hash__(self):
+ return hash(self.key) + hash(self._parts)
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self._string)
+
+ def __str__(self):
+ return self._string
+
+
+PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
+ r'(\.(post)(\d+))?(\.(dev)(\d+))?'
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')
+
+
+def _pep_440_key(s):
+ s = s.strip()
+ m = PEP440_VERSION_RE.match(s)
+ if not m:
+ raise UnsupportedVersionError('Not a valid version: %s' % s)
+ groups = m.groups()
+ nums = tuple(int(v) for v in groups[1].split('.'))
+ while len(nums) > 1 and nums[-1] == 0:
+ nums = nums[:-1]
+
+ if not groups[0]:
+ epoch = 0
+ else:
+        epoch = int(groups[0][:-1])  # groups[0] includes the trailing '!'
+ pre = groups[4:6]
+ post = groups[7:9]
+ dev = groups[10:12]
+ local = groups[13]
+ if pre == (None, None):
+ pre = ()
+ else:
+ pre = pre[0], int(pre[1])
+ if post == (None, None):
+ post = ()
+ else:
+ post = post[0], int(post[1])
+ if dev == (None, None):
+ dev = ()
+ else:
+ dev = dev[0], int(dev[1])
+ if local is None:
+ local = ()
+ else:
+ parts = []
+ for part in local.split('.'):
+ # to ensure that numeric compares as > lexicographic, avoid
+ # comparing them directly, but encode a tuple which ensures
+ # correct sorting
+ if part.isdigit():
+ part = (1, int(part))
+ else:
+ part = (0, part)
+ parts.append(part)
+ local = tuple(parts)
+ if not pre:
+ # either before pre-release, or final release and after
+ if not post and dev:
+ # before pre-release
+ pre = ('a', -1) # to sort before a0
+ else:
+ pre = ('z',) # to sort after all pre-releases
+ # now look at the state of post and dev.
+ if not post:
+ post = ('_',) # sort before 'a'
+ if not dev:
+ dev = ('final',)
+
+ #print('%s -> %s' % (s, m.groups()))
+ return epoch, nums, pre, post, dev, local
+
+
+_normalized_key = _pep_440_key
+
+
+class NormalizedVersion(Version):
+ """A rational version.
+
+ Good:
+ 1.2 # equivalent to "1.2.0"
+ 1.2.0
+ 1.2a1
+ 1.2.3a2
+ 1.2.3b1
+ 1.2.3c1
+ 1.2.3.4
+ TODO: fill this out
+
+ Bad:
+ 1.2a # release level must have a release serial
+ 1.2.3b
+ """
+ def parse(self, s):
+ result = _normalized_key(s)
+ # _normalized_key loses trailing zeroes in the release
+ # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
+ # However, PEP 440 prefix matching needs it: for example,
+ # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
+ m = PEP440_VERSION_RE.match(s) # must succeed
+ groups = m.groups()
+ self._release_clause = tuple(int(v) for v in groups[1].split('.'))
+ return result
+
+ PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
+
+ @property
+ def is_prerelease(self):
+ return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
+
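+# Examples (illustrative): NormalizedVersion orders versions per PEP 440.
+#   >>> NormalizedVersion('1.0') < NormalizedVersion('1.0.1')
+#   True
+#   >>> NormalizedVersion('1.2.3a1').is_prerelease
+#   True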
+
+def _match_prefix(x, y):
+ x = str(x)
+ y = str(y)
+ if x == y:
+ return True
+ if not x.startswith(y):
+ return False
+ n = len(y)
+ return x[n] == '.'
+
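+# Example (illustrative): prefix matching respects component boundaries.
+#   >>> _match_prefix('1.4.6', '1.4')
+#   True
+#   >>> _match_prefix('1.40', '1.4')
+#   False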
+
+class NormalizedMatcher(Matcher):
+ version_class = NormalizedVersion
+
+ # value is either a callable or the name of a method
+ _operators = {
+ '~=': '_match_compatible',
+ '<': '_match_lt',
+ '>': '_match_gt',
+ '<=': '_match_le',
+ '>=': '_match_ge',
+ '==': '_match_eq',
+ '===': '_match_arbitrary',
+ '!=': '_match_ne',
+ }
+
+ def _adjust_local(self, version, constraint, prefix):
+ if prefix:
+ strip_local = '+' not in constraint and version._parts[-1]
+ else:
+ # both constraint and version are
+ # NormalizedVersion instances.
+ # If constraint does not have a local component,
+ # ensure the version doesn't, either.
+ strip_local = not constraint._parts[-1] and version._parts[-1]
+ if strip_local:
+ s = version._string.split('+', 1)[0]
+ version = self.version_class(s)
+ return version, constraint
+
+ def _match_lt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version >= constraint:
+ return False
+ release_clause = constraint._release_clause
+ pfx = '.'.join([str(i) for i in release_clause])
+ return not _match_prefix(version, pfx)
+
+ def _match_gt(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version <= constraint:
+ return False
+ release_clause = constraint._release_clause
+ pfx = '.'.join([str(i) for i in release_clause])
+ return not _match_prefix(version, pfx)
+
+ def _match_le(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ return version <= constraint
+
+ def _match_ge(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ return version >= constraint
+
+ def _match_eq(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if not prefix:
+ result = (version == constraint)
+ else:
+ result = _match_prefix(version, constraint)
+ return result
+
+ def _match_arbitrary(self, version, constraint, prefix):
+ return str(version) == str(constraint)
+
+ def _match_ne(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if not prefix:
+ result = (version != constraint)
+ else:
+ result = not _match_prefix(version, constraint)
+ return result
+
+ def _match_compatible(self, version, constraint, prefix):
+ version, constraint = self._adjust_local(version, constraint, prefix)
+ if version == constraint:
+ return True
+ if version < constraint:
+ return False
+# if not prefix:
+# return True
+ release_clause = constraint._release_clause
+ if len(release_clause) > 1:
+ release_clause = release_clause[:-1]
+ pfx = '.'.join([str(i) for i in release_clause])
+ return _match_prefix(version, pfx)
+
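+# Example (illustrative; 'foo' is a hypothetical distribution name):
+#   >>> m = NormalizedMatcher('foo (~= 1.4.5)')
+#   >>> m.match('1.4.6'), m.match('1.5.0')
+#   (True, False)
+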
+_REPLACEMENTS = (
+ (re.compile('[.+-]$'), ''), # remove trailing puncts
+ (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start
+ (re.compile('^[.-]'), ''), # remove leading puncts
+ (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses
+ (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion)
+    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),   # remove leading r(ev)
+ (re.compile('[.]{2,}'), '.'), # multiple runs of '.'
+ (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha
+ (re.compile(r'\b(pre-alpha|prealpha)\b'),
+ 'pre.alpha'), # standardise
+ (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses
+)
+
+_SUFFIX_REPLACEMENTS = (
+ (re.compile('^[:~._+-]+'), ''), # remove leading puncts
+ (re.compile('[,*")([\\]]'), ''), # remove unwanted chars
+ (re.compile('[~:+_ -]'), '.'), # replace illegal chars
+ (re.compile('[.]{2,}'), '.'), # multiple runs of '.'
+ (re.compile(r'\.$'), ''), # trailing '.'
+)
+
+_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
+
+
+def _suggest_semantic_version(s):
+ """
+ Try to suggest a semantic form for a version for which
+ _suggest_normalized_version couldn't come up with anything.
+ """
+ result = s.strip().lower()
+ for pat, repl in _REPLACEMENTS:
+ result = pat.sub(repl, result)
+ if not result:
+ result = '0.0.0'
+
+ # Now look for numeric prefix, and separate it out from
+ # the rest.
+ #import pdb; pdb.set_trace()
+ m = _NUMERIC_PREFIX.match(result)
+ if not m:
+ prefix = '0.0.0'
+ suffix = result
+ else:
+ prefix = m.groups()[0].split('.')
+ prefix = [int(i) for i in prefix]
+ while len(prefix) < 3:
+ prefix.append(0)
+ if len(prefix) == 3:
+ suffix = result[m.end():]
+ else:
+ suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
+ prefix = prefix[:3]
+ prefix = '.'.join([str(i) for i in prefix])
+ suffix = suffix.strip()
+ if suffix:
+ #import pdb; pdb.set_trace()
+ # massage the suffix.
+ for pat, repl in _SUFFIX_REPLACEMENTS:
+ suffix = pat.sub(repl, suffix)
+
+ if not suffix:
+ result = prefix
+ else:
+ sep = '-' if 'dev' in suffix else '+'
+ result = prefix + sep + suffix
+ if not is_semver(result):
+ result = None
+ return result
+
+
+def _suggest_normalized_version(s):
+ """Suggest a normalized version close to the given version string.
+
+ If you have a version string that isn't rational (i.e. NormalizedVersion
+ doesn't like it) then you might be able to get an equivalent (or close)
+ rational version from this function.
+
+ This does a number of simple normalizations to the given string, based
+ on observation of versions currently in use on PyPI. Given a dump of
+    those versions during PyCon 2009, 4287 of them:
+    - 2312 (53.93%) match NormalizedVersion without change
+    - 3474 (81.04%) match when using the automatic suggestion from this
+      method
+
+ @param s {str} An irrational version string.
+    @returns A rational version string, or None if one couldn't be determined.
+ """
+ try:
+ _normalized_key(s)
+ return s # already rational
+ except UnsupportedVersionError:
+ pass
+
+ rs = s.lower()
+
+ # part of this could use maketrans
+ for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
+ ('beta', 'b'), ('rc', 'c'), ('-final', ''),
+ ('-pre', 'c'),
+ ('-release', ''), ('.release', ''), ('-stable', ''),
+ ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
+ ('final', '')):
+ rs = rs.replace(orig, repl)
+
+ # if something ends with dev or pre, we add a 0
+ rs = re.sub(r"pre$", r"pre0", rs)
+ rs = re.sub(r"dev$", r"dev0", rs)
+
+ # if we have something like "b-2" or "a.2" at the end of the
+ # version, that is probably beta, alpha, etc
+ # let's remove the dash or dot
+ rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
+
+ # 1.0-dev-r371 -> 1.0.dev371
+ # 0.1-dev-r79 -> 0.1.dev79
+ rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
+
+ # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
+ rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
+
+ # Clean: v0.3, v1.0
+ if rs.startswith('v'):
+ rs = rs[1:]
+
+ # Clean leading '0's on numbers.
+ #TODO: unintended side-effect on, e.g., "2003.05.09"
+ # PyPI stats: 77 (~2%) better
+ rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
+
+ # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
+ # zero.
+ # PyPI stats: 245 (7.56%) better
+ rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
+
+ # the 'dev-rNNN' tag is a dev tag
+ rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
+
+ # clean the - when used as a pre delimiter
+ rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
+
+ # a terminal "dev" or "devel" can be changed into ".dev0"
+ rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
+
+ # a terminal "dev" can be changed into ".dev0"
+ rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
+
+ # a terminal "final" or "stable" can be removed
+ rs = re.sub(r"(final|stable)$", "", rs)
+
+ # The 'r' and the '-' tags are post release tags
+ # 0.4a1.r10 -> 0.4a1.post10
+ # 0.9.33-17222 -> 0.9.33.post17222
+ # 0.9.33-r17222 -> 0.9.33.post17222
+ rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
+
+ # Clean 'r' instead of 'dev' usage:
+ # 0.9.33+r17222 -> 0.9.33.dev17222
+ # 1.0dev123 -> 1.0.dev123
+ # 1.0.git123 -> 1.0.dev123
+ # 1.0.bzr123 -> 1.0.dev123
+ # 0.1a0dev.123 -> 0.1a0.dev123
+ # PyPI stats: ~150 (~4%) better
+ rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
+
+ # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
+ # 0.2.pre1 -> 0.2c1
+ # 0.2-c1 -> 0.2c1
+ # 1.0preview123 -> 1.0c123
+ # PyPI stats: ~21 (0.62%) better
+ rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
+
+ # Tcl/Tk uses "px" for their post release markers
+ rs = re.sub(r"p(\d+)$", r".post\1", rs)
+
+ try:
+ _normalized_key(rs)
+ except UnsupportedVersionError:
+ rs = None
+ return rs
+
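+# Example (illustrative):
+#   >>> _suggest_normalized_version('1.0-dev-r371')
+#   '1.0.dev371'
+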
+#
+# Legacy version processing (distribute-compatible)
+#
+
+_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
+_VERSION_REPLACE = {
+ 'pre': 'c',
+ 'preview': 'c',
+ '-': 'final-',
+ 'rc': 'c',
+ 'dev': '@',
+ '': None,
+ '.': None,
+}
+
+
+def _legacy_key(s):
+ def get_parts(s):
+ result = []
+ for p in _VERSION_PART.split(s.lower()):
+ p = _VERSION_REPLACE.get(p, p)
+ if p:
+ if '0' <= p[:1] <= '9':
+ p = p.zfill(8)
+ else:
+ p = '*' + p
+ result.append(p)
+ result.append('*final')
+ return result
+
+ result = []
+ for p in get_parts(s):
+ if p.startswith('*'):
+ if p < '*final':
+ while result and result[-1] == '*final-':
+ result.pop()
+ while result and result[-1] == '00000000':
+ result.pop()
+ result.append(p)
+ return tuple(result)
+
+
+class LegacyVersion(Version):
+ def parse(self, s):
+ return _legacy_key(s)
+
+ @property
+ def is_prerelease(self):
+ result = False
+ for x in self._parts:
+ if (isinstance(x, string_types) and x.startswith('*') and
+ x < '*final'):
+ result = True
+ break
+ return result
+
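+# Example (illustrative): legacy ordering places pre-releases before the
+# corresponding final release.
+#   >>> LegacyVersion('1.0a1') < LegacyVersion('1.0')
+#   True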
+
+class LegacyMatcher(Matcher):
+ version_class = LegacyVersion
+
+ _operators = dict(Matcher._operators)
+ _operators['~='] = '_match_compatible'
+
+ numeric_re = re.compile(r'^(\d+(\.\d+)*)')
+
+ def _match_compatible(self, version, constraint, prefix):
+ if version < constraint:
+ return False
+ m = self.numeric_re.match(str(constraint))
+ if not m:
+            logger.warning('Cannot compute compatible match for version %s '
+                           'and constraint %s', version, constraint)
+ return True
+ s = m.groups()[0]
+ if '.' in s:
+ s = s.rsplit('.', 1)[0]
+ return _match_prefix(version, s)
+
+#
+# Semantic versioning
+#
+
+_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
+ r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
+
+
+def is_semver(s):
+ return _SEMVER_RE.match(s)
+
+
+def _semantic_key(s):
+ def make_tuple(s, absent):
+ if s is None:
+ result = (absent,)
+ else:
+ parts = s[1:].split('.')
+            # We can't compare ints and strings on Python 3, so fudge it
+            # by zero-filling numeric values to simulate a numeric comparison.
+ result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
+ return result
+
+ m = is_semver(s)
+ if not m:
+ raise UnsupportedVersionError(s)
+ groups = m.groups()
+ major, minor, patch = [int(i) for i in groups[:3]]
+ # choose the '|' and '*' so that versions sort correctly
+ pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
+ return (major, minor, patch), pre, build
+
+
+class SemanticVersion(Version):
+ def parse(self, s):
+ return _semantic_key(s)
+
+ @property
+ def is_prerelease(self):
+ return self._parts[1][0] != '|'
+
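+# Examples (illustrative): semantic versions sort per semver.org rules.
+#   >>> SemanticVersion('1.0.0-alpha') < SemanticVersion('1.0.0')
+#   True
+#   >>> SemanticVersion('1.0.0+build.5').is_prerelease
+#   False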
+
+class SemanticMatcher(Matcher):
+ version_class = SemanticVersion
+
+
+class VersionScheme(object):
+ def __init__(self, key, matcher, suggester=None):
+ self.key = key
+ self.matcher = matcher
+ self.suggester = suggester
+
+ def is_valid_version(self, s):
+ try:
+ self.matcher.version_class(s)
+ result = True
+ except UnsupportedVersionError:
+ result = False
+ return result
+
+ def is_valid_matcher(self, s):
+ try:
+ self.matcher(s)
+ result = True
+ except UnsupportedVersionError:
+ result = False
+ return result
+
+ def is_valid_constraint_list(self, s):
+ """
+ Used for processing some metadata fields
+ """
+ return self.is_valid_matcher('dummy_name (%s)' % s)
+
+ def suggest(self, s):
+ if self.suggester is None:
+ result = None
+ else:
+ result = self.suggester(s)
+ return result
+
+_SCHEMES = {
+ 'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
+ _suggest_normalized_version),
+    'legacy': VersionScheme(_legacy_key, LegacyMatcher,
+                            lambda s: s),  # suggest() passes one argument
+ 'semantic': VersionScheme(_semantic_key, SemanticMatcher,
+ _suggest_semantic_version),
+}
+
+_SCHEMES['default'] = _SCHEMES['normalized']
+
+
+def get_scheme(name):
+ if name not in _SCHEMES:
+ raise ValueError('unknown scheme name: %r' % name)
+ return _SCHEMES[name]
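+
+# Example (illustrative):
+#   >>> scheme = get_scheme('normalized')
+#   >>> scheme.is_valid_version('1.0.post1')
+#   True
+#   >>> scheme.suggest('1.0-final')
+#   '1.0'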
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe
new file mode 100644
index 00000000..4df77001
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe
new file mode 100644
index 00000000..63ce483d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py
new file mode 100644
index 00000000..0c8efad9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py
@@ -0,0 +1,1004 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2017 Vinay Sajip.
+# Licensed to the Python Software Foundation under a contributor agreement.
+# See LICENSE.txt and CONTRIBUTORS.txt.
+#
+from __future__ import unicode_literals
+
+import base64
+import codecs
+import datetime
+import distutils.util
+from email import message_from_file
+import hashlib
+import imp
+import json
+import logging
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import zipfile
+
+from . import __version__, DistlibException
+from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
+from .database import InstalledDistribution
+from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME
+from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
+ cached_property, get_cache_base, read_exports, tempdir)
+from .version import NormalizedVersion, UnsupportedVersionError
+
+logger = logging.getLogger(__name__)
+
+cache = None # created when needed
+
+if hasattr(sys, 'pypy_version_info'): # pragma: no cover
+ IMP_PREFIX = 'pp'
+elif sys.platform.startswith('java'): # pragma: no cover
+ IMP_PREFIX = 'jy'
+elif sys.platform == 'cli': # pragma: no cover
+ IMP_PREFIX = 'ip'
+else:
+ IMP_PREFIX = 'cp'
+
+VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
+if not VER_SUFFIX: # pragma: no cover
+ VER_SUFFIX = '%s%s' % sys.version_info[:2]
+PYVER = 'py' + VER_SUFFIX
+IMPVER = IMP_PREFIX + VER_SUFFIX
+
+ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')
+
+ABI = sysconfig.get_config_var('SOABI')
+if ABI and ABI.startswith('cpython-'):
+ ABI = ABI.replace('cpython-', 'cp')
+else:
+ def _derive_abi():
+ parts = ['cp', VER_SUFFIX]
+ if sysconfig.get_config_var('Py_DEBUG'):
+ parts.append('d')
+ if sysconfig.get_config_var('WITH_PYMALLOC'):
+ parts.append('m')
+ if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
+ parts.append('u')
+ return ''.join(parts)
+ ABI = _derive_abi()
+ del _derive_abi
+
+FILENAME_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?
+-(?P<py>\w+\d+(\.\w+\d+)*)
+-(?P<bi>\w+)
+-(?P<ar>\w+(\.\w+)*)
+\.whl$
+''', re.IGNORECASE | re.VERBOSE)
+
+NAME_VERSION_RE = re.compile(r'''
+(?P<nm>[^-]+)
+-(?P<vn>\d+[^-]*)
+(-(?P<bn>\d+[^-]*))?$
+''', re.IGNORECASE | re.VERBOSE)
+
+SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
+SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
+SHEBANG_PYTHON = b'#!python'
+SHEBANG_PYTHONW = b'#!pythonw'
+
+if os.sep == '/':
+ to_posix = lambda o: o
+else:
+ to_posix = lambda o: o.replace(os.sep, '/')
+
+
+class Mounter(object):
+ def __init__(self):
+ self.impure_wheels = {}
+ self.libs = {}
+
+ def add(self, pathname, extensions):
+ self.impure_wheels[pathname] = extensions
+ self.libs.update(extensions)
+
+ def remove(self, pathname):
+ extensions = self.impure_wheels.pop(pathname)
+ for k, v in extensions:
+ if k in self.libs:
+ del self.libs[k]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.libs:
+ result = self
+ else:
+ result = None
+ return result
+
+ def load_module(self, fullname):
+ if fullname in sys.modules:
+ result = sys.modules[fullname]
+ else:
+ if fullname not in self.libs:
+ raise ImportError('unable to find extension for %s' % fullname)
+ result = imp.load_dynamic(fullname, self.libs[fullname])
+ result.__loader__ = self
+ parts = fullname.rsplit('.', 1)
+ if len(parts) > 1:
+ result.__package__ = parts[0]
+ return result
+
+_hook = Mounter()
+
+
+class Wheel(object):
+ """
+ Class to build and install from Wheel files (PEP 427).
+ """
+
+ wheel_version = (1, 1)
+ hash_kind = 'sha256'
+
+ def __init__(self, filename=None, sign=False, verify=False):
+ """
+ Initialise an instance using a (valid) filename.
+ """
+ self.sign = sign
+ self.should_verify = verify
+ self.buildver = ''
+ self.pyver = [PYVER]
+ self.abi = ['none']
+ self.arch = ['any']
+ self.dirname = os.getcwd()
+ if filename is None:
+ self.name = 'dummy'
+ self.version = '0.1'
+ self._filename = self.filename
+ else:
+ m = NAME_VERSION_RE.match(filename)
+ if m:
+ info = m.groupdict('')
+ self.name = info['nm']
+ # Reinstate the local version separator
+ self.version = info['vn'].replace('_', '-')
+ self.buildver = info['bn']
+ self._filename = self.filename
+ else:
+ dirname, filename = os.path.split(filename)
+ m = FILENAME_RE.match(filename)
+ if not m:
+ raise DistlibException('Invalid name or '
+ 'filename: %r' % filename)
+ if dirname:
+ self.dirname = os.path.abspath(dirname)
+ self._filename = filename
+ info = m.groupdict('')
+ self.name = info['nm']
+ self.version = info['vn']
+ self.buildver = info['bn']
+ self.pyver = info['py'].split('.')
+ self.abi = info['bi'].split('.')
+ self.arch = info['ar'].split('.')
+
+ @property
+ def filename(self):
+ """
+ Build and return a filename from the various components.
+ """
+ if self.buildver:
+ buildver = '-' + self.buildver
+ else:
+ buildver = ''
+ pyver = '.'.join(self.pyver)
+ abi = '.'.join(self.abi)
+ arch = '.'.join(self.arch)
+ # replace - with _ as a local version separator
+ version = self.version.replace('-', '_')
+ return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
+ pyver, abi, arch)
+
+ @property
+ def exists(self):
+ path = os.path.join(self.dirname, self.filename)
+ return os.path.isfile(path)
+
+ @property
+ def tags(self):
+ for pyver in self.pyver:
+ for abi in self.abi:
+ for arch in self.arch:
+ yield pyver, abi, arch
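+
+    # Example (illustrative; the filename is hypothetical):
+    #   >>> w = Wheel('foo-1.0-py3-none-any.whl')
+    #   >>> w.name, w.version, list(w.tags)
+    #   ('foo', '1.0', [('py3', 'none', 'any')])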
+
+ @cached_property
+ def metadata(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ wrapper = codecs.getreader('utf-8')
+ with ZipFile(pathname, 'r') as zf:
+ wheel_metadata = self.get_wheel_metadata(zf)
+ wv = wheel_metadata['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ if file_version < (1, 1):
+ fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA']
+ else:
+ fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
+ result = None
+ for fn in fns:
+ try:
+ metadata_filename = posixpath.join(info_dir, fn)
+ with zf.open(metadata_filename) as bf:
+ wf = wrapper(bf)
+ result = Metadata(fileobj=wf)
+ if result:
+ break
+ except KeyError:
+ pass
+ if not result:
+ raise ValueError('Invalid wheel, because metadata is '
+ 'missing: looked in %s' % ', '.join(fns))
+ return result
+
+ def get_wheel_metadata(self, zf):
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ metadata_filename = posixpath.join(info_dir, 'WHEEL')
+ with zf.open(metadata_filename) as bf:
+ wf = codecs.getreader('utf-8')(bf)
+ message = message_from_file(wf)
+ return dict(message)
+
+ @cached_property
+ def info(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ with ZipFile(pathname, 'r') as zf:
+ result = self.get_wheel_metadata(zf)
+ return result
+
+ def process_shebang(self, data):
+ m = SHEBANG_RE.match(data)
+ if m:
+ end = m.end()
+ shebang, data_after_shebang = data[:end], data[end:]
+ # Preserve any arguments after the interpreter
+ if b'pythonw' in shebang.lower():
+ shebang_python = SHEBANG_PYTHONW
+ else:
+ shebang_python = SHEBANG_PYTHON
+ m = SHEBANG_DETAIL_RE.match(shebang)
+ if m:
+ args = b' ' + m.groups()[-1]
+ else:
+ args = b''
+ shebang = shebang_python + args
+ data = shebang + data_after_shebang
+ else:
+ cr = data.find(b'\r')
+ lf = data.find(b'\n')
+ if cr < 0 or cr > lf:
+ term = b'\n'
+ else:
+ if data[cr:cr + 2] == b'\r\n':
+ term = b'\r\n'
+ else:
+ term = b'\r'
+ data = SHEBANG_PYTHON + term + data
+ return data
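+
+    # Example (illustrative): shebangs are rewritten to the generic form
+    # expected by wheel installers.
+    #   >>> Wheel().process_shebang(b'#!/usr/bin/python\nprint("hi")\n')
+    #   b'#!python\nprint("hi")\n'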
+
+ def get_hash(self, data, hash_kind=None):
+ if hash_kind is None:
+ hash_kind = self.hash_kind
+ try:
+ hasher = getattr(hashlib, hash_kind)
+ except AttributeError:
+ raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
+ result = hasher(data).digest()
+ result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
+ return hash_kind, result
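+
+    # Example (illustrative): digests are urlsafe base64 with the padding
+    # stripped, the form used in RECORD files.
+    #   >>> kind, digest = Wheel().get_hash(b'payload')
+    #   >>> kind
+    #   'sha256'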
+
+ def write_record(self, records, record_path, base):
+ records = list(records) # make a copy for sorting
+ p = to_posix(os.path.relpath(record_path, base))
+ records.append((p, '', ''))
+ records.sort()
+ with CSVWriter(record_path) as writer:
+ for row in records:
+ writer.writerow(row)
+
+ def write_records(self, info, libdir, archive_paths):
+ records = []
+ distinfo, info_dir = info
+ hasher = getattr(hashlib, self.hash_kind)
+ for ap, p in archive_paths:
+ with open(p, 'rb') as f:
+ data = f.read()
+ digest = '%s=%s' % self.get_hash(data)
+ size = os.path.getsize(p)
+ records.append((ap, digest, size))
+
+ p = os.path.join(distinfo, 'RECORD')
+ self.write_record(records, p, libdir)
+ ap = to_posix(os.path.join(info_dir, 'RECORD'))
+ archive_paths.append((ap, p))
+
+ def build_zip(self, pathname, archive_paths):
+ with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
+ for ap, p in archive_paths:
+ logger.debug('Wrote %s to %s in wheel', p, ap)
+ zf.write(p, ap)
+
+ def build(self, paths, tags=None, wheel_version=None):
+ """
+ Build a wheel from files in specified paths, and use any specified tags
+ when determining the name of the wheel.
+ """
+ if tags is None:
+ tags = {}
+
+ libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
+ if libkey == 'platlib':
+ is_pure = 'false'
+ default_pyver = [IMPVER]
+ default_abi = [ABI]
+ default_arch = [ARCH]
+ else:
+ is_pure = 'true'
+ default_pyver = [PYVER]
+ default_abi = ['none']
+ default_arch = ['any']
+
+ self.pyver = tags.get('pyver', default_pyver)
+ self.abi = tags.get('abi', default_abi)
+ self.arch = tags.get('arch', default_arch)
+
+ libdir = paths[libkey]
+
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ archive_paths = []
+
+ # First, stuff which is not in site-packages
+ for key in ('data', 'headers', 'scripts'):
+ if key not in paths:
+ continue
+ path = paths[key]
+ if os.path.isdir(path):
+ for root, dirs, files in os.walk(path):
+ for fn in files:
+ p = fsdecode(os.path.join(root, fn))
+ rp = os.path.relpath(p, path)
+ ap = to_posix(os.path.join(data_dir, key, rp))
+ archive_paths.append((ap, p))
+ if key == 'scripts' and not p.endswith('.exe'):
+ with open(p, 'rb') as f:
+ data = f.read()
+ data = self.process_shebang(data)
+ with open(p, 'wb') as f:
+ f.write(data)
+
+ # Now, stuff which is in site-packages, other than the
+ # distinfo stuff.
+ path = libdir
+ distinfo = None
+ for root, dirs, files in os.walk(path):
+ if root == path:
+ # At the top level only, save distinfo for later
+ # and skip it for now
+ for i, dn in enumerate(dirs):
+ dn = fsdecode(dn)
+ if dn.endswith('.dist-info'):
+ distinfo = os.path.join(root, dn)
+ del dirs[i]
+ break
+ assert distinfo, '.dist-info directory expected, not found'
+
+ for fn in files:
+ # comment out next suite to leave .pyc files in
+ if fsdecode(fn).endswith(('.pyc', '.pyo')):
+ continue
+ p = os.path.join(root, fn)
+ rp = to_posix(os.path.relpath(p, path))
+ archive_paths.append((rp, p))
+
+ # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
+ files = os.listdir(distinfo)
+ for fn in files:
+ if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
+ p = fsdecode(os.path.join(distinfo, fn))
+ ap = to_posix(os.path.join(info_dir, fn))
+ archive_paths.append((ap, p))
+
+ wheel_metadata = [
+ 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
+ 'Generator: distlib %s' % __version__,
+ 'Root-Is-Purelib: %s' % is_pure,
+ ]
+ for pyver, abi, arch in self.tags:
+ wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
+ p = os.path.join(distinfo, 'WHEEL')
+ with open(p, 'w') as f:
+ f.write('\n'.join(wheel_metadata))
+ ap = to_posix(os.path.join(info_dir, 'WHEEL'))
+ archive_paths.append((ap, p))
+
+ # Now, at last, RECORD.
+ # Paths in here are archive paths - nothing else makes sense.
+ self.write_records((distinfo, info_dir), libdir, archive_paths)
+ # Now, ready to build the zip file
+ pathname = os.path.join(self.dirname, self.filename)
+ self.build_zip(pathname, archive_paths)
+ return pathname
+
+ def skip_entry(self, arcname):
+ """
+ Determine whether an archive entry should be skipped when verifying
+ or installing.
+ """
+        # The signature file won't be in RECORD,
+        # and we don't currently do anything with it.
+ # We also skip directories, as they won't be in RECORD
+ # either. See:
+ #
+ # https://github.com/pypa/wheel/issues/294
+ # https://github.com/pypa/wheel/issues/287
+ # https://github.com/pypa/wheel/pull/289
+ #
+ return arcname.endswith(('/', '/RECORD.jws'))
+
+ def install(self, paths, maker, **kwargs):
+ """
+ Install a wheel to the specified paths. If kwarg ``warner`` is
+ specified, it should be a callable, which will be called with two
+ tuples indicating the wheel version of this software and the wheel
+ version in the file, if there is a discrepancy in the versions.
+        This can be used to issue any warnings or raise any exceptions.
+ If kwarg ``lib_only`` is True, only the purelib/platlib files are
+ installed, and the headers, scripts, data and dist-info metadata are
+ not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
+ bytecode will try to use file-hash based invalidation (PEP-552) on
+        supported interpreter versions (CPython 3.7+).
+
+        The return value is an :class:`InstalledDistribution` instance unless
+        ``lib_only`` is True, in which case the return value is ``None``.
+ """
+
+ dry_run = maker.dry_run
+ warner = kwargs.get('warner')
+ lib_only = kwargs.get('lib_only', False)
+ bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
+
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+ record_name = posixpath.join(info_dir, 'RECORD')
+
+ wrapper = codecs.getreader('utf-8')
+
+ with ZipFile(pathname, 'r') as zf:
+ with zf.open(wheel_metadata_name) as bwf:
+ wf = wrapper(bwf)
+ message = message_from_file(wf)
+ wv = message['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ if (file_version != self.wheel_version) and warner:
+ warner(self.wheel_version, file_version)
+
+ if message['Root-Is-Purelib'] == 'true':
+ libdir = paths['purelib']
+ else:
+ libdir = paths['platlib']
+
+ records = {}
+ with zf.open(record_name) as bf:
+ with CSVReader(stream=bf) as reader:
+ for row in reader:
+ p = row[0]
+ records[p] = row
+
+ data_pfx = posixpath.join(data_dir, '')
+ info_pfx = posixpath.join(info_dir, '')
+ script_pfx = posixpath.join(data_dir, 'scripts', '')
+
+ # make a new instance rather than a copy of maker's,
+ # as we mutate it
+ fileop = FileOperator(dry_run=dry_run)
+ fileop.record = True # so we can rollback if needed
+
+ bc = not sys.dont_write_bytecode # Double negatives. Lovely!
+
+ outfiles = [] # for RECORD writing
+
+ # for script copying/shebang processing
+ workdir = tempfile.mkdtemp()
+ # set target dir later
+ # we default add_launchers to False, as the
+ # Python Launcher should be used instead
+ maker.source_dir = workdir
+ maker.target_dir = None
+ try:
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+ if self.skip_entry(u_arcname):
+ continue
+ row = records[u_arcname]
+ if row[2] and str(zinfo.file_size) != row[2]:
+ raise DistlibException('size mismatch for '
+ '%s' % u_arcname)
+ if row[1]:
+ kind, value = row[1].split('=', 1)
+ with zf.open(arcname) as bf:
+ data = bf.read()
+ _, digest = self.get_hash(data, kind)
+ if digest != value:
+ raise DistlibException('digest mismatch for '
+ '%s' % arcname)
+
+ if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
+ logger.debug('lib_only: skipping %s', u_arcname)
+ continue
+ is_script = (u_arcname.startswith(script_pfx)
+ and not u_arcname.endswith('.exe'))
+
+ if u_arcname.startswith(data_pfx):
+ _, where, rp = u_arcname.split('/', 2)
+ outfile = os.path.join(paths[where], convert_path(rp))
+ else:
+ # meant for site-packages.
+ if u_arcname in (wheel_metadata_name, record_name):
+ continue
+ outfile = os.path.join(libdir, convert_path(u_arcname))
+ if not is_script:
+ with zf.open(arcname) as bf:
+ fileop.copy_stream(bf, outfile)
+ outfiles.append(outfile)
+ # Double check the digest of the written file
+ if not dry_run and row[1]:
+ with open(outfile, 'rb') as bf:
+ data = bf.read()
+ _, newdigest = self.get_hash(data, kind)
+ if newdigest != digest:
+ raise DistlibException('digest mismatch '
+ 'on write for '
+ '%s' % outfile)
+ if bc and outfile.endswith('.py'):
+ try:
+ pyc = fileop.byte_compile(outfile,
+ hashed_invalidation=bc_hashed_invalidation)
+ outfiles.append(pyc)
+ except Exception:
+ # Don't give up if byte-compilation fails,
+ # but log it and perhaps warn the user
+ logger.warning('Byte-compilation failed',
+ exc_info=True)
+ else:
+ fn = os.path.basename(convert_path(arcname))
+ workname = os.path.join(workdir, fn)
+ with zf.open(arcname) as bf:
+ fileop.copy_stream(bf, workname)
+
+ dn, fn = os.path.split(outfile)
+ maker.target_dir = dn
+ filenames = maker.make(fn)
+ fileop.set_executable_mode(filenames)
+ outfiles.extend(filenames)
+
+ if lib_only:
+ logger.debug('lib_only: returning None')
+ dist = None
+ else:
+ # Generate scripts
+
+ # Try to get pydist.json so we can see if there are
+ # any commands to generate. If this fails (e.g. because
+ # of a legacy wheel), log a warning but don't give up.
+ commands = None
+ file_version = self.info['Wheel-Version']
+ if file_version == '1.0':
+ # Use legacy info
+ ep = posixpath.join(info_dir, 'entry_points.txt')
+ try:
+ with zf.open(ep) as bwf:
+ epdata = read_exports(bwf)
+ commands = {}
+ for key in ('console', 'gui'):
+ k = '%s_scripts' % key
+ if k in epdata:
+ commands['wrap_%s' % key] = d = {}
+ for v in epdata[k].values():
+ s = '%s:%s' % (v.prefix, v.suffix)
+ if v.flags:
+ s += ' %s' % v.flags
+ d[v.name] = s
+ except Exception:
+ logger.warning('Unable to read legacy script '
+ 'metadata, so cannot generate '
+ 'scripts')
+ else:
+ try:
+ with zf.open(metadata_name) as bwf:
+ wf = wrapper(bwf)
+ commands = json.load(wf).get('extensions')
+ if commands:
+ commands = commands.get('python.commands')
+ except Exception:
+ logger.warning('Unable to read JSON metadata, so '
+ 'cannot generate scripts')
+ if commands:
+ console_scripts = commands.get('wrap_console', {})
+ gui_scripts = commands.get('wrap_gui', {})
+ if console_scripts or gui_scripts:
+ script_dir = paths.get('scripts', '')
+ if not os.path.isdir(script_dir):
+ raise ValueError('Valid script path not '
+ 'specified')
+ maker.target_dir = script_dir
+ for k, v in console_scripts.items():
+ script = '%s = %s' % (k, v)
+ filenames = maker.make(script)
+ fileop.set_executable_mode(filenames)
+
+ if gui_scripts:
+ options = {'gui': True }
+ for k, v in gui_scripts.items():
+ script = '%s = %s' % (k, v)
+ filenames = maker.make(script, options)
+ fileop.set_executable_mode(filenames)
+
+ p = os.path.join(libdir, info_dir)
+ dist = InstalledDistribution(p)
+
+ # Write SHARED
+ paths = dict(paths) # don't change passed in dict
+ del paths['purelib']
+ del paths['platlib']
+ paths['lib'] = libdir
+ p = dist.write_shared_locations(paths, dry_run)
+ if p:
+ outfiles.append(p)
+
+ # Write RECORD
+ dist.write_installed_files(outfiles, paths['prefix'],
+ dry_run)
+ return dist
+ except Exception: # pragma: no cover
+ logger.exception('installation failed.')
+ fileop.rollback()
+ raise
+ finally:
+ shutil.rmtree(workdir)
+
+ def _get_dylib_cache(self):
+ global cache
+ if cache is None:
+ # Use native string to avoid issues on 2.x: see Python #20140.
+ base = os.path.join(get_cache_base(), str('dylib-cache'),
+ sys.version[:3])
+ cache = Cache(base)
+ return cache
+
+ def _get_extensions(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ arcname = posixpath.join(info_dir, 'EXTENSIONS')
+ wrapper = codecs.getreader('utf-8')
+ result = []
+ with ZipFile(pathname, 'r') as zf:
+ try:
+ with zf.open(arcname) as bf:
+ wf = wrapper(bf)
+ extensions = json.load(wf)
+ cache = self._get_dylib_cache()
+ prefix = cache.prefix_to_dir(pathname)
+ cache_base = os.path.join(cache.base, prefix)
+ if not os.path.isdir(cache_base):
+ os.makedirs(cache_base)
+ for name, relpath in extensions.items():
+ dest = os.path.join(cache_base, convert_path(relpath))
+ if not os.path.exists(dest):
+ extract = True
+ else:
+ file_time = os.stat(dest).st_mtime
+ file_time = datetime.datetime.fromtimestamp(file_time)
+ info = zf.getinfo(relpath)
+ wheel_time = datetime.datetime(*info.date_time)
+ extract = wheel_time > file_time
+ if extract:
+ zf.extract(relpath, cache_base)
+ result.append((name, dest))
+ except KeyError:
+ pass
+ return result
+
+ def is_compatible(self):
+ """
+ Determine if a wheel is compatible with the running system.
+ """
+ return is_compatible(self)
+
+ def is_mountable(self):
+ """
+ Determine if a wheel is asserted as mountable by its metadata.
+ """
+ return True # for now - metadata details TBD
+
+ def mount(self, append=False):
+ pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+ if not self.is_compatible():
+ msg = 'Wheel %s not compatible with this Python.' % pathname
+ raise DistlibException(msg)
+ if not self.is_mountable():
+ msg = 'Wheel %s is marked as not mountable.' % pathname
+ raise DistlibException(msg)
+ if pathname in sys.path:
+ logger.debug('%s already in path', pathname)
+ else:
+ if append:
+ sys.path.append(pathname)
+ else:
+ sys.path.insert(0, pathname)
+ extensions = self._get_extensions()
+ if extensions:
+ if _hook not in sys.meta_path:
+ sys.meta_path.append(_hook)
+ _hook.add(pathname, extensions)
+
+ def unmount(self):
+ pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
+ if pathname not in sys.path:
+ logger.debug('%s not in path', pathname)
+ else:
+ sys.path.remove(pathname)
+ if pathname in _hook.impure_wheels:
+ _hook.remove(pathname)
+ if not _hook.impure_wheels:
+ if _hook in sys.meta_path:
+ sys.meta_path.remove(_hook)
+
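+    # Example (illustrative; assumes a compatible foo-1.0-py3-none-any.whl
+    # exists in the current directory):
+    #   >>> w = Wheel('foo-1.0-py3-none-any.whl')
+    #   >>> w.mount()        # the wheel is added to sys.path
+    #   >>> import foo       # code inside the wheel is now importable
+    #   >>> w.unmount()
+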
+ def verify(self):
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ data_dir = '%s.data' % name_ver
+ info_dir = '%s.dist-info' % name_ver
+
+ metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
+ record_name = posixpath.join(info_dir, 'RECORD')
+
+ wrapper = codecs.getreader('utf-8')
+
+ with ZipFile(pathname, 'r') as zf:
+ with zf.open(wheel_metadata_name) as bwf:
+ wf = wrapper(bwf)
+ message = message_from_file(wf)
+ wv = message['Wheel-Version'].split('.', 1)
+ file_version = tuple([int(i) for i in wv])
+ # TODO version verification
+
+ records = {}
+ with zf.open(record_name) as bf:
+ with CSVReader(stream=bf) as reader:
+ for row in reader:
+ p = row[0]
+ records[p] = row
+
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+                # See issue #115: some wheels have .. in their entries, but
+                # only within a filename, e.g. __main__..py ! So the check is
+                # updated to look for .. only in the directory portions.
+ p = u_arcname.split('/')
+ if '..' in p:
+ raise DistlibException('invalid entry in '
+ 'wheel: %r' % u_arcname)
+
+ if self.skip_entry(u_arcname):
+ continue
+ row = records[u_arcname]
+ if row[2] and str(zinfo.file_size) != row[2]:
+ raise DistlibException('size mismatch for '
+ '%s' % u_arcname)
+ if row[1]:
+ kind, value = row[1].split('=', 1)
+ with zf.open(arcname) as bf:
+ data = bf.read()
+ _, digest = self.get_hash(data, kind)
+ if digest != value:
+ raise DistlibException('digest mismatch for '
+ '%s' % arcname)
+
+ def update(self, modifier, dest_dir=None, **kwargs):
+ """
+ Update the contents of a wheel in a generic way. The modifier should
+ be a callable which expects a dictionary argument: its keys are
+ archive-entry paths, and its values are absolute filesystem paths
+        where the contents of the corresponding archive entries can be found. The
+ modifier is free to change the contents of the files pointed to, add
+ new entries and remove entries, before returning. This method will
+ extract the entire contents of the wheel to a temporary location, call
+ the modifier, and then use the passed (and possibly updated)
+ dictionary to write a new wheel. If ``dest_dir`` is specified, the new
+ wheel is written there -- otherwise, the original wheel is overwritten.
+
+ The modifier should return True if it updated the wheel, else False.
+ This method returns the same value the modifier returns.
+ """
+
+ def get_version(path_map, info_dir):
+ version = path = None
+ key = '%s/%s' % (info_dir, METADATA_FILENAME)
+ if key not in path_map:
+ key = '%s/PKG-INFO' % info_dir
+ if key in path_map:
+ path = path_map[key]
+ version = Metadata(path=path).version
+ return version, path
+
+ def update_version(version, path):
+ updated = None
+ try:
+ v = NormalizedVersion(version)
+ i = version.find('-')
+ if i < 0:
+ updated = '%s+1' % version
+ else:
+ parts = [int(s) for s in version[i + 1:].split('.')]
+ parts[-1] += 1
+ updated = '%s+%s' % (version[:i],
+ '.'.join(str(i) for i in parts))
+ except UnsupportedVersionError:
+ logger.debug('Cannot update non-compliant (PEP-440) '
+ 'version %r', version)
+ if updated:
+ md = Metadata(path=path)
+ md.version = updated
+ legacy = not path.endswith(METADATA_FILENAME)
+ md.write(path=path, legacy=legacy)
+ logger.debug('Version updated from %r to %r', version,
+ updated)
+
+ pathname = os.path.join(self.dirname, self.filename)
+ name_ver = '%s-%s' % (self.name, self.version)
+ info_dir = '%s.dist-info' % name_ver
+ record_name = posixpath.join(info_dir, 'RECORD')
+ with tempdir() as workdir:
+ with ZipFile(pathname, 'r') as zf:
+ path_map = {}
+ for zinfo in zf.infolist():
+ arcname = zinfo.filename
+ if isinstance(arcname, text_type):
+ u_arcname = arcname
+ else:
+ u_arcname = arcname.decode('utf-8')
+ if u_arcname == record_name:
+ continue
+ if '..' in u_arcname:
+ raise DistlibException('invalid entry in '
+ 'wheel: %r' % u_arcname)
+ zf.extract(zinfo, workdir)
+ path = os.path.join(workdir, convert_path(u_arcname))
+ path_map[u_arcname] = path
+
+ # Remember the version.
+ original_version, _ = get_version(path_map, info_dir)
+ # Files extracted. Call the modifier.
+ modified = modifier(path_map, **kwargs)
+ if modified:
+ # Something changed - need to build a new wheel.
+ current_version, path = get_version(path_map, info_dir)
+ if current_version and (current_version == original_version):
+ # Add or update local version to signify changes.
+ update_version(current_version, path)
+ # Decide where the new wheel goes.
+ if dest_dir is None:
+ fd, newpath = tempfile.mkstemp(suffix='.whl',
+ prefix='wheel-update-',
+ dir=workdir)
+ os.close(fd)
+ else:
+ if not os.path.isdir(dest_dir):
+ raise DistlibException('Not a directory: %r' % dest_dir)
+ newpath = os.path.join(dest_dir, self.filename)
+ archive_paths = list(path_map.items())
+ distinfo = os.path.join(workdir, info_dir)
+ info = distinfo, info_dir
+ self.write_records(info, workdir, archive_paths)
+ self.build_zip(newpath, archive_paths)
+ if dest_dir is None:
+ shutil.copyfile(newpath, pathname)
+ return modified
+
+def compatible_tags():
+ """
+ Return (pyver, abi, arch) tuples compatible with this Python.
+ """
+ versions = [VER_SUFFIX]
+ major = VER_SUFFIX[0]
+ for minor in range(sys.version_info[1] - 1, - 1, -1):
+ versions.append(''.join([major, str(minor)]))
+
+ abis = []
+ for suffix, _, _ in imp.get_suffixes():
+ if suffix.startswith('.abi'):
+ abis.append(suffix.split('.', 2)[1])
+ abis.sort()
+ if ABI != 'none':
+ abis.insert(0, ABI)
+ abis.append('none')
+ result = []
+
+ arches = [ARCH]
+ if sys.platform == 'darwin':
+ m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
+ if m:
+ name, major, minor, arch = m.groups()
+ minor = int(minor)
+ matches = [arch]
+ if arch in ('i386', 'ppc'):
+ matches.append('fat')
+ if arch in ('i386', 'ppc', 'x86_64'):
+ matches.append('fat3')
+ if arch in ('ppc64', 'x86_64'):
+ matches.append('fat64')
+ if arch in ('i386', 'x86_64'):
+ matches.append('intel')
+ if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
+ matches.append('universal')
+ while minor >= 0:
+ for match in matches:
+ s = '%s_%s_%s_%s' % (name, major, minor, match)
+ if s != ARCH: # already there
+ arches.append(s)
+ minor -= 1
+
+ # Most specific - our Python version, ABI and arch
+ for abi in abis:
+ for arch in arches:
+ result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
+
+ # where no ABI / arch dependency, but IMP_PREFIX dependency
+ for i, version in enumerate(versions):
+ result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
+ if i == 0:
+ result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
+
+ # no IMP_PREFIX, ABI or arch dependency
+ for i, version in enumerate(versions):
+ result.append((''.join(('py', version)), 'none', 'any'))
+ if i == 0:
+ result.append((''.join(('py', version[0])), 'none', 'any'))
+ return set(result)
+
+
+COMPATIBLE_TAGS = compatible_tags()
+
+del compatible_tags
+
+
+def is_compatible(wheel, tags=None):
+ if not isinstance(wheel, Wheel):
+ wheel = Wheel(wheel) # assume it's a filename
+ result = False
+ if tags is None:
+ tags = COMPATIBLE_TAGS
+ for ver, abi, arch in tags:
+ if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
+ result = True
+ break
+ return result
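+
+# Example (illustrative; assumes a CPython 3.x interpreter, for which the
+# pure-Python tag ('py3', 'none', 'any') is always generated):
+#   >>> is_compatible('foo-1.0-py3-none-any.whl')
+#   True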
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distro.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distro.py
new file mode 100644
index 00000000..33061633
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/distro.py
@@ -0,0 +1,1216 @@
+# Copyright 2015,2016,2017 Nir Cohen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The ``distro`` package (``distro`` stands for Linux Distribution) provides
+information about the Linux distribution it runs on, such as a reliable
+machine-readable distro ID, or version information.
+
+It is the recommended replacement for Python's original
+:py:func:`platform.linux_distribution` function, but it provides much more
+functionality. An alternative implementation became necessary because Python
+3.5 deprecated this function, and Python 3.8 will remove it altogether.
+Its predecessor function :py:func:`platform.dist` has been deprecated
+since Python 2.6 and will also be removed in Python 3.8.
+Still, there are many cases in which access to OS distribution information
+is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
+more information.
+"""
+
+import os
+import re
+import sys
+import json
+import shlex
+import logging
+import argparse
+import subprocess
+
+
+_UNIXCONFDIR = os.environ.get('UNIXCONFDIR', '/etc')
+_OS_RELEASE_BASENAME = 'os-release'
+
+#: Translation table for normalizing the "ID" attribute defined in os-release
+#: files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as defined in the os-release file, translated to lower case,
+#: with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_OS_ID = {
+ 'ol': 'oracle', # Oracle Enterprise Linux
+}
+
+#: Translation table for normalizing the "Distributor ID" attribute returned by
+#: the lsb_release command, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as returned by the lsb_release command, translated to lower
+#: case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_LSB_ID = {
+ 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux
+ 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation
+ 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server
+}
+
+#: Translation table for normalizing the distro ID derived from the file name
+#: of distro release files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as derived from the file name of a distro release file,
+#: translated to lower case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_DISTRO_ID = {
+ 'redhat': 'rhel', # RHEL 6.x, 7.x
+}
+
+# Pattern for content of distro release file (reversed)
+_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
+ r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)')
+
+# Pattern for base file name of distro release file
+_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(
+ r'(\w+)[-_](release|version)$')
+
+# Base file names to be ignored when searching for distro release file
+_DISTRO_RELEASE_IGNORE_BASENAMES = (
+ 'debian_version',
+ 'lsb-release',
+ 'oem-release',
+ _OS_RELEASE_BASENAME,
+ 'system-release'
+)
+
+
+def linux_distribution(full_distribution_name=True):
+ """
+ Return information about the current OS distribution as a tuple
+ ``(id_name, version, codename)`` with items as follows:
+
+ * ``id_name``: If *full_distribution_name* is false, the result of
+ :func:`distro.id`. Otherwise, the result of :func:`distro.name`.
+
+ * ``version``: The result of :func:`distro.version`.
+
+ * ``codename``: The result of :func:`distro.codename`.
+
+ The interface of this function is compatible with the original
+ :py:func:`platform.linux_distribution` function, supporting a subset of
+ its parameters.
+
+    The data it returns may not be exactly the same, because it uses more data
+    sources than the original function, and that may lead to different data if
+    the OS distribution is not consistent across the multiple data sources it
+    provides (there are indeed such distributions ...).
+
+ Another reason for differences is the fact that the :func:`distro.id`
+ method normalizes the distro ID string to a reliable machine-readable value
+ for a number of popular OS distributions.
+ """
+ return _distro.linux_distribution(full_distribution_name)
+
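+# Example (illustrative; the values returned depend on the host system):
+#   >>> import distro
+#   >>> distro.linux_distribution(full_distribution_name=False)
+#   ('ubuntu', '18.04', 'bionic')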
+
+def id():
+ """
+ Return the distro ID of the current distribution, as a
+ machine-readable string.
+
+ For a number of OS distributions, the returned distro ID value is
+ *reliable*, in the sense that it is documented and that it does not change
+ across releases of the distribution.
+
+ This package maintains the following reliable distro ID values:
+
+ ============== =========================================
+ Distro ID Distribution
+ ============== =========================================
+ "ubuntu" Ubuntu
+ "debian" Debian
+ "rhel" RedHat Enterprise Linux
+ "centos" CentOS
+ "fedora" Fedora
+ "sles" SUSE Linux Enterprise Server
+ "opensuse" openSUSE
+ "amazon" Amazon Linux
+ "arch" Arch Linux
+ "cloudlinux" CloudLinux OS
+ "exherbo" Exherbo Linux
+    "gentoo"        Gentoo Linux
+ "ibm_powerkvm" IBM PowerKVM
+ "kvmibm" KVM for IBM z Systems
+ "linuxmint" Linux Mint
+ "mageia" Mageia
+ "mandriva" Mandriva Linux
+ "parallels" Parallels
+ "pidora" Pidora
+ "raspbian" Raspbian
+ "oracle" Oracle Linux (and Oracle Enterprise Linux)
+ "scientific" Scientific Linux
+ "slackware" Slackware
+ "xenserver" XenServer
+ "openbsd" OpenBSD
+ "netbsd" NetBSD
+ "freebsd" FreeBSD
+ ============== =========================================
+
+    If you need reliable IDs for further distros to be added to this set,
+    or if you find that the :func:`distro.id` function returns a different
+    distro ID for one of the listed distros, please create an issue in the
+    `distro issue tracker`_.
+
+ **Lookup hierarchy and transformations:**
+
+ First, the ID is obtained from the following sources, in the specified
+ order. The first available and non-empty value is used:
+
+ * the value of the "ID" attribute of the os-release file,
+
+ * the value of the "Distributor ID" attribute returned by the lsb_release
+ command,
+
+ * the first part of the file name of the distro release file,
+
+    The ID value determined this way then passes through the following
+    transformations before it is returned by this method:
+
+ * it is translated to lower case,
+
+ * blanks (which should not be there anyway) are translated to underscores,
+
+ * a normalization of the ID is performed, based upon
+ `normalization tables`_. The purpose of this normalization is to ensure
+ that the ID is as reliable as possible, even across incompatible changes
+ in the OS distributions. A common reason for an incompatible change is
+ the addition of an os-release file, or the addition of the lsb_release
+ command, with ID values that differ from what was previously determined
+ from the distro release file name.
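+
+    Example (hypothetical values for a Red Hat Enterprise Linux host)::
+
+        >>> distro.id()
+        'rhel'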
+ """
+ return _distro.id()
+
+
+def name(pretty=False):
+ """
+ Return the name of the current OS distribution, as a human-readable
+ string.
+
+ If *pretty* is false, the name is returned without version or codename
+ (e.g. "CentOS Linux").
+
+ If *pretty* is true, the version and codename are appended
+ (e.g. "CentOS Linux 7.1.1503 (Core)").
+
+ **Lookup hierarchy:**
+
+ The name is obtained from the following sources, in the specified order.
+ The first available and non-empty value is used:
+
+ * If *pretty* is false:
+
+ - the value of the "NAME" attribute of the os-release file,
+
+ - the value of the "Distributor ID" attribute returned by the lsb_release
+ command,
+
+ - the value of the "<name>" field of the distro release file.
+
+ * If *pretty* is true:
+
+ - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+ - the value of the "Description" attribute returned by the lsb_release
+ command,
+
+ - the value of the "<name>" field of the distro release file, appended
+ with the value of the pretty version ("<version_id>" and "<codename>"
+ fields) of the distro release file, if available.
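+
+    Example (hypothetical values for a CentOS host)::
+
+        >>> distro.name(pretty=True)
+        'CentOS Linux 7.1.1503 (Core)'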
+ """
+ return _distro.name(pretty)
+
+
+def version(pretty=False, best=False):
+ """
+ Return the version of the current OS distribution, as a human-readable
+ string.
+
+ If *pretty* is false, the version is returned without codename (e.g.
+ "7.0").
+
+ If *pretty* is true, the codename in parentheses is appended, if the
+ codename is non-empty (e.g. "7.0 (Maipo)").
+
+ Some distributions provide version numbers with different precisions in
+ the different sources of distribution information. Examining the different
+ sources in a fixed priority order does not always yield the most precise
+ version (e.g. for Debian 8.2, or CentOS 7.1).
+
+ The *best* parameter can be used to control the approach for the returned
+ version:
+
+ If *best* is false, the first non-empty version number in priority order of
+ the examined sources is returned.
+
+ If *best* is true, the most precise version number out of all examined
+ sources is returned.
+
+ **Lookup hierarchy:**
+
+ In all cases, the version number is obtained from the following sources.
+ If *best* is false, this order represents the priority order:
+
+ * the value of the "VERSION_ID" attribute of the os-release file,
+ * the value of the "Release" attribute returned by the lsb_release
+ command,
+ * the version number parsed from the "<version_id>" field of the first line
+ of the distro release file,
+ * the version number parsed from the "PRETTY_NAME" attribute of the
+ os-release file, if it follows the format of the distro release files,
+ * the version number parsed from the "Description" attribute returned by
+ the lsb_release command, if it follows the format of the distro release
+ files.
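+
+    Example (hypothetical values for a Red Hat Enterprise Linux 7 host)::
+
+        >>> distro.version(pretty=True)
+        '7.0 (Maipo)'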
+ """
+ return _distro.version(pretty, best)
+
+
+def version_parts(best=False):
+ """
+ Return the version of the current OS distribution as a tuple
+ ``(major, minor, build_number)`` with items as follows:
+
+ * ``major``: The result of :func:`distro.major_version`.
+
+ * ``minor``: The result of :func:`distro.minor_version`.
+
+ * ``build_number``: The result of :func:`distro.build_number`.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
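+
+    Example (hypothetical values)::
+
+        >>> distro.version_parts()
+        ('7', '0', '')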
+ """
+ return _distro.version_parts(best)
+
+
+def major_version(best=False):
+ """
+ Return the major version of the current OS distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The major version is the first
+ part of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.major_version(best)
+
+
+def minor_version(best=False):
+ """
+ Return the minor version of the current OS distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The minor version is the second
+ part of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.minor_version(best)
+
+
+def build_number(best=False):
+ """
+ Return the build number of the current OS distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The build number is the third part
+ of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.build_number(best)
+
+
+def like():
+ """
+ Return a space-separated list of distro IDs of distributions that are
+ closely related to the current OS distribution with regard to packaging
+ and programming interfaces, for example distributions from which the
+ current distribution is derived.
+
+ **Lookup hierarchy:**
+
+ This information item is only provided by the os-release file.
+ For details, see the description of the "ID_LIKE" attribute in the
+ `os-release man page
+ <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+ """
+ return _distro.like()
+
+
+def codename():
+ """
+ Return the codename for the release of the current OS distribution,
+ as a string.
+
+ If the distribution does not have a codename, an empty string is returned.
+
+ Note that the returned codename is not always really a codename. For
+ example, openSUSE returns "x86_64". This function does not handle such
+ cases in any special way and just returns the string it finds, if any.
+
+ **Lookup hierarchy:**
+
+ * the codename within the "VERSION" attribute of the os-release file, if
+ provided,
+
+ * the value of the "Codename" attribute returned by the lsb_release
+ command,
+
+ * the value of the "<codename>" field of the distro release file.
+ """
+ return _distro.codename()
+
+
+def info(pretty=False, best=False):
+ """
+ Return certain machine-readable information items about the current OS
+ distribution in a dictionary, as shown in the following example:
+
+ .. sourcecode:: python
+
+ {
+ 'id': 'rhel',
+ 'version': '7.0',
+ 'version_parts': {
+ 'major': '7',
+ 'minor': '0',
+ 'build_number': ''
+ },
+ 'like': 'fedora',
+ 'codename': 'Maipo'
+ }
+
+ The dictionary structure and keys are always the same, regardless of which
+ information items are available in the underlying data sources. The values
+ for the various keys are as follows:
+
+ * ``id``: The result of :func:`distro.id`.
+
+ * ``version``: The result of :func:`distro.version`.
+
+ * ``version_parts -> major``: The result of :func:`distro.major_version`.
+
+ * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
+
+ * ``version_parts -> build_number``: The result of
+ :func:`distro.build_number`.
+
+ * ``like``: The result of :func:`distro.like`.
+
+ * ``codename``: The result of :func:`distro.codename`.
+
+ For a description of the *pretty* and *best* parameters, see the
+ :func:`distro.version` method.
+ """
+ return _distro.info(pretty, best)
+
+
+def os_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the os-release file data source of the current OS distribution.
+
+ See `os-release file`_ for details about these information items.
+ """
+ return _distro.os_release_info()
+
+
+def lsb_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the lsb_release command data source of the current OS distribution.
+
+ See `lsb_release command output`_ for details about these information
+ items.
+ """
+ return _distro.lsb_release_info()
+
+
+def distro_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the distro release file data source of the current OS distribution.
+
+ See `distro release file`_ for details about these information items.
+ """
+ return _distro.distro_release_info()
+
+
+def uname_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the uname command data source of the current OS distribution.
+ """
+ return _distro.uname_info()
+
+
+def os_release_attr(attribute):
+ """
+ Return a single named information item from the os-release file data source
+ of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `os-release file`_ for details about these information items.
+ """
+ return _distro.os_release_attr(attribute)
+
+
+def lsb_release_attr(attribute):
+ """
+ Return a single named information item from the lsb_release command output
+ data source of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `lsb_release command output`_ for details about these information
+ items.
+ """
+ return _distro.lsb_release_attr(attribute)
+
+
+def distro_release_attr(attribute):
+ """
+ Return a single named information item from the distro release file
+ data source of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `distro release file`_ for details about these information items.
+ """
+ return _distro.distro_release_attr(attribute)
+
+
+def uname_attr(attribute):
+ """
+ Return a single named information item from the uname command output
+ data source of the current OS distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+ """
+ return _distro.uname_attr(attribute)
+
+
+class cached_property(object):
+ """A version of @property which caches the value. On access, it calls the
+ underlying function and sets the value in `__dict__` so future accesses
+ will not re-call the property.
+ """
+ def __init__(self, f):
+ self._fname = f.__name__
+ self._f = f
+
+ def __get__(self, obj, owner):
+ assert obj is not None, 'call {} on an instance'.format(self._fname)
+ ret = obj.__dict__[self._fname] = self._f(obj)
+ return ret
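+
+# Illustrative sketch (not part of the original module): because
+# cached_property is a non-data descriptor, the value it stores in the
+# instance __dict__ on first access shadows the descriptor afterwards:
+#
+#     class Example(object):
+#         @cached_property
+#         def answer(self):
+#             print('computed')
+#             return 42
+#
+#     e = Example()
+#     e.answer  # prints 'computed' and returns 42
+#     e.answer  # returns 42 without calling the function again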
+
+
+class LinuxDistribution(object):
+ """
+ Provides information about an OS distribution.
+
+ This package creates a private module-global instance of this class with
+ default initialization arguments, that is used by the
+ `consolidated accessor functions`_ and `single source accessor functions`_.
+ By using default initialization arguments, that module-global instance
+ returns data about the current OS distribution (i.e. the distro this
+ package runs on).
+
+ Normally, it is not necessary to create additional instances of this class.
+ However, in situations where control is needed over the exact data sources
+ that are used, instances of this class can be created with a specific
+ distro release file, or a specific os-release file, or without invoking the
+ lsb_release command.
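+
+    A minimal sketch (hypothetical paths) of such a custom instance::
+
+        ld = LinuxDistribution(include_lsb=False,
+                               include_uname=False,
+                               os_release_file='/etc/os-release')
+        ld.id()    # e.g. 'debian'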
+ """
+
+ def __init__(self,
+ include_lsb=True,
+ os_release_file='',
+ distro_release_file='',
+ include_uname=True):
+ """
+ The initialization method of this class gathers information from the
+ available data sources, and stores that in private instance attributes.
+ Subsequent access to the information items uses these private instance
+ attributes, so that the data sources are read only once.
+
+ Parameters:
+
+ * ``include_lsb`` (bool): Controls whether the
+ `lsb_release command output`_ is included as a data source.
+
+ If the lsb_release command is not available in the program execution
+ path, the data source for the lsb_release command will be empty.
+
+ * ``os_release_file`` (string): The path name of the
+ `os-release file`_ that is to be used as a data source.
+
+ An empty string (the default) will cause the default path name to
+ be used (see `os-release file`_ for details).
+
+ If the specified or defaulted os-release file does not exist, the
+ data source for the os-release file will be empty.
+
+ * ``distro_release_file`` (string): The path name of the
+ `distro release file`_ that is to be used as a data source.
+
+ An empty string (the default) will cause a default search algorithm
+ to be used (see `distro release file`_ for details).
+
+ If the specified distro release file does not exist, or if no default
+ distro release file can be found, the data source for the distro
+ release file will be empty.
+
+ * ``include_uname`` (bool): Controls whether uname command output is
+ included as a data source. If the uname command is not available in
+ the program execution path, the data source for the uname command will
+ be empty.
+
+ Public instance attributes:
+
+ * ``os_release_file`` (string): The path name of the
+ `os-release file`_ that is actually used as a data source. The
+ empty string if no os-release file is used as a data source.
+
+ * ``distro_release_file`` (string): The path name of the
+ `distro release file`_ that is actually used as a data source. The
+ empty string if no distro release file is used as a data source.
+
+ * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
+ This controls whether the lsb information will be loaded.
+
+ * ``include_uname`` (bool): The result of the ``include_uname``
+ parameter. This controls whether the uname information will
+ be loaded.
+
+ Raises:
+
+ * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
+ release file.
+
+ * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
+ some issue (other than not being available in the program execution
+ path).
+
+ * :py:exc:`UnicodeError`: A data source has unexpected characters or
+ uses an unexpected encoding.
+ """
+ self.os_release_file = os_release_file or \
+ os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
+ self.distro_release_file = distro_release_file or '' # updated later
+ self.include_lsb = include_lsb
+ self.include_uname = include_uname
+
+ def __repr__(self):
+ """Return repr of all info
+ """
+ return \
+ "LinuxDistribution(" \
+ "os_release_file={self.os_release_file!r}, " \
+ "distro_release_file={self.distro_release_file!r}, " \
+ "include_lsb={self.include_lsb!r}, " \
+ "include_uname={self.include_uname!r}, " \
+ "_os_release_info={self._os_release_info!r}, " \
+ "_lsb_release_info={self._lsb_release_info!r}, " \
+ "_distro_release_info={self._distro_release_info!r}, " \
+ "_uname_info={self._uname_info!r})".format(
+ self=self)
+
+ def linux_distribution(self, full_distribution_name=True):
+ """
+ Return information about the OS distribution that is compatible
+ with Python's :func:`platform.linux_distribution`, supporting a subset
+ of its parameters.
+
+ For details, see :func:`distro.linux_distribution`.
+ """
+ return (
+ self.name() if full_distribution_name else self.id(),
+ self.version(),
+ self.codename()
+ )
+
+ def id(self):
+ """Return the distro ID of the OS distribution, as a string.
+
+ For details, see :func:`distro.id`.
+ """
+ def normalize(distro_id, table):
+ distro_id = distro_id.lower().replace(' ', '_')
+ return table.get(distro_id, distro_id)
+
+ distro_id = self.os_release_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_OS_ID)
+
+ distro_id = self.lsb_release_attr('distributor_id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_LSB_ID)
+
+ distro_id = self.distro_release_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+ distro_id = self.uname_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+ return ''
+
+ def name(self, pretty=False):
+ """
+ Return the name of the OS distribution, as a string.
+
+ For details, see :func:`distro.name`.
+ """
+ name = self.os_release_attr('name') \
+ or self.lsb_release_attr('distributor_id') \
+ or self.distro_release_attr('name') \
+ or self.uname_attr('name')
+ if pretty:
+ name = self.os_release_attr('pretty_name') \
+ or self.lsb_release_attr('description')
+ if not name:
+ name = self.distro_release_attr('name') \
+ or self.uname_attr('name')
+ version = self.version(pretty=True)
+ if version:
+ name = name + ' ' + version
+ return name or ''
+
+ def version(self, pretty=False, best=False):
+ """
+ Return the version of the OS distribution, as a string.
+
+ For details, see :func:`distro.version`.
+ """
+ versions = [
+ self.os_release_attr('version_id'),
+ self.lsb_release_attr('release'),
+ self.distro_release_attr('version_id'),
+ self._parse_distro_release_content(
+ self.os_release_attr('pretty_name')).get('version_id', ''),
+ self._parse_distro_release_content(
+ self.lsb_release_attr('description')).get('version_id', ''),
+ self.uname_attr('release')
+ ]
+ version = ''
+ if best:
+ # This algorithm uses the last version in priority order that has
+ # the best precision. If the versions are not in conflict, that
+ # does not matter; otherwise, using the last one instead of the
+ # first one might be considered a surprise.
+ for v in versions:
+ if v.count(".") > version.count(".") or version == '':
+ version = v
+ else:
+ for v in versions:
+ if v != '':
+ version = v
+ break
+ if pretty and version and self.codename():
+ version = u'{0} ({1})'.format(version, self.codename())
+ return version
+
+ def version_parts(self, best=False):
+ """
+ Return the version of the OS distribution, as a tuple of version
+ numbers.
+
+ For details, see :func:`distro.version_parts`.
+ """
+ version_str = self.version(best=best)
+ if version_str:
+ version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?')
+ matches = version_regex.match(version_str)
+ if matches:
+ major, minor, build_number = matches.groups()
+ return major, minor or '', build_number or ''
+ return '', '', ''
+
+ def major_version(self, best=False):
+ """
+ Return the major version number of the current distribution.
+
+ For details, see :func:`distro.major_version`.
+ """
+ return self.version_parts(best)[0]
+
+ def minor_version(self, best=False):
+ """
+ Return the minor version number of the current distribution.
+
+ For details, see :func:`distro.minor_version`.
+ """
+ return self.version_parts(best)[1]
+
+ def build_number(self, best=False):
+ """
+ Return the build number of the current distribution.
+
+ For details, see :func:`distro.build_number`.
+ """
+ return self.version_parts(best)[2]
+
+ def like(self):
+ """
+ Return the IDs of distributions that are like the OS distribution.
+
+ For details, see :func:`distro.like`.
+ """
+ return self.os_release_attr('id_like') or ''
+
+ def codename(self):
+ """
+ Return the codename of the OS distribution.
+
+ For details, see :func:`distro.codename`.
+ """
+ try:
+ # Handle os_release specially since distros might purposefully set
+ # this to empty string to have no codename
+ return self._os_release_info['codename']
+ except KeyError:
+ return self.lsb_release_attr('codename') \
+ or self.distro_release_attr('codename') \
+ or ''
+
+ def info(self, pretty=False, best=False):
+ """
+ Return certain machine-readable information about the OS
+ distribution.
+
+ For details, see :func:`distro.info`.
+ """
+ return dict(
+ id=self.id(),
+ version=self.version(pretty, best),
+ version_parts=dict(
+ major=self.major_version(best),
+ minor=self.minor_version(best),
+ build_number=self.build_number(best)
+ ),
+ like=self.like(),
+ codename=self.codename(),
+ )
+
+ def os_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the os-release file data source of the OS distribution.
+
+ For details, see :func:`distro.os_release_info`.
+ """
+ return self._os_release_info
+
+ def lsb_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the lsb_release command data source of the OS
+ distribution.
+
+ For details, see :func:`distro.lsb_release_info`.
+ """
+ return self._lsb_release_info
+
+ def distro_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the distro release file data source of the OS
+ distribution.
+
+ For details, see :func:`distro.distro_release_info`.
+ """
+ return self._distro_release_info
+
+ def uname_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the uname command data source of the OS distribution.
+
+ For details, see :func:`distro.uname_info`.
+ """
+ return self._uname_info
+
+ def os_release_attr(self, attribute):
+ """
+ Return a single named information item from the os-release file data
+ source of the OS distribution.
+
+ For details, see :func:`distro.os_release_attr`.
+ """
+ return self._os_release_info.get(attribute, '')
+
+ def lsb_release_attr(self, attribute):
+ """
+ Return a single named information item from the lsb_release command
+ output data source of the OS distribution.
+
+ For details, see :func:`distro.lsb_release_attr`.
+ """
+ return self._lsb_release_info.get(attribute, '')
+
+ def distro_release_attr(self, attribute):
+ """
+ Return a single named information item from the distro release file
+ data source of the OS distribution.
+
+ For details, see :func:`distro.distro_release_attr`.
+ """
+ return self._distro_release_info.get(attribute, '')
+
+ def uname_attr(self, attribute):
+ """
+ Return a single named information item from the uname command
+ output data source of the OS distribution.
+
+ For details, see :func:`distro.uname_attr`.
+ """
+ return self._uname_info.get(attribute, '')
+
+ @cached_property
+ def _os_release_info(self):
+ """
+ Get the information items from the specified os-release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if os.path.isfile(self.os_release_file):
+ with open(self.os_release_file) as release_file:
+ return self._parse_os_release_content(release_file)
+ return {}
+
+ @staticmethod
+ def _parse_os_release_content(lines):
+ """
+ Parse the lines of an os-release file.
+
+ Parameters:
+
+ * lines: Iterable through the lines in the os-release file.
+ Each line must be a unicode string or a UTF-8 encoded byte
+ string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ props = {}
+ lexer = shlex.shlex(lines, posix=True)
+ lexer.whitespace_split = True
+
+ # The shlex module defines its `wordchars` variable using literals,
+ # making it dependent on the encoding of the Python source file.
+ # In Python 2.6 and 2.7, the shlex source file is encoded in
+ # 'iso-8859-1', and the `wordchars` variable is defined as a byte
+ # string. This causes a UnicodeDecodeError to be raised when the
+ # parsed content is a unicode object. The following fix resolves that
+ # (... but it should be fixed in shlex...):
+ if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
+ lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
+
+ tokens = list(lexer)
+ for token in tokens:
+ # At this point, all shell-like parsing has been done (i.e.
+ # comments processed, quotes and backslash escape sequences
+ # processed, multi-line values assembled, trailing newlines
+ # stripped, etc.), so the tokens are now either:
+ # * variable assignments: var=value
+ # * commands or their arguments (not allowed in os-release)
+ if '=' in token:
+ k, v = token.split('=', 1)
+ if isinstance(v, bytes):
+ v = v.decode('utf-8')
+ props[k.lower()] = v
+ else:
+ # Ignore any tokens that are not variable assignments
+ pass
+
+ if 'version_codename' in props:
+ # os-release added a version_codename field. Use that in
+ # preference to anything else. Note that some distros purposefully
+ # do not have codenames; they should be setting
+ # version_codename=""
+ props['codename'] = props['version_codename']
+ elif 'ubuntu_codename' in props:
+ # Same as above but a non-standard field name used on older Ubuntus
+ props['codename'] = props['ubuntu_codename']
+ elif 'version' in props:
+ # If there is no version_codename, parse it from the version
+ codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version'])
+ if codename:
+ codename = codename.group()
+ codename = codename.strip('()')
+ codename = codename.strip(',')
+ codename = codename.strip()
+ # The codename appears within parentheses.
+ props['codename'] = codename
+
+ return props
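+
+    # Illustrative example: for os-release content such as
+    #
+    #     NAME="Ubuntu"
+    #     VERSION="16.04.3 LTS (Xenial Xerus)"
+    #
+    # this returns (keys lower-cased, codename taken from the
+    # parenthesized part of VERSION):
+    #
+    #     {'name': 'Ubuntu',
+    #      'version': '16.04.3 LTS (Xenial Xerus)',
+    #      'codename': 'Xenial Xerus'}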
+
+ @cached_property
+ def _lsb_release_info(self):
+ """
+ Get the information items from the lsb_release command output.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if not self.include_lsb:
+ return {}
+ with open(os.devnull, 'w') as devnull:
+ try:
+ cmd = ('lsb_release', '-a')
+ stdout = subprocess.check_output(cmd, stderr=devnull)
+ except OSError: # Command not found
+ return {}
+ content = stdout.decode(sys.getfilesystemencoding()).splitlines()
+ return self._parse_lsb_release_content(content)
+
+ @staticmethod
+ def _parse_lsb_release_content(lines):
+ """
+ Parse the output of the lsb_release command.
+
+ Parameters:
+
+ * lines: Iterable through the lines of the lsb_release output.
+ Each line must be a unicode string or a UTF-8 encoded byte
+ string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ props = {}
+ for line in lines:
+ kv = line.strip('\n').split(':', 1)
+ if len(kv) != 2:
+ # Ignore lines without colon.
+ continue
+ k, v = kv
+ props.update({k.replace(' ', '_').lower(): v.strip()})
+ return props
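+
+    # Illustrative example:
+    #
+    #     >>> LinuxDistribution._parse_lsb_release_content(
+    #     ...     ['Distributor ID:\tUbuntu', 'Release:\t16.04'])
+    #     {'distributor_id': 'Ubuntu', 'release': '16.04'}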
+
+ @cached_property
+ def _uname_info(self):
+ with open(os.devnull, 'w') as devnull:
+ try:
+ cmd = ('uname', '-rs')
+ stdout = subprocess.check_output(cmd, stderr=devnull)
+ except OSError:
+ return {}
+ content = stdout.decode(sys.getfilesystemencoding()).splitlines()
+ return self._parse_uname_content(content)
+
+ @staticmethod
+ def _parse_uname_content(lines):
+ props = {}
+ match = re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip())
+ if match:
+ name, version = match.groups()
+
+ # This is to prevent the Linux kernel version from
+ # appearing as the 'best' version on otherwise
+ # identifiable distributions.
+ if name == 'Linux':
+ return {}
+ props['id'] = name.lower()
+ props['name'] = name
+ props['release'] = version
+ return props
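+
+    # Illustrative example (hypothetical `uname -rs` output):
+    #
+    #     >>> LinuxDistribution._parse_uname_content(['FreeBSD 11.1-RELEASE'])
+    #     {'id': 'freebsd', 'name': 'FreeBSD', 'release': '11.1'}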
+
+ @cached_property
+ def _distro_release_info(self):
+ """
+ Get the information items from the specified distro release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if self.distro_release_file:
+ # If it was specified, we use it and parse what we can, even if
+ # its file name or content does not match the expected pattern.
+ distro_info = self._parse_distro_release_file(
+ self.distro_release_file)
+ basename = os.path.basename(self.distro_release_file)
+ # The file name pattern for user-specified distro release files
+ # is somewhat more tolerant (compared to when searching for the
+ # file), because we want to use what was specified as best as
+ # possible.
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ if 'name' in distro_info \
+ and 'cloudlinux' in distro_info['name'].lower():
+ distro_info['id'] = 'cloudlinux'
+ elif match:
+ distro_info['id'] = match.group(1)
+ return distro_info
+ else:
+ try:
+ basenames = os.listdir(_UNIXCONFDIR)
+ # We sort for repeatability in cases where there are multiple
+ # distro specific files; e.g. CentOS, Oracle, Enterprise all
+ # containing `redhat-release` on top of their own.
+ basenames.sort()
+ except OSError:
+ # This may occur when /etc is not readable but we can't be
+ # sure about the *-release files. Check common entries of
+ # /etc for information. If they turn out to not be there the
+ # error is handled in `_parse_distro_release_file()`.
+ basenames = ['SuSE-release',
+ 'arch-release',
+ 'base-release',
+ 'centos-release',
+ 'fedora-release',
+ 'gentoo-release',
+ 'mageia-release',
+ 'mandrake-release',
+ 'mandriva-release',
+ 'mandrivalinux-release',
+ 'manjaro-release',
+ 'oracle-release',
+ 'redhat-release',
+ 'sl-release',
+ 'slackware-version']
+ for basename in basenames:
+ if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
+ continue
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ if match:
+ filepath = os.path.join(_UNIXCONFDIR, basename)
+ distro_info = self._parse_distro_release_file(filepath)
+ if 'name' in distro_info:
+ # The name is always present if the pattern matches
+ self.distro_release_file = filepath
+ distro_info['id'] = match.group(1)
+ if 'cloudlinux' in distro_info['name'].lower():
+ distro_info['id'] = 'cloudlinux'
+ return distro_info
+ return {}
+
+ def _parse_distro_release_file(self, filepath):
+ """
+ Parse a distro release file.
+
+ Parameters:
+
+ * filepath: Path name of the distro release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ try:
+ with open(filepath) as fp:
+ # Only parse the first line. For instance, on SLES there
+ # are multiple lines. We don't want them...
+ return self._parse_distro_release_content(fp.readline())
+ except (OSError, IOError):
+ # Ignore not being able to read a specific, seemingly version
+ # related file.
+ # See https://github.com/nir0s/distro/issues/162
+ return {}
+
+ @staticmethod
+ def _parse_distro_release_content(line):
+ """
+ Parse a line from a distro release file.
+
+ Parameters:
+ * line: Line from the distro release file. Must be a unicode string
+ or a UTF-8 encoded byte string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if isinstance(line, bytes):
+ line = line.decode('utf-8')
+ matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(
+ line.strip()[::-1])
+ distro_info = {}
+ if matches:
+ # regexp ensures non-None
+ distro_info['name'] = matches.group(3)[::-1]
+ if matches.group(2):
+ distro_info['version_id'] = matches.group(2)[::-1]
+ if matches.group(1):
+ distro_info['codename'] = matches.group(1)[::-1]
+ elif line:
+ distro_info['name'] = line.strip()
+ return distro_info
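+
+    # Illustrative example (the reversed pattern pulls codename, version
+    # and name out of a classic release line):
+    #
+    #     >>> LinuxDistribution._parse_distro_release_content(
+    #     ...     'CentOS Linux release 7.1.1503 (Core)')
+    #     {'name': 'CentOS Linux', 'version_id': '7.1.1503',
+    #      'codename': 'Core'}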
+
+
+_distro = LinuxDistribution()
+
+
+def main():
+ logger = logging.getLogger(__name__)
+ logger.setLevel(logging.DEBUG)
+ logger.addHandler(logging.StreamHandler(sys.stdout))
+
+ parser = argparse.ArgumentParser(description="OS distro info tool")
+ parser.add_argument(
+ '--json',
+ '-j',
+ help="Output in machine readable format",
+ action="store_true")
+ args = parser.parse_args()
+
+ if args.json:
+ logger.info(json.dumps(info(), indent=4, sort_keys=True))
+ else:
+ logger.info('Name: %s', name(pretty=True))
+ distribution_version = version(pretty=True)
+ logger.info('Version: %s', distribution_version)
+ distribution_codename = codename()
+ logger.info('Codename: %s', distribution_codename)
+
+
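+# Illustrative invocation (assuming this file is run as a script):
+# `python distro.py --json` prints the info() dictionary as JSON, while
+# running it without arguments prints name, version and codename lines.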
+if __name__ == '__main__':
+ main()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py
new file mode 100644
index 00000000..04912349
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py
@@ -0,0 +1,35 @@
+"""
+HTML parsing library based on the `WHATWG HTML specification
+<https://whatwg.org/html>`_. The parser is designed to be compatible with
+existing HTML found in the wild and implements well-defined error recovery that
+is largely compatible with modern desktop web browsers.
+
+Example usage::
+
+ from pip._vendor import html5lib
+ with open("my_document.html", "rb") as f:
+ tree = html5lib.parse(f)
+
+For convenience, this module re-exports the following names:
+
+* :func:`~.html5parser.parse`
+* :func:`~.html5parser.parseFragment`
+* :class:`~.html5parser.HTMLParser`
+* :func:`~.treebuilders.getTreeBuilder`
+* :func:`~.treewalkers.getTreeWalker`
+* :func:`~.serializer.serialize`
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+
+from .html5parser import HTMLParser, parse, parseFragment
+from .treebuilders import getTreeBuilder
+from .treewalkers import getTreeWalker
+from .serializer import serialize
+
+__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
+ "getTreeWalker", "serialize"]
+
+# this has to be at the top level, see how setup.py parses this
+#: Distribution version number.
+__version__ = "1.0.1"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..8a1c72cd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc
new file mode 100644
index 00000000..86e4cbe4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc
new file mode 100644
index 00000000..96a0040d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc
new file mode 100644
index 00000000..c16c1bf4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc
new file mode 100644
index 00000000..3a84199f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc
new file mode 100644
index 00000000..222766fc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc
new file mode 100644
index 00000000..84b10cb0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc
new file mode 100644
index 00000000..8051998b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py
new file mode 100644
index 00000000..4c77717b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py
@@ -0,0 +1,288 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import re
+import warnings
+
+from .constants import DataLossWarning
+
+baseChar = """
+[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
+[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
+[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
+[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
+[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
+[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
+[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
+[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
+[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
+[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
+[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
+[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
+[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
+[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
+[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
+[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
+[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
+[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
+[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
+[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
+[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
+[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
+[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
+[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
+[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
+[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
+[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
+[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
+[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
+[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
+#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
+#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
+#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
+[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
+[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
+#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
+[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
+[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
+[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
+[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
+[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
+#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
+[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
+[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
+[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
+[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]"""
+
+ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]"""
+
+combiningCharacter = """
+[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
+[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
+[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
+[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
+#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
+[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
+[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
+#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
+[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
+[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
+#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
+[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
+[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
+[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
+[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
+[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
+#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
+[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
+#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
+[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
+[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
+#x3099 | #x309A"""
+
+digit = """
+[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
+[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
+[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
+[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]"""
+
+extender = """
+#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
+[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]"""
+
+letter = " | ".join([baseChar, ideographic])
+
+# The XML Name character classes, without the ":" that the spec also allows
+name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter,
+ extender])
+nameFirst = " | ".join([letter, "_"])
+
+reChar = re.compile(r"#x([\d|A-F]{4,4})")
+reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]")
+
+
+def charStringToList(chars):
+ charRanges = [item.strip() for item in chars.split(" | ")]
+ rv = []
+ for item in charRanges:
+ foundMatch = False
+ for regexp in (reChar, reCharRange):
+ match = regexp.match(item)
+ if match is not None:
+ rv.append([hexToInt(item) for item in match.groups()])
+ if len(rv[-1]) == 1:
+ rv[-1] = rv[-1] * 2
+ foundMatch = True
+ break
+ if not foundMatch:
+ assert len(item) == 1
+
+ rv.append([ord(item)] * 2)
+ rv = normaliseCharList(rv)
+ return rv
+
+
+def normaliseCharList(charList):
+ charList = sorted(charList)
+ for item in charList:
+ assert item[1] >= item[0]
+ rv = []
+ i = 0
+ while i < len(charList):
+ j = 1
+ rv.append(charList[i])
+ while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1:
+ rv[-1][1] = charList[i + j][1]
+ j += 1
+ i += j
+ return rv
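+
+# Illustrative example: a spec-style character-class string becomes a
+# sorted list of inclusive [start, end] codepoint ranges:
+#
+#     >>> charStringToList("#x0041 | [#x0061-#x007A]")
+#     [[65, 65], [97, 122]]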
+
+# We don't really support characters above the BMP :(
+max_unicode = int("FFFF", 16)
+
+
+def missingRanges(charList):
+ rv = []
+ if charList[0][0] != 0:
+ rv.append([0, charList[0][0] - 1])
+ for i, item in enumerate(charList[:-1]):
+ rv.append([item[1] + 1, charList[i + 1][0] - 1])
+ if charList[-1][1] != max_unicode:
+ rv.append([charList[-1][1] + 1, max_unicode])
+ return rv
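+
+# Illustrative example: the complement of two ranges within the BMP:
+#
+#     >>> missingRanges([[0x41, 0x5A], [0x61, 0x7A]])
+#     [[0, 64], [91, 96], [123, 65535]]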
+
+
+def listToRegexpStr(charList):
+ rv = []
+ for item in charList:
+ if item[0] == item[1]:
+ rv.append(escapeRegexp(chr(item[0])))
+ else:
+ rv.append(escapeRegexp(chr(item[0])) + "-" +
+ escapeRegexp(chr(item[1])))
+ return "[%s]" % "".join(rv)
+
+
+def hexToInt(hex_str):
+ return int(hex_str, 16)
+
+
+def escapeRegexp(string):
+ specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}",
+ "[", "]", "|", "(", ")", "-")
+ for char in specialCharacters:
+ string = string.replace(char, "\\" + char)
+
+ return string
+
+# output from the above
+nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa
+
+nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]') # noqa
+
+# Simpler things
+nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]")
+
+
+class InfosetFilter(object):
+ replacementRegexp = re.compile(r"U[\dA-F]{5,5}")
+
+ def __init__(self,
+ dropXmlnsLocalName=False,
+ dropXmlnsAttrNs=False,
+ preventDoubleDashComments=False,
+ preventDashAtCommentEnd=False,
+ replaceFormFeedCharacters=True,
+ preventSingleQuotePubid=False):
+
+ self.dropXmlnsLocalName = dropXmlnsLocalName
+ self.dropXmlnsAttrNs = dropXmlnsAttrNs
+
+ self.preventDoubleDashComments = preventDoubleDashComments
+ self.preventDashAtCommentEnd = preventDashAtCommentEnd
+
+ self.replaceFormFeedCharacters = replaceFormFeedCharacters
+
+ self.preventSingleQuotePubid = preventSingleQuotePubid
+
+ self.replaceCache = {}
+
+ def coerceAttribute(self, name, namespace=None):
+ if self.dropXmlnsLocalName and name.startswith("xmlns:"):
+ warnings.warn("Attributes cannot begin with xmlns", DataLossWarning)
+ return None
+ elif (self.dropXmlnsAttrNs and
+ namespace == "http://www.w3.org/2000/xmlns/"):
+ warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning)
+ return None
+ else:
+ return self.toXmlName(name)
+
+ def coerceElement(self, name):
+ return self.toXmlName(name)
+
+ def coerceComment(self, data):
+ if self.preventDoubleDashComments:
+ while "--" in data:
+ warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning)
+ data = data.replace("--", "- -")
+ if data.endswith("-"):
+ warnings.warn("Comments cannot end in a dash", DataLossWarning)
+ data += " "
+ return data
+
+ def coerceCharacters(self, data):
+ if self.replaceFormFeedCharacters:
+ for _ in range(data.count("\x0C")):
+ warnings.warn("Text cannot contain U+000C", DataLossWarning)
+ data = data.replace("\x0C", " ")
+ # Other non-xml characters
+ return data
+
+ def coercePubid(self, data):
+ dataOutput = data
+ for char in nonPubidCharRegexp.findall(data):
+ warnings.warn("Coercing non-XML pubid", DataLossWarning)
+ replacement = self.getReplacementCharacter(char)
+ dataOutput = dataOutput.replace(char, replacement)
+ if self.preventSingleQuotePubid and dataOutput.find("'") >= 0:
+ warnings.warn("Pubid cannot contain single quote", DataLossWarning)
+ dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'"))
+ return dataOutput
+
+ def toXmlName(self, name):
+ nameFirst = name[0]
+ nameRest = name[1:]
+ m = nonXmlNameFirstBMPRegexp.match(nameFirst)
+ if m:
+ warnings.warn("Coercing non-XML name", DataLossWarning)
+ nameFirstOutput = self.getReplacementCharacter(nameFirst)
+ else:
+ nameFirstOutput = nameFirst
+
+ nameRestOutput = nameRest
+ replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
+ for char in replaceChars:
+ warnings.warn("Coercing non-XML name", DataLossWarning)
+ replacement = self.getReplacementCharacter(char)
+ nameRestOutput = nameRestOutput.replace(char, replacement)
+ return nameFirstOutput + nameRestOutput
+
+ def getReplacementCharacter(self, char):
+ if char in self.replaceCache:
+ replacement = self.replaceCache[char]
+ else:
+ replacement = self.escapeChar(char)
+ return replacement
+
+ def fromXmlName(self, name):
+ for item in set(self.replacementRegexp.findall(name)):
+ name = name.replace(item, self.unescapeChar(item))
+ return name
+
+ def escapeChar(self, char):
+ replacement = "U%05X" % ord(char)
+ self.replaceCache[char] = replacement
+ return replacement
+
+ def unescapeChar(self, charcode):
+ return chr(int(charcode[1:], 16))
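+
+
+# Illustrative example: characters that are invalid in XML names are
+# escaped as "U" plus five hex digits and can be round-tripped back:
+#
+#     f = InfosetFilter()
+#     f.coerceElement('foo bar')      # warns, returns 'fooU00020bar'
+#     f.fromXmlName('fooU00020bar')   # returns 'foo bar'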
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py
new file mode 100644
index 00000000..a65e55f6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py
@@ -0,0 +1,923 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from pip._vendor.six import text_type, binary_type
+from pip._vendor.six.moves import http_client, urllib
+
+import codecs
+import re
+
+from pip._vendor import webencodings
+
+from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
+from .constants import _ReparseException
+from . import _utils
+
+from io import StringIO
+
+try:
+ from io import BytesIO
+except ImportError:
+ BytesIO = StringIO
+
+# Non-unicode versions of constants for use in the pre-parser
+spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])
+asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])
+asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])
+spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])
+
+
+invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa
+
+if _utils.supports_lone_surrogates:
+ # Use one extra step of indirection and create surrogates with
+ # eval. Not using this indirection would introduce an illegal
+ # unicode literal on platforms not supporting such lone
+ # surrogates.
+ assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1
+ invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +
+ eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used
+ "]")
+else:
+ invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)
+
+non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
+ 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
+ 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
+ 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
+ 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
+ 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
+ 0x10FFFE, 0x10FFFF])
+
+ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]")
+
+# Cache for charsUntil()
+charsUntilRegEx = {}
+
+
+class BufferedStream(object):
+ """Buffering for streams that do not have buffering of their own
+
+ The buffer is implemented as a list of chunks on the assumption that
+ joining many strings will be slow since it is O(n**2)
+ """
+
+ def __init__(self, stream):
+ self.stream = stream
+ self.buffer = []
+ self.position = [-1, 0] # chunk number, offset
+
+ def tell(self):
+ pos = 0
+ for chunk in self.buffer[:self.position[0]]:
+ pos += len(chunk)
+ pos += self.position[1]
+ return pos
+
+ def seek(self, pos):
+ assert pos <= self._bufferedBytes()
+ offset = pos
+ i = 0
+ while len(self.buffer[i]) < offset:
+ offset -= len(self.buffer[i])
+ i += 1
+ self.position = [i, offset]
+
+ def read(self, bytes):
+ if not self.buffer:
+ return self._readStream(bytes)
+ elif (self.position[0] == len(self.buffer) and
+ self.position[1] == len(self.buffer[-1])):
+ return self._readStream(bytes)
+ else:
+ return self._readFromBuffer(bytes)
+
+ def _bufferedBytes(self):
+ return sum([len(item) for item in self.buffer])
+
+ def _readStream(self, bytes):
+ data = self.stream.read(bytes)
+ self.buffer.append(data)
+ self.position[0] += 1
+ self.position[1] = len(data)
+ return data
+
+ def _readFromBuffer(self, bytes):
+ remainingBytes = bytes
+ rv = []
+ bufferIndex = self.position[0]
+ bufferOffset = self.position[1]
+ while bufferIndex < len(self.buffer) and remainingBytes != 0:
+ assert remainingBytes > 0
+ bufferedData = self.buffer[bufferIndex]
+
+ if remainingBytes <= len(bufferedData) - bufferOffset:
+ bytesToRead = remainingBytes
+ self.position = [bufferIndex, bufferOffset + bytesToRead]
+ else:
+ bytesToRead = len(bufferedData) - bufferOffset
+ self.position = [bufferIndex, len(bufferedData)]
+ bufferIndex += 1
+ rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])
+ remainingBytes -= bytesToRead
+
+ bufferOffset = 0
+
+ if remainingBytes:
+ rv.append(self._readStream(remainingBytes))
+
+ return b"".join(rv)
+
+
+def HTMLInputStream(source, **kwargs):
+ # Work around Python bug #20007: read(0) closes the connection.
+ # http://bugs.python.org/issue20007
+ if (isinstance(source, http_client.HTTPResponse) or
+ # Also check for addinfourl wrapping HTTPResponse
+ (isinstance(source, urllib.response.addbase) and
+ isinstance(source.fp, http_client.HTTPResponse))):
+ isUnicode = False
+ elif hasattr(source, "read"):
+ isUnicode = isinstance(source.read(0), text_type)
+ else:
+ isUnicode = isinstance(source, text_type)
+
+ if isUnicode:
+ encodings = [x for x in kwargs if x.endswith("_encoding")]
+ if encodings:
+ raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings)
+
+ return HTMLUnicodeInputStream(source, **kwargs)
+ else:
+ return HTMLBinaryInputStream(source, **kwargs)
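+
+# Illustrative dispatch (a sketch, not part of the vendored module): text
+# input yields a unicode stream, byte input a binary stream:
+#
+#   >>> isinstance(HTMLInputStream("<p>hi</p>"), HTMLUnicodeInputStream)
+#   True
+#   >>> isinstance(HTMLInputStream(b"<p>hi</p>"), HTMLBinaryInputStream)
+#   True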
+
+
+class HTMLUnicodeInputStream(object):
+ """Provides a unicode stream of characters to the HTMLTokenizer.
+
+ This class takes care of character encoding and removing or replacing
+ incorrect byte-sequences and also provides column and line tracking.
+
+ """
+
+ _defaultChunkSize = 10240
+
+ def __init__(self, source):
+ """Initialises the HTMLInputStream.
+
+ HTMLInputStream(source, [encoding]) -> Normalized stream from source
+ for use by html5lib.
+
+ source can be either a file-object, local filename or a string.
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element)
+
+ """
+
+ if not _utils.supports_lone_surrogates:
+            # Platforms without lone-surrogate support have already rejected
+            # such data, so no further checking is needed here.
+ self.reportCharacterErrors = None
+ elif len("\U0010FFFF") == 1:
+ self.reportCharacterErrors = self.characterErrorsUCS4
+ else:
+ self.reportCharacterErrors = self.characterErrorsUCS2
+
+ # List of where new lines occur
+ self.newLines = [0]
+
+ self.charEncoding = (lookupEncoding("utf-8"), "certain")
+ self.dataStream = self.openStream(source)
+
+ self.reset()
+
+ def reset(self):
+ self.chunk = ""
+ self.chunkSize = 0
+ self.chunkOffset = 0
+ self.errors = []
+
+ # number of (complete) lines in previous chunks
+ self.prevNumLines = 0
+ # number of columns in the last line of the previous chunk
+ self.prevNumCols = 0
+
+ # Deal with CR LF and surrogates split over chunk boundaries
+ self._bufferedCharacter = None
+
+ def openStream(self, source):
+ """Produces a file object from source.
+
+        source can be either a file-like object or a unicode string.
+
+ """
+ # Already a file object
+ if hasattr(source, 'read'):
+ stream = source
+ else:
+ stream = StringIO(source)
+
+ return stream
+
+ def _position(self, offset):
+ chunk = self.chunk
+ nLines = chunk.count('\n', 0, offset)
+ positionLine = self.prevNumLines + nLines
+ lastLinePos = chunk.rfind('\n', 0, offset)
+ if lastLinePos == -1:
+ positionColumn = self.prevNumCols + offset
+ else:
+ positionColumn = offset - (lastLinePos + 1)
+ return (positionLine, positionColumn)
+
+ def position(self):
+ """Returns (line, col) of the current position in the stream."""
+ line, col = self._position(self.chunkOffset)
+ return (line + 1, col)
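+
+    # Illustrative sketch (not part of the vendored module): the line is
+    # 1-based and the column 0-based, both describing the next unread char:
+    #
+    #   >>> s = HTMLUnicodeInputStream("ab\ncd")
+    #   >>> [s.char() for _ in range(4)]
+    #   ['a', 'b', '\n', 'c']
+    #   >>> s.position()
+    #   (2, 1)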
+
+ def char(self):
+ """ Read one character from the stream or queue if available. Return
+ EOF when EOF is reached.
+ """
+ # Read a new chunk from the input stream if necessary
+ if self.chunkOffset >= self.chunkSize:
+ if not self.readChunk():
+ return EOF
+
+ chunkOffset = self.chunkOffset
+ char = self.chunk[chunkOffset]
+ self.chunkOffset = chunkOffset + 1
+
+ return char
+
+ def readChunk(self, chunkSize=None):
+ if chunkSize is None:
+ chunkSize = self._defaultChunkSize
+
+ self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
+
+ self.chunk = ""
+ self.chunkSize = 0
+ self.chunkOffset = 0
+
+ data = self.dataStream.read(chunkSize)
+
+ # Deal with CR LF and surrogates broken across chunks
+ if self._bufferedCharacter:
+ data = self._bufferedCharacter + data
+ self._bufferedCharacter = None
+ elif not data:
+ # We have no more data, bye-bye stream
+ return False
+
+ if len(data) > 1:
+ lastv = ord(data[-1])
+ if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
+ self._bufferedCharacter = data[-1]
+ data = data[:-1]
+
+ if self.reportCharacterErrors:
+ self.reportCharacterErrors(data)
+
+        # Normalize line endings: CR LF and lone CR both become LF
+ data = data.replace("\r\n", "\n")
+ data = data.replace("\r", "\n")
+
+ self.chunk = data
+ self.chunkSize = len(data)
+
+ return True
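+
+    # Illustrative sketch (not part of the vendored module): readChunk
+    # normalizes line endings, so CR LF and lone CR both come out as LF:
+    #
+    #   >>> s = HTMLUnicodeInputStream("a\r\nb\rc")
+    #   >>> [s.char() for _ in range(5)]
+    #   ['a', '\n', 'b', '\n', 'c']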
+
+ def characterErrorsUCS4(self, data):
+ for _ in range(len(invalid_unicode_re.findall(data))):
+ self.errors.append("invalid-codepoint")
+
+ def characterErrorsUCS2(self, data):
+        # Narrow (UCS-2) Python build: surrogate pairs have to be pieced
+        # back together by hand.
+        skip = False
+        for match in invalid_unicode_re.finditer(data):
+            if skip:
+                # Skip only the low surrogate of the pair reported on the
+                # previous iteration, then resume normal checking.
+                skip = False
+                continue
+ codepoint = ord(match.group())
+ pos = match.start()
+ # Pretty sure there should be endianness issues here
+ if _utils.isSurrogatePair(data[pos:pos + 2]):
+ # We have a surrogate pair!
+ char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2])
+ if char_val in non_bmp_invalid_codepoints:
+ self.errors.append("invalid-codepoint")
+ skip = True
+ elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
+ pos == len(data) - 1):
+ self.errors.append("invalid-codepoint")
+ else:
+ skip = False
+ self.errors.append("invalid-codepoint")
+
+ def charsUntil(self, characters, opposite=False):
+ """ Returns a string of characters from the stream up to but not
+ including any character in 'characters' or EOF. 'characters' must be
+        a container of ASCII characters that supports membership testing
+        with 'in' and iteration.
+ """
+
+ # Use a cache of regexps to find the required characters
+ try:
+ chars = charsUntilRegEx[(characters, opposite)]
+ except KeyError:
+ if __debug__:
+ for c in characters:
+ assert(ord(c) < 128)
+ regex = "".join(["\\x%02x" % ord(c) for c in characters])
+ if not opposite:
+ regex = "^%s" % regex
+ chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)
+
+ rv = []
+
+ while True:
+ # Find the longest matching prefix
+ m = chars.match(self.chunk, self.chunkOffset)
+ if m is None:
+ # If nothing matched, and it wasn't because we ran out of chunk,
+ # then stop
+ if self.chunkOffset != self.chunkSize:
+ break
+ else:
+ end = m.end()
+ # If not the whole chunk matched, return everything
+ # up to the part that didn't match
+ if end != self.chunkSize:
+ rv.append(self.chunk[self.chunkOffset:end])
+ self.chunkOffset = end
+ break
+ # If the whole remainder of the chunk matched,
+ # use it all and read the next chunk
+ rv.append(self.chunk[self.chunkOffset:])
+ if not self.readChunk():
+ # Reached EOF
+ break
+
+ r = "".join(rv)
+ return r
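+
+    # Illustrative sketch (not part of the vendored module):
+    #
+    #   >>> s = HTMLUnicodeInputStream("hello<world")
+    #   >>> s.charsUntil("<")          # stop before the first "<"
+    #   'hello'
+    #   >>> s.char()
+    #   '<'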
+
+ def unget(self, char):
+ # Only one character is allowed to be ungotten at once - it must
+ # be consumed again before any further call to unget
+ if char is not None:
+ if self.chunkOffset == 0:
+ # unget is called quite rarely, so it's a good idea to do
+ # more work here if it saves a bit of work in the frequently
+ # called char and charsUntil.
+ # So, just prepend the ungotten character onto the current
+ # chunk:
+ self.chunk = char + self.chunk
+ self.chunkSize += 1
+ else:
+ self.chunkOffset -= 1
+ assert self.chunk[self.chunkOffset] == char
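+
+    # Illustrative sketch (not part of the vendored module):
+    #
+    #   >>> s = HTMLUnicodeInputStream("ab")
+    #   >>> c = s.char()
+    #   >>> s.unget(c)                 # push the character back
+    #   >>> s.char()
+    #   'a'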
+
+
+class HTMLBinaryInputStream(HTMLUnicodeInputStream):
+ """Provides a unicode stream of characters to the HTMLTokenizer.
+
+ This class takes care of character encoding and removing or replacing
+ incorrect byte-sequences and also provides column and line tracking.
+
+ """
+
+ def __init__(self, source, override_encoding=None, transport_encoding=None,
+ same_origin_parent_encoding=None, likely_encoding=None,
+ default_encoding="windows-1252", useChardet=True):
+ """Initialises the HTMLInputStream.
+
+ HTMLInputStream(source, [encoding]) -> Normalized stream from source
+ for use by html5lib.
+
+ source can be either a file-object, local filename or a string.
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element)
+
+ """
+        # Raw byte stream; once the encoding is determined, reset() wraps it
+        # in a decoding reader that feeds self.dataStream
+ self.rawStream = self.openStream(source)
+
+ HTMLUnicodeInputStream.__init__(self, self.rawStream)
+
+ # Encoding Information
+ # Number of bytes to use when looking for a meta element with
+ # encoding information
+ self.numBytesMeta = 1024
+        # Number of bytes to read per step when detecting the encoding with chardet
+ self.numBytesChardet = 100
+ # Things from args
+ self.override_encoding = override_encoding
+ self.transport_encoding = transport_encoding
+ self.same_origin_parent_encoding = same_origin_parent_encoding
+ self.likely_encoding = likely_encoding
+ self.default_encoding = default_encoding
+
+ # Determine encoding
+ self.charEncoding = self.determineEncoding(useChardet)
+ assert self.charEncoding[0] is not None
+
+        # Install the decoding reader via this class's reset()
+ self.reset()
+
+ def reset(self):
+ self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace')
+ HTMLUnicodeInputStream.reset(self)
+
+ def openStream(self, source):
+ """Produces a file object from source.
+
+        source can be either a file-like object or a byte string.
+
+ """
+ # Already a file object
+ if hasattr(source, 'read'):
+ stream = source
+ else:
+ stream = BytesIO(source)
+
+ try:
+ stream.seek(stream.tell())
+ except: # pylint:disable=bare-except
+ stream = BufferedStream(stream)
+
+ return stream
+
+ def determineEncoding(self, chardet=True):
+ # BOMs take precedence over everything
+ # This will also read past the BOM if present
+ charEncoding = self.detectBOM(), "certain"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+        # An explicit override takes precedence over everything below
+ charEncoding = lookupEncoding(self.override_encoding), "certain"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Now check the transport layer
+ charEncoding = lookupEncoding(self.transport_encoding), "certain"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Look for meta elements with encoding information
+ charEncoding = self.detectEncodingMeta(), "tentative"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Parent document encoding
+ charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative"
+ if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"):
+ return charEncoding
+
+ # "likely" encoding
+ charEncoding = lookupEncoding(self.likely_encoding), "tentative"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+ # Guess with chardet, if available
+ if chardet:
+ try:
+ from pip._vendor.chardet.universaldetector import UniversalDetector
+ except ImportError:
+ pass
+ else:
+ buffers = []
+ detector = UniversalDetector()
+ while not detector.done:
+ buffer = self.rawStream.read(self.numBytesChardet)
+ assert isinstance(buffer, bytes)
+ if not buffer:
+ break
+ buffers.append(buffer)
+ detector.feed(buffer)
+ detector.close()
+ encoding = lookupEncoding(detector.result['encoding'])
+ self.rawStream.seek(0)
+ if encoding is not None:
+ return encoding, "tentative"
+
+ # Try the default encoding
+ charEncoding = lookupEncoding(self.default_encoding), "tentative"
+ if charEncoding[0] is not None:
+ return charEncoding
+
+        # Fall back to html5lib's default if even that fails
+ return lookupEncoding("windows-1252"), "tentative"
+
+ def changeEncoding(self, newEncoding):
+ assert self.charEncoding[1] != "certain"
+ newEncoding = lookupEncoding(newEncoding)
+ if newEncoding is None:
+ return
+ if newEncoding.name in ("utf-16be", "utf-16le"):
+ newEncoding = lookupEncoding("utf-8")
+ assert newEncoding is not None
+ elif newEncoding == self.charEncoding[0]:
+ self.charEncoding = (self.charEncoding[0], "certain")
+ else:
+            self.rawStream.seek(0)
+            oldEncoding = self.charEncoding[0]
+            self.charEncoding = (newEncoding, "certain")
+            self.reset()
+            # Report the transition from the previous encoding, not from the
+            # one that was just installed
+            raise _ReparseException("Encoding changed from %s to %s" % (oldEncoding, newEncoding))
+
+ def detectBOM(self):
+ """Attempts to detect at BOM at the start of the stream. If
+ an encoding can be determined from the BOM return the name of the
+ encoding otherwise return None"""
+ bomDict = {
+ codecs.BOM_UTF8: 'utf-8',
+ codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be',
+ codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be'
+ }
+
+ # Go to beginning of file and read in 4 bytes
+ string = self.rawStream.read(4)
+ assert isinstance(string, bytes)
+
+ # Try detecting the BOM using bytes from the string
+ encoding = bomDict.get(string[:3]) # UTF-8
+ seek = 3
+ if not encoding:
+ # Need to detect UTF-32 before UTF-16
+ encoding = bomDict.get(string) # UTF-32
+ seek = 4
+ if not encoding:
+ encoding = bomDict.get(string[:2]) # UTF-16
+ seek = 2
+
+ # Set the read position past the BOM if one was found, otherwise
+ # set it to the start of the stream
+ if encoding:
+ self.rawStream.seek(seek)
+ return lookupEncoding(encoding)
+ else:
+ self.rawStream.seek(0)
+ return None
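+
+    # Illustrative sketch (not part of the vendored module): a BOM makes
+    # the detected encoding "certain" and is skipped on read:
+    #
+    #   >>> s = HTMLBinaryInputStream(codecs.BOM_UTF8 + b"hi")
+    #   >>> s.charEncoding[0].name, s.charEncoding[1]
+    #   ('utf-8', 'certain')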
+
+ def detectEncodingMeta(self):
+ """Report the encoding declared by the meta element
+ """
+ buffer = self.rawStream.read(self.numBytesMeta)
+ assert isinstance(buffer, bytes)
+ parser = EncodingParser(buffer)
+ self.rawStream.seek(0)
+ encoding = parser.getEncoding()
+
+ if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):
+ encoding = lookupEncoding("utf-8")
+
+ return encoding
+
+
+class EncodingBytes(bytes):
+ """String-like object with an associated position and various extra methods
+ If the position is ever greater than the string length then an exception is
+ raised"""
+ def __new__(self, value):
+ assert isinstance(value, bytes)
+ return bytes.__new__(self, value.lower())
+
+ def __init__(self, value):
+ # pylint:disable=unused-argument
+ self._position = -1
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ p = self._position = self._position + 1
+ if p >= len(self):
+ raise StopIteration
+ elif p < 0:
+ raise TypeError
+ return self[p:p + 1]
+
+ def next(self):
+ # Py2 compat
+ return self.__next__()
+
+ def previous(self):
+ p = self._position
+ if p >= len(self):
+ raise StopIteration
+ elif p < 0:
+ raise TypeError
+ self._position = p = p - 1
+ return self[p:p + 1]
+
+ def setPosition(self, position):
+ if self._position >= len(self):
+ raise StopIteration
+ self._position = position
+
+ def getPosition(self):
+ if self._position >= len(self):
+ raise StopIteration
+ if self._position >= 0:
+ return self._position
+ else:
+ return None
+
+ position = property(getPosition, setPosition)
+
+ def getCurrentByte(self):
+ return self[self.position:self.position + 1]
+
+ currentByte = property(getCurrentByte)
+
+ def skip(self, chars=spaceCharactersBytes):
+ """Skip past a list of characters"""
+ p = self.position # use property for the error-checking
+ while p < len(self):
+ c = self[p:p + 1]
+ if c not in chars:
+ self._position = p
+ return c
+ p += 1
+ self._position = p
+ return None
+
+ def skipUntil(self, chars):
+ p = self.position
+ while p < len(self):
+ c = self[p:p + 1]
+ if c in chars:
+ self._position = p
+ return c
+ p += 1
+ self._position = p
+ return None
+
+ def matchBytes(self, bytes):
+ """Look for a sequence of bytes at the start of a string. If the bytes
+ are found return True and advance the position to the byte after the
+ match. Otherwise return False and leave the position alone"""
+ p = self.position
+ data = self[p:p + len(bytes)]
+ rv = data.startswith(bytes)
+ if rv:
+ self.position += len(bytes)
+ return rv
+
+ def jumpTo(self, bytes):
+ """Look for the next sequence of bytes matching a given sequence. If
+ a match is found advance the position to the last byte of the match"""
+ newPosition = self[self.position:].find(bytes)
+ if newPosition > -1:
+ # XXX: This is ugly, but I can't see a nicer way to fix this.
+ if self._position == -1:
+ self._position = 0
+ self._position += (newPosition + len(bytes) - 1)
+ return True
+ else:
+ raise StopIteration
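+
+    # Illustrative sketch (not part of the vendored module); note that the
+    # input is lower-cased on construction:
+    #
+    #   >>> e = EncodingBytes(b"<META x>")
+    #   >>> next(e)                    # advance onto the first byte
+    #   b'<'
+    #   >>> e.matchBytes(b"<meta")
+    #   True
+    #   >>> e.skip()                   # skip whitespace, return next byte
+    #   b'x'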
+
+
+class EncodingParser(object):
+ """Mini parser for detecting character encoding from meta elements"""
+
+ def __init__(self, data):
+ """string - the data to work on for encoding detection"""
+ self.data = EncodingBytes(data)
+ self.encoding = None
+
+ def getEncoding(self):
+ methodDispatch = (
+ (b"<!--", self.handleComment),
+ (b"<meta", self.handleMeta),
+ (b"</", self.handlePossibleEndTag),
+ (b"<!", self.handleOther),
+ (b"<?", self.handleOther),
+ (b"<", self.handlePossibleStartTag))
+ for _ in self.data:
+ keepParsing = True
+ for key, method in methodDispatch:
+ if self.data.matchBytes(key):
+ try:
+ keepParsing = method()
+ break
+ except StopIteration:
+ keepParsing = False
+ break
+ if not keepParsing:
+ break
+
+ return self.encoding
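+
+    # Illustrative sketch (not part of the vendored module):
+    #
+    #   >>> EncodingParser(b'<meta charset="utf-8">').getEncoding().name
+    #   'utf-8'
+    #   >>> EncodingParser(b"<p>no declaration</p>").getEncoding() is None
+    #   True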
+
+ def handleComment(self):
+ """Skip over comments"""
+ return self.data.jumpTo(b"-->")
+
+ def handleMeta(self):
+ if self.data.currentByte not in spaceCharactersBytes:
+            # <meta was not followed by whitespace, so keep parsing from here
+ return True
+        # We have a valid meta element; search it for attributes
+ hasPragma = False
+ pendingEncoding = None
+ while True:
+ # Try to find the next attribute after the current position
+ attr = self.getAttribute()
+ if attr is None:
+ return True
+ else:
+ if attr[0] == b"http-equiv":
+ hasPragma = attr[1] == b"content-type"
+ if hasPragma and pendingEncoding is not None:
+ self.encoding = pendingEncoding
+ return False
+ elif attr[0] == b"charset":
+ tentativeEncoding = attr[1]
+ codec = lookupEncoding(tentativeEncoding)
+ if codec is not None:
+ self.encoding = codec
+ return False
+ elif attr[0] == b"content":
+ contentParser = ContentAttrParser(EncodingBytes(attr[1]))
+ tentativeEncoding = contentParser.parse()
+ if tentativeEncoding is not None:
+ codec = lookupEncoding(tentativeEncoding)
+ if codec is not None:
+ if hasPragma:
+ self.encoding = codec
+ return False
+ else:
+ pendingEncoding = codec
+
+ def handlePossibleStartTag(self):
+ return self.handlePossibleTag(False)
+
+ def handlePossibleEndTag(self):
+ next(self.data)
+ return self.handlePossibleTag(True)
+
+ def handlePossibleTag(self, endTag):
+ data = self.data
+ if data.currentByte not in asciiLettersBytes:
+ # If the next byte is not an ascii letter either ignore this
+ # fragment (possible start tag case) or treat it according to
+ # handleOther
+ if endTag:
+ data.previous()
+ self.handleOther()
+ return True
+
+ c = data.skipUntil(spacesAngleBrackets)
+ if c == b"<":
+ # return to the first step in the overall "two step" algorithm
+ # reprocessing the < byte
+ data.previous()
+ else:
+ # Read all attributes
+ attr = self.getAttribute()
+ while attr is not None:
+ attr = self.getAttribute()
+ return True
+
+ def handleOther(self):
+ return self.data.jumpTo(b">")
+
+ def getAttribute(self):
+ """Return a name,value pair for the next attribute in the stream,
+ if one is found, or None"""
+ data = self.data
+ # Step 1 (skip chars)
+ c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
+ assert c is None or len(c) == 1
+ # Step 2
+ if c in (b">", None):
+ return None
+ # Step 3
+ attrName = []
+ attrValue = []
+ # Step 4 attribute name
+ while True:
+ if c == b"=" and attrName:
+ break
+ elif c in spaceCharactersBytes:
+ # Step 6!
+ c = data.skip()
+ break
+ elif c in (b"/", b">"):
+ return b"".join(attrName), b""
+ elif c in asciiUppercaseBytes:
+ attrName.append(c.lower())
+ elif c is None:
+ return None
+ else:
+ attrName.append(c)
+ # Step 5
+ c = next(data)
+ # Step 7
+ if c != b"=":
+ data.previous()
+ return b"".join(attrName), b""
+ # Step 8
+ next(data)
+ # Step 9
+ c = data.skip()
+ # Step 10
+ if c in (b"'", b'"'):
+ # 10.1
+ quoteChar = c
+ while True:
+ # 10.2
+ c = next(data)
+ # 10.3
+ if c == quoteChar:
+ next(data)
+ return b"".join(attrName), b"".join(attrValue)
+ # 10.4
+ elif c in asciiUppercaseBytes:
+ attrValue.append(c.lower())
+ # 10.5
+ else:
+ attrValue.append(c)
+ elif c == b">":
+ return b"".join(attrName), b""
+ elif c in asciiUppercaseBytes:
+ attrValue.append(c.lower())
+ elif c is None:
+ return None
+ else:
+ attrValue.append(c)
+ # Step 11
+ while True:
+ c = next(data)
+ if c in spacesAngleBrackets:
+ return b"".join(attrName), b"".join(attrValue)
+ elif c in asciiUppercaseBytes:
+ attrValue.append(c.lower())
+ elif c is None:
+ return None
+ else:
+ attrValue.append(c)
+
+
+class ContentAttrParser(object):
+ def __init__(self, data):
+ assert isinstance(data, bytes)
+ self.data = data
+
+ def parse(self):
+ try:
+            # Jump to the "charset" token inside the content value; if it is
+            # absent, jumpTo raises StopIteration and we return None below
+ self.data.jumpTo(b"charset")
+ self.data.position += 1
+ self.data.skip()
+ if not self.data.currentByte == b"=":
+ # If there is no = sign keep looking for attrs
+ return None
+ self.data.position += 1
+ self.data.skip()
+ # Look for an encoding between matching quote marks
+ if self.data.currentByte in (b'"', b"'"):
+ quoteMark = self.data.currentByte
+ self.data.position += 1
+ oldPosition = self.data.position
+ if self.data.jumpTo(quoteMark):
+ return self.data[oldPosition:self.data.position]
+ else:
+ return None
+ else:
+ # Unquoted value
+ oldPosition = self.data.position
+ try:
+ self.data.skipUntil(spaceCharactersBytes)
+ return self.data[oldPosition:self.data.position]
+ except StopIteration:
+ # Return the whole remaining value
+ return self.data[oldPosition:]
+ except StopIteration:
+ return None
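+
+    # Illustrative sketch (not part of the vendored module):
+    #
+    #   >>> ContentAttrParser(EncodingBytes(b"text/html; charset=UTF-8")).parse()
+    #   b'utf-8'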
+
+
+def lookupEncoding(encoding):
+ """Return the python codec name corresponding to an encoding or None if the
+ string doesn't correspond to a valid encoding."""
+ if isinstance(encoding, binary_type):
+ try:
+ encoding = encoding.decode("ascii")
+ except UnicodeDecodeError:
+ return None
+
+ if encoding is not None:
+ try:
+ return webencodings.lookup(encoding)
+ except AttributeError:
+ return None
+ else:
+ return None
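+
+# Illustrative sketch (not part of the vendored module): lookups go through
+# the WHATWG label registry in webencodings, so aliases normalize:
+#
+#   >>> lookupEncoding("UTF8").name
+#   'utf-8'
+#   >>> lookupEncoding("no-such-label") is None
+#   True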
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py
new file mode 100644
index 00000000..178f6e7f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py
@@ -0,0 +1,1721 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from pip._vendor.six import unichr as chr
+
+from collections import deque
+
+from .constants import spaceCharacters
+from .constants import entities
+from .constants import asciiLetters, asciiUpper2Lower
+from .constants import digits, hexDigits, EOF
+from .constants import tokenTypes, tagTokenTypes
+from .constants import replacementCharacters
+
+from ._inputstream import HTMLInputStream
+
+from ._trie import Trie
+
+entitiesTrie = Trie(entities)
+
+
+class HTMLTokenizer(object):
+ """ This class takes care of tokenizing HTML.
+
+ * self.currentToken
+ Holds the token that is currently being processed.
+
+ * self.state
+      Holds a reference to the method implementing the current tokenizer
+      state; calling it consumes more input and may queue tokens.
+
+ * self.stream
+      Points to the HTMLInputStream object.
+ """
+
+ def __init__(self, stream, parser=None, **kwargs):
+
+ self.stream = HTMLInputStream(stream, **kwargs)
+ self.parser = parser
+
+ # Setup the initial tokenizer state
+ self.escapeFlag = False
+ self.lastFourChars = []
+ self.state = self.dataState
+ self.escape = False
+
+ # The current token being created
+ self.currentToken = None
+ super(HTMLTokenizer, self).__init__()
+
+ def __iter__(self):
+ """ This is where the magic happens.
+
+        We do our usual processing through the states and, when we have a
+        token to return, we yield it, which pauses processing until the next
+        token is requested.
+ """
+ self.tokenQueue = deque([])
+ # Start processing. When EOF is reached self.state will return False
+ # instead of True and the loop will terminate.
+ while self.state():
+ while self.stream.errors:
+ yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)}
+ while self.tokenQueue:
+ yield self.tokenQueue.popleft()
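+
+    # Illustrative sketch (not part of the vendored module): iterating the
+    # tokenizer drives the state machine and yields token dicts:
+    #
+    #   >>> [t.get("name") or t.get("data") for t in HTMLTokenizer("<p>hi</p>")]
+    #   ['p', 'hi', 'p']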
+
+ def consumeNumberEntity(self, isHex):
+ """This function returns either U+FFFD or the character based on the
+ decimal or hexadecimal representation. It also discards ";" if present.
+ If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked.
+ """
+
+ allowed = digits
+ radix = 10
+ if isHex:
+ allowed = hexDigits
+ radix = 16
+
+ charStack = []
+
+ # Consume all the characters that are in range while making sure we
+ # don't hit an EOF.
+ c = self.stream.char()
+ while c in allowed and c is not EOF:
+ charStack.append(c)
+ c = self.stream.char()
+
+ # Convert the set of characters consumed to an int.
+ charAsInt = int("".join(charStack), radix)
+
+ # Certain characters get replaced with others
+ if charAsInt in replacementCharacters:
+ char = replacementCharacters[charAsInt]
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "illegal-codepoint-for-numeric-entity",
+ "datavars": {"charAsInt": charAsInt}})
+ elif ((0xD800 <= charAsInt <= 0xDFFF) or
+ (charAsInt > 0x10FFFF)):
+ char = "\uFFFD"
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "illegal-codepoint-for-numeric-entity",
+ "datavars": {"charAsInt": charAsInt}})
+ else:
+ # Should speed up this check somehow (e.g. move the set to a constant)
+ if ((0x0001 <= charAsInt <= 0x0008) or
+ (0x000E <= charAsInt <= 0x001F) or
+ (0x007F <= charAsInt <= 0x009F) or
+ (0xFDD0 <= charAsInt <= 0xFDEF) or
+ charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE,
+ 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
+ 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE,
+ 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE,
+ 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE,
+ 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE,
+ 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
+ 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE,
+ 0xFFFFF, 0x10FFFE, 0x10FFFF])):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data":
+ "illegal-codepoint-for-numeric-entity",
+ "datavars": {"charAsInt": charAsInt}})
+ try:
+                    # Try/except needed as UCS-2 Python builds' unichr only
+                    # works within the BMP.
+ char = chr(charAsInt)
+ except ValueError:
+ v = charAsInt - 0x10000
+ char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF))
+
+        # Discard the ";" if present. Otherwise, unget the character and
+        # queue a "numeric-entity-without-semicolon" parse error.
+ if c != ";":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "numeric-entity-without-semicolon"})
+ self.stream.unget(c)
+
+ return char
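+
+    # Illustrative sketch (not part of the vendored module): decimal and
+    # hexadecimal references decode to the same character:
+    #
+    #   >>> toks = list(HTMLTokenizer("&#65;&#x41;"))
+    #   >>> [t["data"] for t in toks if t["type"] == tokenTypes["Characters"]]
+    #   ['A', 'A']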
+
+ def consumeEntity(self, allowedChar=None, fromAttribute=False):
+ # Initialise to the default output for when no entity is matched
+ output = "&"
+
+ charStack = [self.stream.char()]
+ if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or
+ (allowedChar is not None and allowedChar == charStack[0])):
+ self.stream.unget(charStack[0])
+
+ elif charStack[0] == "#":
+ # Read the next character to see if it's hex or decimal
+ hex = False
+ charStack.append(self.stream.char())
+ if charStack[-1] in ("x", "X"):
+ hex = True
+ charStack.append(self.stream.char())
+
+ # charStack[-1] should be the first digit
+ if (hex and charStack[-1] in hexDigits) \
+ or (not hex and charStack[-1] in digits):
+ # At least one digit found, so consume the whole number
+ self.stream.unget(charStack[-1])
+ output = self.consumeNumberEntity(hex)
+ else:
+ # No digits found
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "expected-numeric-entity"})
+ self.stream.unget(charStack.pop())
+ output = "&" + "".join(charStack)
+
+ else:
+            # At this point we might have a named entity. Entities are
+            # stored in the global variable "entities".
+            #
+            # Consume characters and compare them to a prefix of the entity
+            # names until the prefix no longer matches any entity.
+ while (charStack[-1] is not EOF):
+ if not entitiesTrie.has_keys_with_prefix("".join(charStack)):
+ break
+ charStack.append(self.stream.char())
+
+            # At this point we have a string that starts with some characters
+            # that may match an entity.
+            # Try to find the longest entity the string will match, to take
+            # care of &noti for instance.
+ try:
+ entityName = entitiesTrie.longest_prefix("".join(charStack[:-1]))
+ entityLength = len(entityName)
+ except KeyError:
+ entityName = None
+
+ if entityName is not None:
+ if entityName[-1] != ";":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "named-entity-without-semicolon"})
+ if (entityName[-1] != ";" and fromAttribute and
+ (charStack[entityLength] in asciiLetters or
+ charStack[entityLength] in digits or
+ charStack[entityLength] == "=")):
+ self.stream.unget(charStack.pop())
+ output = "&" + "".join(charStack)
+ else:
+ output = entities[entityName]
+ self.stream.unget(charStack.pop())
+ output += "".join(charStack[entityLength:])
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-named-entity"})
+ self.stream.unget(charStack.pop())
+ output = "&" + "".join(charStack)
+
+ if fromAttribute:
+ self.currentToken["data"][-1][1] += output
+ else:
+ if output in spaceCharacters:
+ tokenType = "SpaceCharacters"
+ else:
+ tokenType = "Characters"
+ self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output})
+
+ def processEntityInAttribute(self, allowedChar):
+ """This method replaces the need for "entityInAttributeValueState".
+ """
+ self.consumeEntity(allowedChar=allowedChar, fromAttribute=True)
+
+ def emitCurrentToken(self):
+ """This method is a generic handler for emitting the tags. It also sets
+ the state to "data" because that's what's needed after a token has been
+ emitted.
+ """
+ token = self.currentToken
+ # Add token to the queue to be yielded
+ if (token["type"] in tagTokenTypes):
+ token["name"] = token["name"].translate(asciiUpper2Lower)
+ if token["type"] == tokenTypes["EndTag"]:
+ if token["data"]:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "attributes-in-end-tag"})
+ if token["selfClosing"]:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "self-closing-flag-on-end-tag"})
+ self.tokenQueue.append(token)
+ self.state = self.dataState
+
+    # Below are the various tokenizer states, one method per state.
+ def dataState(self):
+ data = self.stream.char()
+ if data == "&":
+ self.state = self.entityDataState
+ elif data == "<":
+ self.state = self.tagOpenState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\u0000"})
+ elif data is EOF:
+ # Tokenization ends.
+ return False
+ elif data in spaceCharacters:
+ # Directly after emitting a token you switch back to the "data
+ # state". At that point spaceCharacters are important so they are
+ # emitted separately.
+ self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
+ data + self.stream.charsUntil(spaceCharacters, True)})
+ # No need to update lastFourChars here, since the first space will
+ # have already been appended to lastFourChars and will have broken
+ # any <!-- or --> sequences
+ else:
+ chars = self.stream.charsUntil(("&", "<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def entityDataState(self):
+ self.consumeEntity()
+ self.state = self.dataState
+ return True
+
+ def rcdataState(self):
+ data = self.stream.char()
+ if data == "&":
+ self.state = self.characterReferenceInRcdata
+ elif data == "<":
+ self.state = self.rcdataLessThanSignState
+ elif data == EOF:
+ # Tokenization ends.
+ return False
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data in spaceCharacters:
+ # Directly after emitting a token you switch back to the "data
+ # state". At that point spaceCharacters are important so they are
+ # emitted separately.
+ self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
+ data + self.stream.charsUntil(spaceCharacters, True)})
+ # No need to update lastFourChars here, since the first space will
+ # have already been appended to lastFourChars and will have broken
+ # any <!-- or --> sequences
+ else:
+ chars = self.stream.charsUntil(("&", "<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def characterReferenceInRcdata(self):
+ self.consumeEntity()
+ self.state = self.rcdataState
+ return True
+
+ def rawtextState(self):
+ data = self.stream.char()
+ if data == "<":
+ self.state = self.rawtextLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ # Tokenization ends.
+ return False
+ else:
+ chars = self.stream.charsUntil(("<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def scriptDataState(self):
+ data = self.stream.char()
+ if data == "<":
+ self.state = self.scriptDataLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ # Tokenization ends.
+ return False
+ else:
+ chars = self.stream.charsUntil(("<", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def plaintextState(self):
+ data = self.stream.char()
+ if data == EOF:
+ # Tokenization ends.
+ return False
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + self.stream.charsUntil("\u0000")})
+ return True
+
+ def tagOpenState(self):
+ data = self.stream.char()
+ if data == "!":
+ self.state = self.markupDeclarationOpenState
+ elif data == "/":
+ self.state = self.closeTagOpenState
+ elif data in asciiLetters:
+ self.currentToken = {"type": tokenTypes["StartTag"],
+ "name": data, "data": [],
+ "selfClosing": False,
+ "selfClosingAcknowledged": False}
+ self.state = self.tagNameState
+ elif data == ">":
+ # XXX In theory it could be something besides a tag name. But
+ # do we really care?
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-tag-name-but-got-right-bracket"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"})
+ self.state = self.dataState
+ elif data == "?":
+ # XXX In theory it could be something besides a tag name. But
+ # do we really care?
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-tag-name-but-got-question-mark"})
+ self.stream.unget(data)
+ self.state = self.bogusCommentState
+ else:
+ # XXX
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-tag-name"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.dataState
+ return True
+
+ def closeTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.currentToken = {"type": tokenTypes["EndTag"], "name": data,
+ "data": [], "selfClosing": False}
+ self.state = self.tagNameState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-closing-tag-but-got-right-bracket"})
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-closing-tag-but-got-eof"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.state = self.dataState
+ else:
+ # XXX data can be _'_...
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-closing-tag-but-got-char",
+ "datavars": {"data": data}})
+ self.stream.unget(data)
+ self.state = self.bogusCommentState
+ return True
+
+ def tagNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeAttributeNameState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-tag-name"})
+ self.state = self.dataState
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["name"] += "\uFFFD"
+ else:
+ self.currentToken["name"] += data
+ # (Don't use charsUntil here, because tag names are
+ # very short and it's faster to not do anything fancy)
+ return True
+
+ def rcdataLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.rcdataEndTagOpenState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.rcdataState
+ return True
+
+ def rcdataEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer += data
+ self.state = self.rcdataEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.rcdataState
+ return True
+
+ def rcdataEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.rcdataState
+ return True
+
+ def rawtextLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.rawtextEndTagOpenState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.rawtextState
+ return True
+
+ def rawtextEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer += data
+ self.state = self.rawtextEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.rawtextState
+ return True
+
+ def rawtextEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.rawtextState
+ return True
+
+ def scriptDataLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.scriptDataEndTagOpenState
+ elif data == "!":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"})
+ self.state = self.scriptDataEscapeStartState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer += data
+ self.state = self.scriptDataEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEscapeStartState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapeStartDashState
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEscapeStartDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapedDashDashState
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataState
+ return True
+
+ def scriptDataEscapedState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapedDashState
+ elif data == "<":
+ self.state = self.scriptDataEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ self.state = self.dataState
+ else:
+ chars = self.stream.charsUntil(("<", "-", "\u0000"))
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
+ data + chars})
+ return True
+
+ def scriptDataEscapedDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataEscapedDashDashState
+ elif data == "<":
+ self.state = self.scriptDataEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataEscapedState
+ elif data == EOF:
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedDashDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ elif data == "<":
+ self.state = self.scriptDataEscapedLessThanSignState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
+ self.state = self.scriptDataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataEscapedState
+ elif data == EOF:
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.temporaryBuffer = ""
+ self.state = self.scriptDataEscapedEndTagOpenState
+ elif data in asciiLetters:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data})
+ self.temporaryBuffer = data
+ self.state = self.scriptDataDoubleEscapeStartState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedEndTagOpenState(self):
+ data = self.stream.char()
+ if data in asciiLetters:
+ self.temporaryBuffer = data
+ self.state = self.scriptDataEscapedEndTagNameState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataEscapedEndTagNameState(self):
+ appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
+ data = self.stream.char()
+ if data in spaceCharacters and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.beforeAttributeNameState
+ elif data == "/" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.state = self.selfClosingStartTagState
+ elif data == ">" and appropriate:
+ self.currentToken = {"type": tokenTypes["EndTag"],
+ "name": self.temporaryBuffer,
+ "data": [], "selfClosing": False}
+ self.emitCurrentToken()
+ self.state = self.dataState
+ elif data in asciiLetters:
+ self.temporaryBuffer += data
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "</" + self.temporaryBuffer})
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataDoubleEscapeStartState(self):
+ data = self.stream.char()
+ if data in (spaceCharacters | frozenset(("/", ">"))):
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ if self.temporaryBuffer.lower() == "script":
+ self.state = self.scriptDataDoubleEscapedState
+ else:
+ self.state = self.scriptDataEscapedState
+ elif data in asciiLetters:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.temporaryBuffer += data
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataEscapedState
+ return True
+
+ def scriptDataDoubleEscapedState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataDoubleEscapedDashState
+ elif data == "<":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.state = self.scriptDataDoubleEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ elif data == EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-script-in-script"})
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ return True
+
+ def scriptDataDoubleEscapedDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ self.state = self.scriptDataDoubleEscapedDashDashState
+ elif data == "<":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.state = self.scriptDataDoubleEscapedLessThanSignState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataDoubleEscapedState
+ elif data == EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-script-in-script"})
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def scriptDataDoubleEscapedDashDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
+ elif data == "<":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
+ self.state = self.scriptDataDoubleEscapedLessThanSignState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
+ self.state = self.scriptDataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": "\uFFFD"})
+ self.state = self.scriptDataDoubleEscapedState
+ elif data == EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-script-in-script"})
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def scriptDataDoubleEscapedLessThanSignState(self):
+ data = self.stream.char()
+ if data == "/":
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"})
+ self.temporaryBuffer = ""
+ self.state = self.scriptDataDoubleEscapeEndState
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def scriptDataDoubleEscapeEndState(self):
+ data = self.stream.char()
+ if data in (spaceCharacters | frozenset(("/", ">"))):
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ if self.temporaryBuffer.lower() == "script":
+ self.state = self.scriptDataEscapedState
+ else:
+ self.state = self.scriptDataDoubleEscapedState
+ elif data in asciiLetters:
+ self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
+ self.temporaryBuffer += data
+ else:
+ self.stream.unget(data)
+ self.state = self.scriptDataDoubleEscapedState
+ return True
+
+ def beforeAttributeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.stream.charsUntil(spaceCharacters, True)
+ elif data in asciiLetters:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data in ("'", '"', "=", "<"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "invalid-character-in-attribute-name"})
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"].append(["\uFFFD", ""])
+ self.state = self.attributeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-attribute-name-but-got-eof"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ return True
+
+ def attributeNameState(self):
+ data = self.stream.char()
+ leavingThisState = True
+ emitToken = False
+ if data == "=":
+ self.state = self.beforeAttributeValueState
+ elif data in asciiLetters:
+ self.currentToken["data"][-1][0] += data +\
+ self.stream.charsUntil(asciiLetters, True)
+ leavingThisState = False
+ elif data == ">":
+ # XXX If we emit here the attributes are converted to a dict
+ # without being checked and when the code below runs we error
+ # because data is a dict not a list
+ emitToken = True
+ elif data in spaceCharacters:
+ self.state = self.afterAttributeNameState
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][0] += "\uFFFD"
+ leavingThisState = False
+ elif data in ("'", '"', "<"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data":
+ "invalid-character-in-attribute-name"})
+ self.currentToken["data"][-1][0] += data
+ leavingThisState = False
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "eof-in-attribute-name"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][0] += data
+ leavingThisState = False
+
+ if leavingThisState:
+ # Attributes are not dropped at this stage. That happens when the
+ # start tag token is emitted so values can still be safely appended
+ # to attributes, but we do want to report the parse error in time.
+ self.currentToken["data"][-1][0] = (
+ self.currentToken["data"][-1][0].translate(asciiUpper2Lower))
+ for name, _ in self.currentToken["data"][:-1]:
+ if self.currentToken["data"][-1][0] == name:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "duplicate-attribute"})
+ break
+ # XXX Fix for above XXX
+ if emitToken:
+ self.emitCurrentToken()
+ return True
+
+ def afterAttributeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.stream.charsUntil(spaceCharacters, True)
+ elif data == "=":
+ self.state = self.beforeAttributeValueState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data in asciiLetters:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"].append(["\uFFFD", ""])
+ self.state = self.attributeNameState
+ elif data in ("'", '"', "<"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "invalid-character-after-attribute-name"})
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-end-of-tag-but-got-eof"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"].append([data, ""])
+ self.state = self.attributeNameState
+ return True
+
+ def beforeAttributeValueState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.stream.charsUntil(spaceCharacters, True)
+ elif data == "\"":
+ self.state = self.attributeValueDoubleQuotedState
+ elif data == "&":
+ self.state = self.attributeValueUnQuotedState
+ self.stream.unget(data)
+ elif data == "'":
+ self.state = self.attributeValueSingleQuotedState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-attribute-value-but-got-right-bracket"})
+ self.emitCurrentToken()
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ self.state = self.attributeValueUnQuotedState
+ elif data in ("=", "<", "`"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "equals-in-unquoted-attribute-value"})
+ self.currentToken["data"][-1][1] += data
+ self.state = self.attributeValueUnQuotedState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-attribute-value-but-got-eof"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data
+ self.state = self.attributeValueUnQuotedState
+ return True
+
+ def attributeValueDoubleQuotedState(self):
+ data = self.stream.char()
+ if data == "\"":
+ self.state = self.afterAttributeValueState
+ elif data == "&":
+ self.processEntityInAttribute('"')
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-attribute-value-double-quote"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data +\
+ self.stream.charsUntil(("\"", "&", "\u0000"))
+ return True
+
+ def attributeValueSingleQuotedState(self):
+ data = self.stream.char()
+ if data == "'":
+ self.state = self.afterAttributeValueState
+ elif data == "&":
+ self.processEntityInAttribute("'")
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-attribute-value-single-quote"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data +\
+ self.stream.charsUntil(("'", "&", "\u0000"))
+ return True
+
+ def attributeValueUnQuotedState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeAttributeNameState
+ elif data == "&":
+ self.processEntityInAttribute(">")
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data in ('"', "'", "=", "<", "`"):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-character-in-unquoted-attribute-value"})
+ self.currentToken["data"][-1][1] += data
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"][-1][1] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-attribute-value-no-quotes"})
+ self.state = self.dataState
+ else:
+ self.currentToken["data"][-1][1] += data + self.stream.charsUntil(
+ frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters)
+ return True
+
+ def afterAttributeValueState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeAttributeNameState
+ elif data == ">":
+ self.emitCurrentToken()
+ elif data == "/":
+ self.state = self.selfClosingStartTagState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-EOF-after-attribute-value"})
+ self.stream.unget(data)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-character-after-attribute-value"})
+ self.stream.unget(data)
+ self.state = self.beforeAttributeNameState
+ return True
+
+ def selfClosingStartTagState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.currentToken["selfClosing"] = True
+ self.emitCurrentToken()
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data":
+ "unexpected-EOF-after-solidus-in-tag"})
+ self.stream.unget(data)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-character-after-solidus-in-tag"})
+ self.stream.unget(data)
+ self.state = self.beforeAttributeNameState
+ return True
+
+ def bogusCommentState(self):
+        # Make a new comment token whose value is all the characters up to
+        # the first ">" or EOF (charsUntil checks for EOF automatically),
+        # and emit it.
+ data = self.stream.charsUntil(">")
+ data = data.replace("\u0000", "\uFFFD")
+ self.tokenQueue.append(
+ {"type": tokenTypes["Comment"], "data": data})
+
+        # Eat the character directly after the bogus comment, which is
+        # either a ">" or an EOF.
+ self.stream.char()
+ self.state = self.dataState
+ return True
+
+ def markupDeclarationOpenState(self):
+ charStack = [self.stream.char()]
+ if charStack[-1] == "-":
+ charStack.append(self.stream.char())
+ if charStack[-1] == "-":
+ self.currentToken = {"type": tokenTypes["Comment"], "data": ""}
+ self.state = self.commentStartState
+ return True
+ elif charStack[-1] in ('d', 'D'):
+ matched = True
+ for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'),
+ ('y', 'Y'), ('p', 'P'), ('e', 'E')):
+ charStack.append(self.stream.char())
+ if charStack[-1] not in expected:
+ matched = False
+ break
+ if matched:
+ self.currentToken = {"type": tokenTypes["Doctype"],
+ "name": "",
+ "publicId": None, "systemId": None,
+ "correct": True}
+ self.state = self.doctypeState
+ return True
+ elif (charStack[-1] == "[" and
+ self.parser is not None and
+ self.parser.tree.openElements and
+ self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace):
+ matched = True
+ for expected in ["C", "D", "A", "T", "A", "["]:
+ charStack.append(self.stream.char())
+ if charStack[-1] != expected:
+ matched = False
+ break
+ if matched:
+ self.state = self.cdataSectionState
+ return True
+
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-dashes-or-doctype"})
+
+ while charStack:
+ self.stream.unget(charStack.pop())
+ self.state = self.bogusCommentState
+ return True
+
+ def commentStartState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentStartDashState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "incorrect-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += data
+ self.state = self.commentState
+ return True
+
+ def commentStartDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentEndState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "-\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "incorrect-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += "-" + data
+ self.state = self.commentState
+ return True
+
+ def commentState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentEndDashState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "\uFFFD"
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "eof-in-comment"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += data + \
+ self.stream.charsUntil(("-", "\u0000"))
+ return True
+
+ def commentEndDashState(self):
+ data = self.stream.char()
+ if data == "-":
+ self.state = self.commentEndState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "-\uFFFD"
+ self.state = self.commentState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment-end-dash"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += "-" + data
+ self.state = self.commentState
+ return True
+
+ def commentEndState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "--\uFFFD"
+ self.state = self.commentState
+ elif data == "!":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-bang-after-double-dash-in-comment"})
+ self.state = self.commentEndBangState
+ elif data == "-":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-dash-after-double-dash-in-comment"})
+ self.currentToken["data"] += data
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment-double-dash"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ # XXX
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-comment"})
+ self.currentToken["data"] += "--" + data
+ self.state = self.commentState
+ return True
+
+ def commentEndBangState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "-":
+ self.currentToken["data"] += "--!"
+ self.state = self.commentEndDashState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["data"] += "--!\uFFFD"
+ self.state = self.commentState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-comment-end-bang-state"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["data"] += "--!" + data
+ self.state = self.commentState
+ return True
+
+ def doctypeState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeDoctypeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-doctype-name-but-got-eof"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "need-space-after-doctype"})
+ self.stream.unget(data)
+ self.state = self.beforeDoctypeNameState
+ return True
+
+ def beforeDoctypeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-doctype-name-but-got-right-bracket"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["name"] = "\uFFFD"
+ self.state = self.doctypeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-doctype-name-but-got-eof"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["name"] = data
+ self.state = self.doctypeNameState
+ return True
+
+ def doctypeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
+ self.state = self.afterDoctypeNameState
+ elif data == ">":
+ self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["name"] += "\uFFFD"
+ self.state = self.doctypeNameState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype-name"})
+ self.currentToken["correct"] = False
+ self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["name"] += data
+ return True
+
+ def afterDoctypeNameState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.currentToken["correct"] = False
+ self.stream.unget(data)
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ if data in ("p", "P"):
+ matched = True
+ for expected in (("u", "U"), ("b", "B"), ("l", "L"),
+ ("i", "I"), ("c", "C")):
+ data = self.stream.char()
+ if data not in expected:
+ matched = False
+ break
+ if matched:
+ self.state = self.afterDoctypePublicKeywordState
+ return True
+ elif data in ("s", "S"):
+ matched = True
+ for expected in (("y", "Y"), ("s", "S"), ("t", "T"),
+ ("e", "E"), ("m", "M")):
+ data = self.stream.char()
+ if data not in expected:
+ matched = False
+ break
+ if matched:
+ self.state = self.afterDoctypeSystemKeywordState
+ return True
+
+            # All the characters read before the current 'data' will be
+            # [a-zA-Z], so they're garbage in the bogus doctype and can be
+            # discarded; only the latest character might be '>' or EOF
+            # and needs to be pushed back via unget
+ self.stream.unget(data)
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "expected-space-or-right-bracket-in-doctype", "datavars":
+ {"data": data}})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+
+ return True
+
+ def afterDoctypePublicKeywordState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeDoctypePublicIdentifierState
+ elif data in ("'", '"'):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.stream.unget(data)
+ self.state = self.beforeDoctypePublicIdentifierState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.stream.unget(data)
+ self.state = self.beforeDoctypePublicIdentifierState
+ return True
+
+ def beforeDoctypePublicIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == "\"":
+ self.currentToken["publicId"] = ""
+ self.state = self.doctypePublicIdentifierDoubleQuotedState
+ elif data == "'":
+ self.currentToken["publicId"] = ""
+ self.state = self.doctypePublicIdentifierSingleQuotedState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def doctypePublicIdentifierDoubleQuotedState(self):
+ data = self.stream.char()
+ if data == "\"":
+ self.state = self.afterDoctypePublicIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["publicId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["publicId"] += data
+ return True
+
+ def doctypePublicIdentifierSingleQuotedState(self):
+ data = self.stream.char()
+ if data == "'":
+ self.state = self.afterDoctypePublicIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["publicId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["publicId"] += data
+ return True
+
+ def afterDoctypePublicIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.betweenDoctypePublicAndSystemIdentifiersState
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == '"':
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierDoubleQuotedState
+ elif data == "'":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierSingleQuotedState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def betweenDoctypePublicAndSystemIdentifiersState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data == '"':
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierDoubleQuotedState
+ elif data == "'":
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierSingleQuotedState
+        elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def afterDoctypeSystemKeywordState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ self.state = self.beforeDoctypeSystemIdentifierState
+ elif data in ("'", '"'):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.stream.unget(data)
+ self.state = self.beforeDoctypeSystemIdentifierState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.stream.unget(data)
+ self.state = self.beforeDoctypeSystemIdentifierState
+ return True
+
+ def beforeDoctypeSystemIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == "\"":
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierDoubleQuotedState
+ elif data == "'":
+ self.currentToken["systemId"] = ""
+ self.state = self.doctypeSystemIdentifierSingleQuotedState
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.currentToken["correct"] = False
+ self.state = self.bogusDoctypeState
+ return True
+
+ def doctypeSystemIdentifierDoubleQuotedState(self):
+ data = self.stream.char()
+ if data == "\"":
+ self.state = self.afterDoctypeSystemIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["systemId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["systemId"] += data
+ return True
+
+ def doctypeSystemIdentifierSingleQuotedState(self):
+ data = self.stream.char()
+ if data == "'":
+ self.state = self.afterDoctypeSystemIdentifierState
+ elif data == "\u0000":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ self.currentToken["systemId"] += "\uFFFD"
+ elif data == ">":
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-end-of-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.currentToken["systemId"] += data
+ return True
+
+ def afterDoctypeSystemIdentifierState(self):
+ data = self.stream.char()
+ if data in spaceCharacters:
+ pass
+ elif data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "eof-in-doctype"})
+ self.currentToken["correct"] = False
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
+ "unexpected-char-in-doctype"})
+ self.state = self.bogusDoctypeState
+ return True
+
+ def bogusDoctypeState(self):
+ data = self.stream.char()
+ if data == ">":
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ elif data is EOF:
+ # XXX EMIT
+ self.stream.unget(data)
+ self.tokenQueue.append(self.currentToken)
+ self.state = self.dataState
+ else:
+ pass
+ return True
+
+ def cdataSectionState(self):
+ data = []
+ while True:
+ data.append(self.stream.charsUntil("]"))
+ data.append(self.stream.charsUntil(">"))
+ char = self.stream.char()
+            if char is EOF:
+ break
+ else:
+ assert char == ">"
+ if data[-1][-2:] == "]]":
+ data[-1] = data[-1][:-2]
+ break
+ else:
+ data.append(char)
+
+ data = "".join(data) # pylint:disable=redefined-variable-type
+ # Deal with null here rather than in the parser
+ nullCount = data.count("\u0000")
+ if nullCount > 0:
+ for _ in range(nullCount):
+ self.tokenQueue.append({"type": tokenTypes["ParseError"],
+ "data": "invalid-codepoint"})
+ data = data.replace("\u0000", "\uFFFD")
+ if data:
+ self.tokenQueue.append({"type": tokenTypes["Characters"],
+ "data": data})
+ self.state = self.dataState
+ return True
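+
+# Illustrative trace: in foreign (SVG/MathML) content, the input
+# "<![CDATA[x]]>" reaches markupDeclarationOpenState, which matches
+# "[CDATA[" and enters cdataSectionState; that state collects "x", strips
+# the trailing "]]", emits a Characters token for "x", and returns to
+# dataState.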
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py
new file mode 100644
index 00000000..a5ba4bf1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py
@@ -0,0 +1,14 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from .py import Trie as PyTrie
+
+Trie = PyTrie
+
+# pylint:disable=wrong-import-position
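+# Prefer the C-backed "datrie" implementation when that optional package
+# is importable; otherwise keep the pure-Python Trie imported above.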
+try:
+ from .datrie import Trie as DATrie
+except ImportError:
+ pass
+else:
+ Trie = DATrie
+# pylint:enable=wrong-import-position
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..bb097565
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc
new file mode 100644
index 00000000..2b98eddb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc
new file mode 100644
index 00000000..f55827e6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc
new file mode 100644
index 00000000..f0d0aa03
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py
new file mode 100644
index 00000000..6b71975f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py
@@ -0,0 +1,40 @@
+from __future__ import absolute_import, division, unicode_literals
+
+try:
+ from collections.abc import Mapping
+except ImportError: # Python 2.7
+ from collections import Mapping
+
+
+class Trie(Mapping):
+ """Abstract base class for tries"""
+
+ def keys(self, prefix=None):
+ # pylint:disable=arguments-differ
+ keys = super(Trie, self).keys()
+
+ if prefix is None:
+ return set(keys)
+
+ return {x for x in keys if x.startswith(prefix)}
+
+ def has_keys_with_prefix(self, prefix):
+ for key in self.keys():
+ if key.startswith(prefix):
+ return True
+
+ return False
+
+ def longest_prefix(self, prefix):
+ if prefix in self:
+ return prefix
+
+ for i in range(1, len(prefix) + 1):
+ if prefix[:-i] in self:
+ return prefix[:-i]
+
+ raise KeyError(prefix)
+
+ def longest_prefix_item(self, prefix):
+ lprefix = self.longest_prefix(prefix)
+ return (lprefix, self[lprefix])
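+
+# Example (illustrative): with keys {"am", "amp"}, longest_prefix("ampersand")
+# returns "amp" and longest_prefix_item("ampersand") returns ("amp",
+# <stored value>); if no leading substring of the argument is a key,
+# KeyError is raised.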
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py
new file mode 100644
index 00000000..e2e5f866
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py
@@ -0,0 +1,44 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from datrie import Trie as DATrie
+from pip._vendor.six import text_type
+
+from ._base import Trie as ABCTrie
+
+
+class Trie(ABCTrie):
+ def __init__(self, data):
+ chars = set()
+ for key in data.keys():
+ if not isinstance(key, text_type):
+ raise TypeError("All keys must be strings")
+ for char in key:
+ chars.add(char)
+
+ self._data = DATrie("".join(chars))
+ for key, value in data.items():
+ self._data[key] = value
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __len__(self):
+ return len(self._data)
+
+ def __iter__(self):
+ raise NotImplementedError()
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def keys(self, prefix=None):
+ return self._data.keys(prefix)
+
+ def has_keys_with_prefix(self, prefix):
+ return self._data.has_keys_with_prefix(prefix)
+
+ def longest_prefix(self, prefix):
+ return self._data.longest_prefix(prefix)
+
+ def longest_prefix_item(self, prefix):
+ return self._data.longest_prefix_item(prefix)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py
new file mode 100644
index 00000000..c178b219
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py
@@ -0,0 +1,67 @@
+from __future__ import absolute_import, division, unicode_literals
+from pip._vendor.six import text_type
+
+from bisect import bisect_left
+
+from ._base import Trie as ABCTrie
+
+
+class Trie(ABCTrie):
+ def __init__(self, data):
+ if not all(isinstance(x, text_type) for x in data.keys()):
+ raise TypeError("All keys must be strings")
+
+ self._data = data
+ self._keys = sorted(data.keys())
+ self._cachestr = ""
+ self._cachepoints = (0, len(data))
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __len__(self):
+ return len(self._data)
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def keys(self, prefix=None):
+ if prefix is None or prefix == "" or not self._keys:
+ return set(self._keys)
+
+ if prefix.startswith(self._cachestr):
+ lo, hi = self._cachepoints
+ start = i = bisect_left(self._keys, prefix, lo, hi)
+ else:
+ start = i = bisect_left(self._keys, prefix)
+
+ keys = set()
+ if start == len(self._keys):
+ return keys
+
+        # Guard the index: every remaining key may share the prefix.
+        while i < len(self._keys) and self._keys[i].startswith(prefix):
+ keys.add(self._keys[i])
+ i += 1
+
+ self._cachestr = prefix
+ self._cachepoints = (start, i)
+
+ return keys
+
+ def has_keys_with_prefix(self, prefix):
+ if prefix in self._data:
+ return True
+
+ if prefix.startswith(self._cachestr):
+ lo, hi = self._cachepoints
+ i = bisect_left(self._keys, prefix, lo, hi)
+ else:
+ i = bisect_left(self._keys, prefix)
+
+ if i == len(self._keys):
+ return False
+
+ return self._keys[i].startswith(prefix)
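+
+# Design note: _cachestr / _cachepoints remember the bisect window of the
+# previous query. Entity consumption in the tokenizer probes with
+# successively longer prefixes ("a", "am", "amp", ...), so each probe that
+# extends the cached prefix bisects only the narrowed slice of the sorted
+# key list instead of the whole thing.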
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py
new file mode 100644
index 00000000..0703afb3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py
@@ -0,0 +1,124 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from types import ModuleType
+
+from pip._vendor.six import text_type
+
+try:
+ import xml.etree.cElementTree as default_etree
+except ImportError:
+ import xml.etree.ElementTree as default_etree
+
+
+__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair",
+ "surrogatePairToCodepoint", "moduleFactoryFactory",
+ "supports_lone_surrogates"]
+
+
+# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be
+# caught by the test below. In general this would be any platform
+# using UTF-16 as its encoding of unicode strings, such as Jython.
+# This is because UTF-16 itself is built on such surrogates, and
+# there is no mechanism to further escape them.
+try:
+ _x = eval('"\\uD800"') # pylint:disable=eval-used
+ if not isinstance(_x, text_type):
+ # We need this with u"" because of http://bugs.jython.org/issue2039
+ _x = eval('u"\\uD800"') # pylint:disable=eval-used
+ assert isinstance(_x, text_type)
+except: # pylint:disable=bare-except
+ supports_lone_surrogates = False
+else:
+ supports_lone_surrogates = True
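+
+# CPython builds generally allow lone surrogates in text strings, so
+# supports_lone_surrogates comes out True there; UTF-16-based runtimes
+# such as Jython end up with False.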
+
+
+class MethodDispatcher(dict):
+ """Dict with 2 special properties:
+
+    On instantiation, keys that are lists, sets or tuples are converted to
+    multiple keys, so accessing any one of the items in the original
+    list-like object returns the matching value:
+
+    md = MethodDispatcher([(("foo", "bar"), "baz")])
+    md["foo"] == "baz"
+
+    A default value, which can be set through the default attribute.
+ """
+
+ def __init__(self, items=()):
+ # Using _dictEntries instead of directly assigning to self is about
+ # twice as fast. Please do careful performance testing before changing
+ # anything here.
+ _dictEntries = []
+ for name, value in items:
+ if isinstance(name, (list, tuple, frozenset, set)):
+ for item in name:
+ _dictEntries.append((item, value))
+ else:
+ _dictEntries.append((name, value))
+ dict.__init__(self, _dictEntries)
+ assert len(self) == len(_dictEntries)
+ self.default = None
+
+ def __getitem__(self, key):
+ return dict.get(self, key, self.default)
+
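+# A minimal usage sketch (illustrative; list-of-pairs input as the
+# constructor expects):
+#
+#   md = MethodDispatcher([(("start", "end"), "tag"), ("comment", "other")])
+#   md.default = "fallback"
+#   md["start"]    # -> "tag"
+#   md["comment"]  # -> "other"
+#   md["doctype"]  # -> "fallback" (unknown keys return .default, not raise)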
+
+# Some utility functions to deal with weirdness around UCS2 vs UCS4
+# python builds
+
+def isSurrogatePair(data):
+ return (len(data) == 2 and
+ ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and
+ ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF)
+
+
+def surrogatePairToCodepoint(data):
+ char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 +
+ (ord(data[1]) - 0xDC00))
+ return char_val
+
+# Module Factory Factory (no, this isn't Java, I know)
+# Here to stop this being duplicated all over the place.
+
+
+def moduleFactoryFactory(factory):
+ moduleCache = {}
+
+ def moduleFactory(baseModule, *args, **kwargs):
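+        # With unicode_literals in effect, "" is text even on Python 2,
+        # where module names must be native byte strings, so pick the
+        # literal whose type matches ModuleType.__name__.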
+ if isinstance(ModuleType.__name__, type("")):
+ name = "_%s_factory" % baseModule.__name__
+ else:
+ name = b"_%s_factory" % baseModule.__name__
+
+ kwargs_tuple = tuple(kwargs.items())
+
+ try:
+ return moduleCache[name][args][kwargs_tuple]
+ except KeyError:
+ mod = ModuleType(name)
+ objs = factory(baseModule, *args, **kwargs)
+ mod.__dict__.update(objs)
+ if "name" not in moduleCache:
+ moduleCache[name] = {}
+ if "args" not in moduleCache[name]:
+ moduleCache[name][args] = {}
+ if "kwargs" not in moduleCache[name][args]:
+ moduleCache[name][args][kwargs_tuple] = {}
+ moduleCache[name][args][kwargs_tuple] = mod
+ return mod
+
+ return moduleFactory
+
+
+def memoize(func):
+ cache = {}
+
+ def wrapped(*args, **kwargs):
+ key = (tuple(args), tuple(kwargs.items()))
+ if key not in cache:
+ cache[key] = func(*args, **kwargs)
+ return cache[key]
+
+ return wrapped
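+
+# Example (illustrative): memoize caches one result per distinct, hashable
+# call signature.
+#
+#   @memoize
+#   def lookup(name):
+#       ...  # the expensive body runs once per distinct "name"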
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py
new file mode 100644
index 00000000..1ff80419
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py
@@ -0,0 +1,2947 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import string
+
+EOF = None
+
+E = {
+ "null-character":
+ "Null character in input stream, replaced with U+FFFD.",
+ "invalid-codepoint":
+ "Invalid codepoint in stream.",
+ "incorrectly-placed-solidus":
+ "Solidus (/) incorrectly placed in tag.",
+ "incorrect-cr-newline-entity":
+ "Incorrect CR newline entity, replaced with LF.",
+ "illegal-windows-1252-entity":
+ "Entity used with illegal number (windows-1252 reference).",
+ "cant-convert-numeric-entity":
+ "Numeric entity couldn't be converted to character "
+ "(codepoint U+%(charAsInt)08x).",
+ "illegal-codepoint-for-numeric-entity":
+ "Numeric entity represents an illegal codepoint: "
+ "U+%(charAsInt)08x.",
+ "numeric-entity-without-semicolon":
+ "Numeric entity didn't end with ';'.",
+ "expected-numeric-entity-but-got-eof":
+ "Numeric entity expected. Got end of file instead.",
+ "expected-numeric-entity":
+ "Numeric entity expected but none found.",
+ "named-entity-without-semicolon":
+ "Named entity didn't end with ';'.",
+ "expected-named-entity":
+ "Named entity expected. Got none.",
+ "attributes-in-end-tag":
+ "End tag contains unexpected attributes.",
+ 'self-closing-flag-on-end-tag':
+ "End tag contains unexpected self-closing flag.",
+ "expected-tag-name-but-got-right-bracket":
+ "Expected tag name. Got '>' instead.",
+ "expected-tag-name-but-got-question-mark":
+ "Expected tag name. Got '?' instead. (HTML doesn't "
+ "support processing instructions.)",
+ "expected-tag-name":
+ "Expected tag name. Got something else instead",
+ "expected-closing-tag-but-got-right-bracket":
+ "Expected closing tag. Got '>' instead. Ignoring '</>'.",
+ "expected-closing-tag-but-got-eof":
+ "Expected closing tag. Unexpected end of file.",
+ "expected-closing-tag-but-got-char":
+ "Expected closing tag. Unexpected character '%(data)s' found.",
+ "eof-in-tag-name":
+ "Unexpected end of file in the tag name.",
+ "expected-attribute-name-but-got-eof":
+ "Unexpected end of file. Expected attribute name instead.",
+ "eof-in-attribute-name":
+ "Unexpected end of file in attribute name.",
+ "invalid-character-in-attribute-name":
+ "Invalid character in attribute name",
+ "duplicate-attribute":
+ "Dropped duplicate attribute on tag.",
+ "expected-end-of-tag-name-but-got-eof":
+ "Unexpected end of file. Expected = or end of tag.",
+ "expected-attribute-value-but-got-eof":
+ "Unexpected end of file. Expected attribute value.",
+ "expected-attribute-value-but-got-right-bracket":
+ "Expected attribute value. Got '>' instead.",
+ 'equals-in-unquoted-attribute-value':
+ "Unexpected = in unquoted attribute",
+ 'unexpected-character-in-unquoted-attribute-value':
+ "Unexpected character in unquoted attribute",
+ "invalid-character-after-attribute-name":
+ "Unexpected character after attribute name.",
+ "unexpected-character-after-attribute-value":
+ "Unexpected character after attribute value.",
+ "eof-in-attribute-value-double-quote":
+ "Unexpected end of file in attribute value (\").",
+ "eof-in-attribute-value-single-quote":
+ "Unexpected end of file in attribute value (').",
+ "eof-in-attribute-value-no-quotes":
+ "Unexpected end of file in attribute value.",
+ "unexpected-EOF-after-solidus-in-tag":
+ "Unexpected end of file in tag. Expected >",
+ "unexpected-character-after-solidus-in-tag":
+ "Unexpected character after / in tag. Expected >",
+ "expected-dashes-or-doctype":
+ "Expected '--' or 'DOCTYPE'. Not found.",
+ "unexpected-bang-after-double-dash-in-comment":
+ "Unexpected ! after -- in comment",
+ "unexpected-space-after-double-dash-in-comment":
+ "Unexpected space after -- in comment",
+ "incorrect-comment":
+ "Incorrect comment.",
+ "eof-in-comment":
+ "Unexpected end of file in comment.",
+ "eof-in-comment-end-dash":
+ "Unexpected end of file in comment (-)",
+ "unexpected-dash-after-double-dash-in-comment":
+ "Unexpected '-' after '--' found in comment.",
+ "eof-in-comment-double-dash":
+ "Unexpected end of file in comment (--).",
+ "eof-in-comment-end-space-state":
+ "Unexpected end of file in comment.",
+ "eof-in-comment-end-bang-state":
+ "Unexpected end of file in comment.",
+ "unexpected-char-in-comment":
+ "Unexpected character in comment found.",
+ "need-space-after-doctype":
+ "No space after literal string 'DOCTYPE'.",
+ "expected-doctype-name-but-got-right-bracket":
+ "Unexpected > character. Expected DOCTYPE name.",
+ "expected-doctype-name-but-got-eof":
+ "Unexpected end of file. Expected DOCTYPE name.",
+ "eof-in-doctype-name":
+ "Unexpected end of file in DOCTYPE name.",
+ "eof-in-doctype":
+ "Unexpected end of file in DOCTYPE.",
+ "expected-space-or-right-bracket-in-doctype":
+ "Expected space or '>'. Got '%(data)s'",
+ "unexpected-end-of-doctype":
+ "Unexpected end of DOCTYPE.",
+ "unexpected-char-in-doctype":
+ "Unexpected character in DOCTYPE.",
+ "eof-in-innerhtml":
+ "XXX innerHTML EOF",
+ "unexpected-doctype":
+ "Unexpected DOCTYPE. Ignored.",
+ "non-html-root":
+ "html needs to be the first start tag.",
+ "expected-doctype-but-got-eof":
+ "Unexpected End of file. Expected DOCTYPE.",
+ "unknown-doctype":
+ "Erroneous DOCTYPE.",
+ "expected-doctype-but-got-chars":
+ "Unexpected non-space characters. Expected DOCTYPE.",
+ "expected-doctype-but-got-start-tag":
+ "Unexpected start tag (%(name)s). Expected DOCTYPE.",
+ "expected-doctype-but-got-end-tag":
+ "Unexpected end tag (%(name)s). Expected DOCTYPE.",
+ "end-tag-after-implied-root":
+ "Unexpected end tag (%(name)s) after the (implied) root element.",
+ "expected-named-closing-tag-but-got-eof":
+ "Unexpected end of file. Expected end tag (%(name)s).",
+ "two-heads-are-not-better-than-one":
+ "Unexpected start tag head in existing head. Ignored.",
+ "unexpected-end-tag":
+ "Unexpected end tag (%(name)s). Ignored.",
+ "unexpected-start-tag-out-of-my-head":
+ "Unexpected start tag (%(name)s) that can be in head. Moved.",
+ "unexpected-start-tag":
+ "Unexpected start tag (%(name)s).",
+ "missing-end-tag":
+ "Missing end tag (%(name)s).",
+ "missing-end-tags":
+ "Missing end tags (%(name)s).",
+ "unexpected-start-tag-implies-end-tag":
+ "Unexpected start tag (%(startName)s) "
+ "implies end tag (%(endName)s).",
+ "unexpected-start-tag-treated-as":
+ "Unexpected start tag (%(originalName)s). Treated as %(newName)s.",
+ "deprecated-tag":
+ "Unexpected start tag %(name)s. Don't use it!",
+ "unexpected-start-tag-ignored":
+ "Unexpected start tag %(name)s. Ignored.",
+ "expected-one-end-tag-but-got-another":
+ "Unexpected end tag (%(gotName)s). "
+ "Missing end tag (%(expectedName)s).",
+ "end-tag-too-early":
+ "End tag (%(name)s) seen too early. Expected other end tag.",
+ "end-tag-too-early-named":
+ "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).",
+ "end-tag-too-early-ignored":
+ "End tag (%(name)s) seen too early. Ignored.",
+ "adoption-agency-1.1":
+ "End tag (%(name)s) violates step 1, "
+ "paragraph 1 of the adoption agency algorithm.",
+ "adoption-agency-1.2":
+ "End tag (%(name)s) violates step 1, "
+ "paragraph 2 of the adoption agency algorithm.",
+ "adoption-agency-1.3":
+ "End tag (%(name)s) violates step 1, "
+ "paragraph 3 of the adoption agency algorithm.",
+ "adoption-agency-4.4":
+ "End tag (%(name)s) violates step 4, "
+ "paragraph 4 of the adoption agency algorithm.",
+ "unexpected-end-tag-treated-as":
+ "Unexpected end tag (%(originalName)s). Treated as %(newName)s.",
+ "no-end-tag":
+ "This element (%(name)s) has no end tag.",
+ "unexpected-implied-end-tag-in-table":
+ "Unexpected implied end tag (%(name)s) in the table phase.",
+ "unexpected-implied-end-tag-in-table-body":
+ "Unexpected implied end tag (%(name)s) in the table body phase.",
+ "unexpected-char-implies-table-voodoo":
+ "Unexpected non-space characters in "
+ "table context caused voodoo mode.",
+ "unexpected-hidden-input-in-table":
+ "Unexpected input with type hidden in table context.",
+ "unexpected-form-in-table":
+ "Unexpected form in table context.",
+ "unexpected-start-tag-implies-table-voodoo":
+ "Unexpected start tag (%(name)s) in "
+ "table context caused voodoo mode.",
+ "unexpected-end-tag-implies-table-voodoo":
+ "Unexpected end tag (%(name)s) in "
+ "table context caused voodoo mode.",
+ "unexpected-cell-in-table-body":
+ "Unexpected table cell start tag (%(name)s) "
+ "in the table body phase.",
+ "unexpected-cell-end-tag":
+ "Got table cell end tag (%(name)s) "
+ "while required end tags are missing.",
+ "unexpected-end-tag-in-table-body":
+ "Unexpected end tag (%(name)s) in the table body phase. Ignored.",
+ "unexpected-implied-end-tag-in-table-row":
+ "Unexpected implied end tag (%(name)s) in the table row phase.",
+ "unexpected-end-tag-in-table-row":
+ "Unexpected end tag (%(name)s) in the table row phase. Ignored.",
+ "unexpected-select-in-select":
+ "Unexpected select start tag in the select phase "
+ "treated as select end tag.",
+ "unexpected-input-in-select":
+ "Unexpected input start tag in the select phase.",
+ "unexpected-start-tag-in-select":
+ "Unexpected start tag token (%(name)s in the select phase. "
+ "Ignored.",
+ "unexpected-end-tag-in-select":
+ "Unexpected end tag (%(name)s) in the select phase. Ignored.",
+ "unexpected-table-element-start-tag-in-select-in-table":
+ "Unexpected table element start tag (%(name)s) in the select in table phase.",
+ "unexpected-table-element-end-tag-in-select-in-table":
+ "Unexpected table element end tag (%(name)s) in the select in table phase.",
+ "unexpected-char-after-body":
+ "Unexpected non-space characters in the after body phase.",
+ "unexpected-start-tag-after-body":
+ "Unexpected start tag token (%(name)s)"
+ " in the after body phase.",
+ "unexpected-end-tag-after-body":
+ "Unexpected end tag token (%(name)s)"
+ " in the after body phase.",
+ "unexpected-char-in-frameset":
+ "Unexpected characters in the frameset phase. Characters ignored.",
+ "unexpected-start-tag-in-frameset":
+ "Unexpected start tag token (%(name)s)"
+ " in the frameset phase. Ignored.",
+ "unexpected-frameset-in-frameset-innerhtml":
+ "Unexpected end tag token (frameset) "
+ "in the frameset phase (innerHTML).",
+ "unexpected-end-tag-in-frameset":
+ "Unexpected end tag token (%(name)s)"
+ " in the frameset phase. Ignored.",
+ "unexpected-char-after-frameset":
+ "Unexpected non-space characters in the "
+ "after frameset phase. Ignored.",
+ "unexpected-start-tag-after-frameset":
+ "Unexpected start tag (%(name)s)"
+ " in the after frameset phase. Ignored.",
+ "unexpected-end-tag-after-frameset":
+ "Unexpected end tag (%(name)s)"
+ " in the after frameset phase. Ignored.",
+ "unexpected-end-tag-after-body-innerhtml":
+ "Unexpected end tag after body(innerHtml)",
+ "expected-eof-but-got-char":
+ "Unexpected non-space characters. Expected end of file.",
+ "expected-eof-but-got-start-tag":
+ "Unexpected start tag (%(name)s)"
+ ". Expected end of file.",
+ "expected-eof-but-got-end-tag":
+ "Unexpected end tag (%(name)s)"
+ ". Expected end of file.",
+ "eof-in-table":
+ "Unexpected end of file. Expected table content.",
+ "eof-in-select":
+ "Unexpected end of file. Expected select content.",
+ "eof-in-frameset":
+ "Unexpected end of file. Expected frameset content.",
+ "eof-in-script-in-script":
+ "Unexpected end of file. Expected script content.",
+ "eof-in-foreign-lands":
+ "Unexpected end of file. Expected foreign content",
+ "non-void-element-with-trailing-solidus":
+ "Trailing solidus not allowed on element %(name)s",
+ "unexpected-html-element-in-foreign-content":
+ "Element %(name)s not allowed in a non-html context",
+ "unexpected-end-tag-before-html":
+ "Unexpected end tag (%(name)s) before html.",
+ "unexpected-inhead-noscript-tag":
+ "Element %(name)s not allowed in a inhead-noscript context",
+ "eof-in-head-noscript":
+ "Unexpected end of file. Expected inhead-noscript content",
+ "char-in-head-noscript":
+ "Unexpected non-space character. Expected inhead-noscript content",
+ "XXX-undefined-error":
+ "Undefined error (this sucks and should be fixed)",
+}
+
+namespaces = {
+ "html": "http://www.w3.org/1999/xhtml",
+ "mathml": "http://www.w3.org/1998/Math/MathML",
+ "svg": "http://www.w3.org/2000/svg",
+ "xlink": "http://www.w3.org/1999/xlink",
+ "xml": "http://www.w3.org/XML/1998/namespace",
+ "xmlns": "http://www.w3.org/2000/xmlns/"
+}
+
+scopingElements = frozenset([
+ (namespaces["html"], "applet"),
+ (namespaces["html"], "caption"),
+ (namespaces["html"], "html"),
+ (namespaces["html"], "marquee"),
+ (namespaces["html"], "object"),
+ (namespaces["html"], "table"),
+ (namespaces["html"], "td"),
+ (namespaces["html"], "th"),
+ (namespaces["mathml"], "mi"),
+ (namespaces["mathml"], "mo"),
+ (namespaces["mathml"], "mn"),
+ (namespaces["mathml"], "ms"),
+ (namespaces["mathml"], "mtext"),
+ (namespaces["mathml"], "annotation-xml"),
+ (namespaces["svg"], "foreignObject"),
+ (namespaces["svg"], "desc"),
+ (namespaces["svg"], "title"),
+])
+
+formattingElements = frozenset([
+ (namespaces["html"], "a"),
+ (namespaces["html"], "b"),
+ (namespaces["html"], "big"),
+ (namespaces["html"], "code"),
+ (namespaces["html"], "em"),
+ (namespaces["html"], "font"),
+ (namespaces["html"], "i"),
+ (namespaces["html"], "nobr"),
+ (namespaces["html"], "s"),
+ (namespaces["html"], "small"),
+ (namespaces["html"], "strike"),
+ (namespaces["html"], "strong"),
+ (namespaces["html"], "tt"),
+ (namespaces["html"], "u")
+])
+
+specialElements = frozenset([
+ (namespaces["html"], "address"),
+ (namespaces["html"], "applet"),
+ (namespaces["html"], "area"),
+ (namespaces["html"], "article"),
+ (namespaces["html"], "aside"),
+ (namespaces["html"], "base"),
+ (namespaces["html"], "basefont"),
+ (namespaces["html"], "bgsound"),
+ (namespaces["html"], "blockquote"),
+ (namespaces["html"], "body"),
+ (namespaces["html"], "br"),
+ (namespaces["html"], "button"),
+ (namespaces["html"], "caption"),
+ (namespaces["html"], "center"),
+ (namespaces["html"], "col"),
+ (namespaces["html"], "colgroup"),
+ (namespaces["html"], "command"),
+ (namespaces["html"], "dd"),
+ (namespaces["html"], "details"),
+ (namespaces["html"], "dir"),
+ (namespaces["html"], "div"),
+ (namespaces["html"], "dl"),
+ (namespaces["html"], "dt"),
+ (namespaces["html"], "embed"),
+ (namespaces["html"], "fieldset"),
+ (namespaces["html"], "figure"),
+ (namespaces["html"], "footer"),
+ (namespaces["html"], "form"),
+ (namespaces["html"], "frame"),
+ (namespaces["html"], "frameset"),
+ (namespaces["html"], "h1"),
+ (namespaces["html"], "h2"),
+ (namespaces["html"], "h3"),
+ (namespaces["html"], "h4"),
+ (namespaces["html"], "h5"),
+ (namespaces["html"], "h6"),
+ (namespaces["html"], "head"),
+ (namespaces["html"], "header"),
+ (namespaces["html"], "hr"),
+ (namespaces["html"], "html"),
+ (namespaces["html"], "iframe"),
+ # Note that image is commented out in the spec as "this isn't an
+    # element that can end up on the stack, so it doesn't matter"
+ (namespaces["html"], "image"),
+ (namespaces["html"], "img"),
+ (namespaces["html"], "input"),
+ (namespaces["html"], "isindex"),
+ (namespaces["html"], "li"),
+ (namespaces["html"], "link"),
+ (namespaces["html"], "listing"),
+ (namespaces["html"], "marquee"),
+ (namespaces["html"], "menu"),
+ (namespaces["html"], "meta"),
+ (namespaces["html"], "nav"),
+ (namespaces["html"], "noembed"),
+ (namespaces["html"], "noframes"),
+ (namespaces["html"], "noscript"),
+ (namespaces["html"], "object"),
+ (namespaces["html"], "ol"),
+ (namespaces["html"], "p"),
+ (namespaces["html"], "param"),
+ (namespaces["html"], "plaintext"),
+ (namespaces["html"], "pre"),
+ (namespaces["html"], "script"),
+ (namespaces["html"], "section"),
+ (namespaces["html"], "select"),
+ (namespaces["html"], "style"),
+ (namespaces["html"], "table"),
+ (namespaces["html"], "tbody"),
+ (namespaces["html"], "td"),
+ (namespaces["html"], "textarea"),
+ (namespaces["html"], "tfoot"),
+ (namespaces["html"], "th"),
+ (namespaces["html"], "thead"),
+ (namespaces["html"], "title"),
+ (namespaces["html"], "tr"),
+ (namespaces["html"], "ul"),
+ (namespaces["html"], "wbr"),
+ (namespaces["html"], "xmp"),
+ (namespaces["svg"], "foreignObject")
+])
+
+htmlIntegrationPointElements = frozenset([
+ (namespaces["mathml"], "annotation-xml"),
+ (namespaces["svg"], "foreignObject"),
+ (namespaces["svg"], "desc"),
+ (namespaces["svg"], "title")
+])
+
+mathmlTextIntegrationPointElements = frozenset([
+ (namespaces["mathml"], "mi"),
+ (namespaces["mathml"], "mo"),
+ (namespaces["mathml"], "mn"),
+ (namespaces["mathml"], "ms"),
+ (namespaces["mathml"], "mtext")
+])
+
+adjustSVGAttributes = {
+ "attributename": "attributeName",
+ "attributetype": "attributeType",
+ "basefrequency": "baseFrequency",
+ "baseprofile": "baseProfile",
+ "calcmode": "calcMode",
+ "clippathunits": "clipPathUnits",
+ "contentscripttype": "contentScriptType",
+ "contentstyletype": "contentStyleType",
+ "diffuseconstant": "diffuseConstant",
+ "edgemode": "edgeMode",
+ "externalresourcesrequired": "externalResourcesRequired",
+ "filterres": "filterRes",
+ "filterunits": "filterUnits",
+ "glyphref": "glyphRef",
+ "gradienttransform": "gradientTransform",
+ "gradientunits": "gradientUnits",
+ "kernelmatrix": "kernelMatrix",
+ "kernelunitlength": "kernelUnitLength",
+ "keypoints": "keyPoints",
+ "keysplines": "keySplines",
+ "keytimes": "keyTimes",
+ "lengthadjust": "lengthAdjust",
+ "limitingconeangle": "limitingConeAngle",
+ "markerheight": "markerHeight",
+ "markerunits": "markerUnits",
+ "markerwidth": "markerWidth",
+ "maskcontentunits": "maskContentUnits",
+ "maskunits": "maskUnits",
+ "numoctaves": "numOctaves",
+ "pathlength": "pathLength",
+ "patterncontentunits": "patternContentUnits",
+ "patterntransform": "patternTransform",
+ "patternunits": "patternUnits",
+ "pointsatx": "pointsAtX",
+ "pointsaty": "pointsAtY",
+ "pointsatz": "pointsAtZ",
+ "preservealpha": "preserveAlpha",
+ "preserveaspectratio": "preserveAspectRatio",
+ "primitiveunits": "primitiveUnits",
+ "refx": "refX",
+ "refy": "refY",
+ "repeatcount": "repeatCount",
+ "repeatdur": "repeatDur",
+ "requiredextensions": "requiredExtensions",
+ "requiredfeatures": "requiredFeatures",
+ "specularconstant": "specularConstant",
+ "specularexponent": "specularExponent",
+ "spreadmethod": "spreadMethod",
+ "startoffset": "startOffset",
+ "stddeviation": "stdDeviation",
+ "stitchtiles": "stitchTiles",
+ "surfacescale": "surfaceScale",
+ "systemlanguage": "systemLanguage",
+ "tablevalues": "tableValues",
+ "targetx": "targetX",
+ "targety": "targetY",
+ "textlength": "textLength",
+ "viewbox": "viewBox",
+ "viewtarget": "viewTarget",
+ "xchannelselector": "xChannelSelector",
+ "ychannelselector": "yChannelSelector",
+ "zoomandpan": "zoomAndPan"
+}
+
+adjustMathMLAttributes = {"definitionurl": "definitionURL"}
+
+adjustForeignAttributes = {
+ "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
+ "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
+ "xlink:href": ("xlink", "href", namespaces["xlink"]),
+ "xlink:role": ("xlink", "role", namespaces["xlink"]),
+ "xlink:show": ("xlink", "show", namespaces["xlink"]),
+ "xlink:title": ("xlink", "title", namespaces["xlink"]),
+ "xlink:type": ("xlink", "type", namespaces["xlink"]),
+ "xml:base": ("xml", "base", namespaces["xml"]),
+ "xml:lang": ("xml", "lang", namespaces["xml"]),
+ "xml:space": ("xml", "space", namespaces["xml"]),
+ "xmlns": (None, "xmlns", namespaces["xmlns"]),
+ "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
+}
+
+unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in
+ adjustForeignAttributes.items()])
+
+spaceCharacters = frozenset([
+ "\t",
+ "\n",
+ "\u000C",
+ " ",
+ "\r"
+])
+
+tableInsertModeElements = frozenset([
+ "table",
+ "tbody",
+ "tfoot",
+ "thead",
+ "tr"
+])
+
+asciiLowercase = frozenset(string.ascii_lowercase)
+asciiUppercase = frozenset(string.ascii_uppercase)
+asciiLetters = frozenset(string.ascii_letters)
+digits = frozenset(string.digits)
+hexDigits = frozenset(string.hexdigits)
+
+asciiUpper2Lower = dict([(ord(c), ord(c.lower()))
+ for c in string.ascii_uppercase])
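+# str.translate with this table lowercases ASCII characters only; unlike
+# str.lower it never touches non-ASCII codepoints, e.g.
+#   "DOCType".translate(asciiUpper2Lower) == "doctype"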
+
+# Heading elements need to be ordered
+headingElements = (
+ "h1",
+ "h2",
+ "h3",
+ "h4",
+ "h5",
+ "h6"
+)
+
+voidElements = frozenset([
+ "base",
+ "command",
+ "event-source",
+ "link",
+ "meta",
+ "hr",
+ "br",
+ "img",
+ "embed",
+ "param",
+ "area",
+ "col",
+ "input",
+ "source",
+ "track"
+])
+
+cdataElements = frozenset(['title', 'textarea'])
+
+rcdataElements = frozenset([
+ 'style',
+ 'script',
+ 'xmp',
+ 'iframe',
+ 'noembed',
+ 'noframes',
+ 'noscript'
+])
+
+booleanAttributes = {
+ "": frozenset(["irrelevant", "itemscope"]),
+ "style": frozenset(["scoped"]),
+ "img": frozenset(["ismap"]),
+ "audio": frozenset(["autoplay", "controls"]),
+ "video": frozenset(["autoplay", "controls"]),
+ "script": frozenset(["defer", "async"]),
+ "details": frozenset(["open"]),
+ "datagrid": frozenset(["multiple", "disabled"]),
+ "command": frozenset(["hidden", "disabled", "checked", "default"]),
+ "hr": frozenset(["noshade"]),
+ "menu": frozenset(["autosubmit"]),
+ "fieldset": frozenset(["disabled", "readonly"]),
+ "option": frozenset(["disabled", "readonly", "selected"]),
+ "optgroup": frozenset(["disabled", "readonly"]),
+ "button": frozenset(["disabled", "autofocus"]),
+ "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]),
+ "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]),
+ "output": frozenset(["disabled", "readonly"]),
+ "iframe": frozenset(["seamless"]),
+}
+
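A minimal sketch of how a serializer might consult booleanAttributes when deciding whether an attribute can be minimized; is_boolean_attribute is a hypothetical helper, not part of this module:

    from pip._vendor.html5lib.constants import booleanAttributes

    def is_boolean_attribute(tag, attr):
        # Hypothetical helper: the "" key holds attributes that are
        # boolean on every element, the other keys are per-tag.
        per_tag = booleanAttributes.get(tag, frozenset())
        return attr in per_tag or attr in booleanAttributes[""]

    assert is_boolean_attribute("input", "checked")
    assert is_boolean_attribute("div", "itemscope")  # global "" bucket
    assert not is_boolean_attribute("a", "href")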
+# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
+# therefore can't be a frozenset.
+entitiesWindows1252 = (
+ 8364, # 0x80 0x20AC EURO SIGN
+ 65533, # 0x81 UNDEFINED
+ 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK
+ 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK
+ 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK
+ 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS
+ 8224, # 0x86 0x2020 DAGGER
+ 8225, # 0x87 0x2021 DOUBLE DAGGER
+ 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT
+ 8240, # 0x89 0x2030 PER MILLE SIGN
+ 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON
+ 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE
+ 65533, # 0x8D UNDEFINED
+ 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON
+ 65533, # 0x8F UNDEFINED
+ 65533, # 0x90 UNDEFINED
+ 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK
+ 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK
+ 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK
+ 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK
+ 8226, # 0x95 0x2022 BULLET
+ 8211, # 0x96 0x2013 EN DASH
+ 8212, # 0x97 0x2014 EM DASH
+ 732, # 0x98 0x02DC SMALL TILDE
+ 8482, # 0x99 0x2122 TRADE MARK SIGN
+ 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON
+ 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE
+ 65533, # 0x9D UNDEFINED
+ 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON
+ 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS
+)
+
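The ordering requirement exists because the tuple is indexed by offset: numeric character references naming C1 controls (0x80-0x9F) are reinterpreted as Windows-1252. A sketch with a hypothetical remap_c1 helper:

    from pip._vendor.html5lib.constants import entitiesWindows1252

    def remap_c1(codepoint):
        # Hypothetical helper: C1 codepoints index the tuple at
        # (codepoint - 0x80); everything else passes through.
        if 0x80 <= codepoint <= 0x9F:
            return chr(entitiesWindows1252[codepoint - 0x80])
        return chr(codepoint)

    assert remap_c1(0x80) == "\u20ac"  # &#x80; -> EURO SIGN
    assert remap_c1(0x99) == "\u2122"  # &#x99; -> TRADE MARK SIGN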
+xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;'])
+
+entities = {
+ "AElig": "\xc6",
+ "AElig;": "\xc6",
+ "AMP": "&",
+ "AMP;": "&",
+ "Aacute": "\xc1",
+ "Aacute;": "\xc1",
+ "Abreve;": "\u0102",
+ "Acirc": "\xc2",
+ "Acirc;": "\xc2",
+ "Acy;": "\u0410",
+ "Afr;": "\U0001d504",
+ "Agrave": "\xc0",
+ "Agrave;": "\xc0",
+ "Alpha;": "\u0391",
+ "Amacr;": "\u0100",
+ "And;": "\u2a53",
+ "Aogon;": "\u0104",
+ "Aopf;": "\U0001d538",
+ "ApplyFunction;": "\u2061",
+ "Aring": "\xc5",
+ "Aring;": "\xc5",
+ "Ascr;": "\U0001d49c",
+ "Assign;": "\u2254",
+ "Atilde": "\xc3",
+ "Atilde;": "\xc3",
+ "Auml": "\xc4",
+ "Auml;": "\xc4",
+ "Backslash;": "\u2216",
+ "Barv;": "\u2ae7",
+ "Barwed;": "\u2306",
+ "Bcy;": "\u0411",
+ "Because;": "\u2235",
+ "Bernoullis;": "\u212c",
+ "Beta;": "\u0392",
+ "Bfr;": "\U0001d505",
+ "Bopf;": "\U0001d539",
+ "Breve;": "\u02d8",
+ "Bscr;": "\u212c",
+ "Bumpeq;": "\u224e",
+ "CHcy;": "\u0427",
+ "COPY": "\xa9",
+ "COPY;": "\xa9",
+ "Cacute;": "\u0106",
+ "Cap;": "\u22d2",
+ "CapitalDifferentialD;": "\u2145",
+ "Cayleys;": "\u212d",
+ "Ccaron;": "\u010c",
+ "Ccedil": "\xc7",
+ "Ccedil;": "\xc7",
+ "Ccirc;": "\u0108",
+ "Cconint;": "\u2230",
+ "Cdot;": "\u010a",
+ "Cedilla;": "\xb8",
+ "CenterDot;": "\xb7",
+ "Cfr;": "\u212d",
+ "Chi;": "\u03a7",
+ "CircleDot;": "\u2299",
+ "CircleMinus;": "\u2296",
+ "CirclePlus;": "\u2295",
+ "CircleTimes;": "\u2297",
+ "ClockwiseContourIntegral;": "\u2232",
+ "CloseCurlyDoubleQuote;": "\u201d",
+ "CloseCurlyQuote;": "\u2019",
+ "Colon;": "\u2237",
+ "Colone;": "\u2a74",
+ "Congruent;": "\u2261",
+ "Conint;": "\u222f",
+ "ContourIntegral;": "\u222e",
+ "Copf;": "\u2102",
+ "Coproduct;": "\u2210",
+ "CounterClockwiseContourIntegral;": "\u2233",
+ "Cross;": "\u2a2f",
+ "Cscr;": "\U0001d49e",
+ "Cup;": "\u22d3",
+ "CupCap;": "\u224d",
+ "DD;": "\u2145",
+ "DDotrahd;": "\u2911",
+ "DJcy;": "\u0402",
+ "DScy;": "\u0405",
+ "DZcy;": "\u040f",
+ "Dagger;": "\u2021",
+ "Darr;": "\u21a1",
+ "Dashv;": "\u2ae4",
+ "Dcaron;": "\u010e",
+ "Dcy;": "\u0414",
+ "Del;": "\u2207",
+ "Delta;": "\u0394",
+ "Dfr;": "\U0001d507",
+ "DiacriticalAcute;": "\xb4",
+ "DiacriticalDot;": "\u02d9",
+ "DiacriticalDoubleAcute;": "\u02dd",
+ "DiacriticalGrave;": "`",
+ "DiacriticalTilde;": "\u02dc",
+ "Diamond;": "\u22c4",
+ "DifferentialD;": "\u2146",
+ "Dopf;": "\U0001d53b",
+ "Dot;": "\xa8",
+ "DotDot;": "\u20dc",
+ "DotEqual;": "\u2250",
+ "DoubleContourIntegral;": "\u222f",
+ "DoubleDot;": "\xa8",
+ "DoubleDownArrow;": "\u21d3",
+ "DoubleLeftArrow;": "\u21d0",
+ "DoubleLeftRightArrow;": "\u21d4",
+ "DoubleLeftTee;": "\u2ae4",
+ "DoubleLongLeftArrow;": "\u27f8",
+ "DoubleLongLeftRightArrow;": "\u27fa",
+ "DoubleLongRightArrow;": "\u27f9",
+ "DoubleRightArrow;": "\u21d2",
+ "DoubleRightTee;": "\u22a8",
+ "DoubleUpArrow;": "\u21d1",
+ "DoubleUpDownArrow;": "\u21d5",
+ "DoubleVerticalBar;": "\u2225",
+ "DownArrow;": "\u2193",
+ "DownArrowBar;": "\u2913",
+ "DownArrowUpArrow;": "\u21f5",
+ "DownBreve;": "\u0311",
+ "DownLeftRightVector;": "\u2950",
+ "DownLeftTeeVector;": "\u295e",
+ "DownLeftVector;": "\u21bd",
+ "DownLeftVectorBar;": "\u2956",
+ "DownRightTeeVector;": "\u295f",
+ "DownRightVector;": "\u21c1",
+ "DownRightVectorBar;": "\u2957",
+ "DownTee;": "\u22a4",
+ "DownTeeArrow;": "\u21a7",
+ "Downarrow;": "\u21d3",
+ "Dscr;": "\U0001d49f",
+ "Dstrok;": "\u0110",
+ "ENG;": "\u014a",
+ "ETH": "\xd0",
+ "ETH;": "\xd0",
+ "Eacute": "\xc9",
+ "Eacute;": "\xc9",
+ "Ecaron;": "\u011a",
+ "Ecirc": "\xca",
+ "Ecirc;": "\xca",
+ "Ecy;": "\u042d",
+ "Edot;": "\u0116",
+ "Efr;": "\U0001d508",
+ "Egrave": "\xc8",
+ "Egrave;": "\xc8",
+ "Element;": "\u2208",
+ "Emacr;": "\u0112",
+ "EmptySmallSquare;": "\u25fb",
+ "EmptyVerySmallSquare;": "\u25ab",
+ "Eogon;": "\u0118",
+ "Eopf;": "\U0001d53c",
+ "Epsilon;": "\u0395",
+ "Equal;": "\u2a75",
+ "EqualTilde;": "\u2242",
+ "Equilibrium;": "\u21cc",
+ "Escr;": "\u2130",
+ "Esim;": "\u2a73",
+ "Eta;": "\u0397",
+ "Euml": "\xcb",
+ "Euml;": "\xcb",
+ "Exists;": "\u2203",
+ "ExponentialE;": "\u2147",
+ "Fcy;": "\u0424",
+ "Ffr;": "\U0001d509",
+ "FilledSmallSquare;": "\u25fc",
+ "FilledVerySmallSquare;": "\u25aa",
+ "Fopf;": "\U0001d53d",
+ "ForAll;": "\u2200",
+ "Fouriertrf;": "\u2131",
+ "Fscr;": "\u2131",
+ "GJcy;": "\u0403",
+ "GT": ">",
+ "GT;": ">",
+ "Gamma;": "\u0393",
+ "Gammad;": "\u03dc",
+ "Gbreve;": "\u011e",
+ "Gcedil;": "\u0122",
+ "Gcirc;": "\u011c",
+ "Gcy;": "\u0413",
+ "Gdot;": "\u0120",
+ "Gfr;": "\U0001d50a",
+ "Gg;": "\u22d9",
+ "Gopf;": "\U0001d53e",
+ "GreaterEqual;": "\u2265",
+ "GreaterEqualLess;": "\u22db",
+ "GreaterFullEqual;": "\u2267",
+ "GreaterGreater;": "\u2aa2",
+ "GreaterLess;": "\u2277",
+ "GreaterSlantEqual;": "\u2a7e",
+ "GreaterTilde;": "\u2273",
+ "Gscr;": "\U0001d4a2",
+ "Gt;": "\u226b",
+ "HARDcy;": "\u042a",
+ "Hacek;": "\u02c7",
+ "Hat;": "^",
+ "Hcirc;": "\u0124",
+ "Hfr;": "\u210c",
+ "HilbertSpace;": "\u210b",
+ "Hopf;": "\u210d",
+ "HorizontalLine;": "\u2500",
+ "Hscr;": "\u210b",
+ "Hstrok;": "\u0126",
+ "HumpDownHump;": "\u224e",
+ "HumpEqual;": "\u224f",
+ "IEcy;": "\u0415",
+ "IJlig;": "\u0132",
+ "IOcy;": "\u0401",
+ "Iacute": "\xcd",
+ "Iacute;": "\xcd",
+ "Icirc": "\xce",
+ "Icirc;": "\xce",
+ "Icy;": "\u0418",
+ "Idot;": "\u0130",
+ "Ifr;": "\u2111",
+ "Igrave": "\xcc",
+ "Igrave;": "\xcc",
+ "Im;": "\u2111",
+ "Imacr;": "\u012a",
+ "ImaginaryI;": "\u2148",
+ "Implies;": "\u21d2",
+ "Int;": "\u222c",
+ "Integral;": "\u222b",
+ "Intersection;": "\u22c2",
+ "InvisibleComma;": "\u2063",
+ "InvisibleTimes;": "\u2062",
+ "Iogon;": "\u012e",
+ "Iopf;": "\U0001d540",
+ "Iota;": "\u0399",
+ "Iscr;": "\u2110",
+ "Itilde;": "\u0128",
+ "Iukcy;": "\u0406",
+ "Iuml": "\xcf",
+ "Iuml;": "\xcf",
+ "Jcirc;": "\u0134",
+ "Jcy;": "\u0419",
+ "Jfr;": "\U0001d50d",
+ "Jopf;": "\U0001d541",
+ "Jscr;": "\U0001d4a5",
+ "Jsercy;": "\u0408",
+ "Jukcy;": "\u0404",
+ "KHcy;": "\u0425",
+ "KJcy;": "\u040c",
+ "Kappa;": "\u039a",
+ "Kcedil;": "\u0136",
+ "Kcy;": "\u041a",
+ "Kfr;": "\U0001d50e",
+ "Kopf;": "\U0001d542",
+ "Kscr;": "\U0001d4a6",
+ "LJcy;": "\u0409",
+ "LT": "<",
+ "LT;": "<",
+ "Lacute;": "\u0139",
+ "Lambda;": "\u039b",
+ "Lang;": "\u27ea",
+ "Laplacetrf;": "\u2112",
+ "Larr;": "\u219e",
+ "Lcaron;": "\u013d",
+ "Lcedil;": "\u013b",
+ "Lcy;": "\u041b",
+ "LeftAngleBracket;": "\u27e8",
+ "LeftArrow;": "\u2190",
+ "LeftArrowBar;": "\u21e4",
+ "LeftArrowRightArrow;": "\u21c6",
+ "LeftCeiling;": "\u2308",
+ "LeftDoubleBracket;": "\u27e6",
+ "LeftDownTeeVector;": "\u2961",
+ "LeftDownVector;": "\u21c3",
+ "LeftDownVectorBar;": "\u2959",
+ "LeftFloor;": "\u230a",
+ "LeftRightArrow;": "\u2194",
+ "LeftRightVector;": "\u294e",
+ "LeftTee;": "\u22a3",
+ "LeftTeeArrow;": "\u21a4",
+ "LeftTeeVector;": "\u295a",
+ "LeftTriangle;": "\u22b2",
+ "LeftTriangleBar;": "\u29cf",
+ "LeftTriangleEqual;": "\u22b4",
+ "LeftUpDownVector;": "\u2951",
+ "LeftUpTeeVector;": "\u2960",
+ "LeftUpVector;": "\u21bf",
+ "LeftUpVectorBar;": "\u2958",
+ "LeftVector;": "\u21bc",
+ "LeftVectorBar;": "\u2952",
+ "Leftarrow;": "\u21d0",
+ "Leftrightarrow;": "\u21d4",
+ "LessEqualGreater;": "\u22da",
+ "LessFullEqual;": "\u2266",
+ "LessGreater;": "\u2276",
+ "LessLess;": "\u2aa1",
+ "LessSlantEqual;": "\u2a7d",
+ "LessTilde;": "\u2272",
+ "Lfr;": "\U0001d50f",
+ "Ll;": "\u22d8",
+ "Lleftarrow;": "\u21da",
+ "Lmidot;": "\u013f",
+ "LongLeftArrow;": "\u27f5",
+ "LongLeftRightArrow;": "\u27f7",
+ "LongRightArrow;": "\u27f6",
+ "Longleftarrow;": "\u27f8",
+ "Longleftrightarrow;": "\u27fa",
+ "Longrightarrow;": "\u27f9",
+ "Lopf;": "\U0001d543",
+ "LowerLeftArrow;": "\u2199",
+ "LowerRightArrow;": "\u2198",
+ "Lscr;": "\u2112",
+ "Lsh;": "\u21b0",
+ "Lstrok;": "\u0141",
+ "Lt;": "\u226a",
+ "Map;": "\u2905",
+ "Mcy;": "\u041c",
+ "MediumSpace;": "\u205f",
+ "Mellintrf;": "\u2133",
+ "Mfr;": "\U0001d510",
+ "MinusPlus;": "\u2213",
+ "Mopf;": "\U0001d544",
+ "Mscr;": "\u2133",
+ "Mu;": "\u039c",
+ "NJcy;": "\u040a",
+ "Nacute;": "\u0143",
+ "Ncaron;": "\u0147",
+ "Ncedil;": "\u0145",
+ "Ncy;": "\u041d",
+ "NegativeMediumSpace;": "\u200b",
+ "NegativeThickSpace;": "\u200b",
+ "NegativeThinSpace;": "\u200b",
+ "NegativeVeryThinSpace;": "\u200b",
+ "NestedGreaterGreater;": "\u226b",
+ "NestedLessLess;": "\u226a",
+ "NewLine;": "\n",
+ "Nfr;": "\U0001d511",
+ "NoBreak;": "\u2060",
+ "NonBreakingSpace;": "\xa0",
+ "Nopf;": "\u2115",
+ "Not;": "\u2aec",
+ "NotCongruent;": "\u2262",
+ "NotCupCap;": "\u226d",
+ "NotDoubleVerticalBar;": "\u2226",
+ "NotElement;": "\u2209",
+ "NotEqual;": "\u2260",
+ "NotEqualTilde;": "\u2242\u0338",
+ "NotExists;": "\u2204",
+ "NotGreater;": "\u226f",
+ "NotGreaterEqual;": "\u2271",
+ "NotGreaterFullEqual;": "\u2267\u0338",
+ "NotGreaterGreater;": "\u226b\u0338",
+ "NotGreaterLess;": "\u2279",
+ "NotGreaterSlantEqual;": "\u2a7e\u0338",
+ "NotGreaterTilde;": "\u2275",
+ "NotHumpDownHump;": "\u224e\u0338",
+ "NotHumpEqual;": "\u224f\u0338",
+ "NotLeftTriangle;": "\u22ea",
+ "NotLeftTriangleBar;": "\u29cf\u0338",
+ "NotLeftTriangleEqual;": "\u22ec",
+ "NotLess;": "\u226e",
+ "NotLessEqual;": "\u2270",
+ "NotLessGreater;": "\u2278",
+ "NotLessLess;": "\u226a\u0338",
+ "NotLessSlantEqual;": "\u2a7d\u0338",
+ "NotLessTilde;": "\u2274",
+ "NotNestedGreaterGreater;": "\u2aa2\u0338",
+ "NotNestedLessLess;": "\u2aa1\u0338",
+ "NotPrecedes;": "\u2280",
+ "NotPrecedesEqual;": "\u2aaf\u0338",
+ "NotPrecedesSlantEqual;": "\u22e0",
+ "NotReverseElement;": "\u220c",
+ "NotRightTriangle;": "\u22eb",
+ "NotRightTriangleBar;": "\u29d0\u0338",
+ "NotRightTriangleEqual;": "\u22ed",
+ "NotSquareSubset;": "\u228f\u0338",
+ "NotSquareSubsetEqual;": "\u22e2",
+ "NotSquareSuperset;": "\u2290\u0338",
+ "NotSquareSupersetEqual;": "\u22e3",
+ "NotSubset;": "\u2282\u20d2",
+ "NotSubsetEqual;": "\u2288",
+ "NotSucceeds;": "\u2281",
+ "NotSucceedsEqual;": "\u2ab0\u0338",
+ "NotSucceedsSlantEqual;": "\u22e1",
+ "NotSucceedsTilde;": "\u227f\u0338",
+ "NotSuperset;": "\u2283\u20d2",
+ "NotSupersetEqual;": "\u2289",
+ "NotTilde;": "\u2241",
+ "NotTildeEqual;": "\u2244",
+ "NotTildeFullEqual;": "\u2247",
+ "NotTildeTilde;": "\u2249",
+ "NotVerticalBar;": "\u2224",
+ "Nscr;": "\U0001d4a9",
+ "Ntilde": "\xd1",
+ "Ntilde;": "\xd1",
+ "Nu;": "\u039d",
+ "OElig;": "\u0152",
+ "Oacute": "\xd3",
+ "Oacute;": "\xd3",
+ "Ocirc": "\xd4",
+ "Ocirc;": "\xd4",
+ "Ocy;": "\u041e",
+ "Odblac;": "\u0150",
+ "Ofr;": "\U0001d512",
+ "Ograve": "\xd2",
+ "Ograve;": "\xd2",
+ "Omacr;": "\u014c",
+ "Omega;": "\u03a9",
+ "Omicron;": "\u039f",
+ "Oopf;": "\U0001d546",
+ "OpenCurlyDoubleQuote;": "\u201c",
+ "OpenCurlyQuote;": "\u2018",
+ "Or;": "\u2a54",
+ "Oscr;": "\U0001d4aa",
+ "Oslash": "\xd8",
+ "Oslash;": "\xd8",
+ "Otilde": "\xd5",
+ "Otilde;": "\xd5",
+ "Otimes;": "\u2a37",
+ "Ouml": "\xd6",
+ "Ouml;": "\xd6",
+ "OverBar;": "\u203e",
+ "OverBrace;": "\u23de",
+ "OverBracket;": "\u23b4",
+ "OverParenthesis;": "\u23dc",
+ "PartialD;": "\u2202",
+ "Pcy;": "\u041f",
+ "Pfr;": "\U0001d513",
+ "Phi;": "\u03a6",
+ "Pi;": "\u03a0",
+ "PlusMinus;": "\xb1",
+ "Poincareplane;": "\u210c",
+ "Popf;": "\u2119",
+ "Pr;": "\u2abb",
+ "Precedes;": "\u227a",
+ "PrecedesEqual;": "\u2aaf",
+ "PrecedesSlantEqual;": "\u227c",
+ "PrecedesTilde;": "\u227e",
+ "Prime;": "\u2033",
+ "Product;": "\u220f",
+ "Proportion;": "\u2237",
+ "Proportional;": "\u221d",
+ "Pscr;": "\U0001d4ab",
+ "Psi;": "\u03a8",
+ "QUOT": "\"",
+ "QUOT;": "\"",
+ "Qfr;": "\U0001d514",
+ "Qopf;": "\u211a",
+ "Qscr;": "\U0001d4ac",
+ "RBarr;": "\u2910",
+ "REG": "\xae",
+ "REG;": "\xae",
+ "Racute;": "\u0154",
+ "Rang;": "\u27eb",
+ "Rarr;": "\u21a0",
+ "Rarrtl;": "\u2916",
+ "Rcaron;": "\u0158",
+ "Rcedil;": "\u0156",
+ "Rcy;": "\u0420",
+ "Re;": "\u211c",
+ "ReverseElement;": "\u220b",
+ "ReverseEquilibrium;": "\u21cb",
+ "ReverseUpEquilibrium;": "\u296f",
+ "Rfr;": "\u211c",
+ "Rho;": "\u03a1",
+ "RightAngleBracket;": "\u27e9",
+ "RightArrow;": "\u2192",
+ "RightArrowBar;": "\u21e5",
+ "RightArrowLeftArrow;": "\u21c4",
+ "RightCeiling;": "\u2309",
+ "RightDoubleBracket;": "\u27e7",
+ "RightDownTeeVector;": "\u295d",
+ "RightDownVector;": "\u21c2",
+ "RightDownVectorBar;": "\u2955",
+ "RightFloor;": "\u230b",
+ "RightTee;": "\u22a2",
+ "RightTeeArrow;": "\u21a6",
+ "RightTeeVector;": "\u295b",
+ "RightTriangle;": "\u22b3",
+ "RightTriangleBar;": "\u29d0",
+ "RightTriangleEqual;": "\u22b5",
+ "RightUpDownVector;": "\u294f",
+ "RightUpTeeVector;": "\u295c",
+ "RightUpVector;": "\u21be",
+ "RightUpVectorBar;": "\u2954",
+ "RightVector;": "\u21c0",
+ "RightVectorBar;": "\u2953",
+ "Rightarrow;": "\u21d2",
+ "Ropf;": "\u211d",
+ "RoundImplies;": "\u2970",
+ "Rrightarrow;": "\u21db",
+ "Rscr;": "\u211b",
+ "Rsh;": "\u21b1",
+ "RuleDelayed;": "\u29f4",
+ "SHCHcy;": "\u0429",
+ "SHcy;": "\u0428",
+ "SOFTcy;": "\u042c",
+ "Sacute;": "\u015a",
+ "Sc;": "\u2abc",
+ "Scaron;": "\u0160",
+ "Scedil;": "\u015e",
+ "Scirc;": "\u015c",
+ "Scy;": "\u0421",
+ "Sfr;": "\U0001d516",
+ "ShortDownArrow;": "\u2193",
+ "ShortLeftArrow;": "\u2190",
+ "ShortRightArrow;": "\u2192",
+ "ShortUpArrow;": "\u2191",
+ "Sigma;": "\u03a3",
+ "SmallCircle;": "\u2218",
+ "Sopf;": "\U0001d54a",
+ "Sqrt;": "\u221a",
+ "Square;": "\u25a1",
+ "SquareIntersection;": "\u2293",
+ "SquareSubset;": "\u228f",
+ "SquareSubsetEqual;": "\u2291",
+ "SquareSuperset;": "\u2290",
+ "SquareSupersetEqual;": "\u2292",
+ "SquareUnion;": "\u2294",
+ "Sscr;": "\U0001d4ae",
+ "Star;": "\u22c6",
+ "Sub;": "\u22d0",
+ "Subset;": "\u22d0",
+ "SubsetEqual;": "\u2286",
+ "Succeeds;": "\u227b",
+ "SucceedsEqual;": "\u2ab0",
+ "SucceedsSlantEqual;": "\u227d",
+ "SucceedsTilde;": "\u227f",
+ "SuchThat;": "\u220b",
+ "Sum;": "\u2211",
+ "Sup;": "\u22d1",
+ "Superset;": "\u2283",
+ "SupersetEqual;": "\u2287",
+ "Supset;": "\u22d1",
+ "THORN": "\xde",
+ "THORN;": "\xde",
+ "TRADE;": "\u2122",
+ "TSHcy;": "\u040b",
+ "TScy;": "\u0426",
+ "Tab;": "\t",
+ "Tau;": "\u03a4",
+ "Tcaron;": "\u0164",
+ "Tcedil;": "\u0162",
+ "Tcy;": "\u0422",
+ "Tfr;": "\U0001d517",
+ "Therefore;": "\u2234",
+ "Theta;": "\u0398",
+ "ThickSpace;": "\u205f\u200a",
+ "ThinSpace;": "\u2009",
+ "Tilde;": "\u223c",
+ "TildeEqual;": "\u2243",
+ "TildeFullEqual;": "\u2245",
+ "TildeTilde;": "\u2248",
+ "Topf;": "\U0001d54b",
+ "TripleDot;": "\u20db",
+ "Tscr;": "\U0001d4af",
+ "Tstrok;": "\u0166",
+ "Uacute": "\xda",
+ "Uacute;": "\xda",
+ "Uarr;": "\u219f",
+ "Uarrocir;": "\u2949",
+ "Ubrcy;": "\u040e",
+ "Ubreve;": "\u016c",
+ "Ucirc": "\xdb",
+ "Ucirc;": "\xdb",
+ "Ucy;": "\u0423",
+ "Udblac;": "\u0170",
+ "Ufr;": "\U0001d518",
+ "Ugrave": "\xd9",
+ "Ugrave;": "\xd9",
+ "Umacr;": "\u016a",
+ "UnderBar;": "_",
+ "UnderBrace;": "\u23df",
+ "UnderBracket;": "\u23b5",
+ "UnderParenthesis;": "\u23dd",
+ "Union;": "\u22c3",
+ "UnionPlus;": "\u228e",
+ "Uogon;": "\u0172",
+ "Uopf;": "\U0001d54c",
+ "UpArrow;": "\u2191",
+ "UpArrowBar;": "\u2912",
+ "UpArrowDownArrow;": "\u21c5",
+ "UpDownArrow;": "\u2195",
+ "UpEquilibrium;": "\u296e",
+ "UpTee;": "\u22a5",
+ "UpTeeArrow;": "\u21a5",
+ "Uparrow;": "\u21d1",
+ "Updownarrow;": "\u21d5",
+ "UpperLeftArrow;": "\u2196",
+ "UpperRightArrow;": "\u2197",
+ "Upsi;": "\u03d2",
+ "Upsilon;": "\u03a5",
+ "Uring;": "\u016e",
+ "Uscr;": "\U0001d4b0",
+ "Utilde;": "\u0168",
+ "Uuml": "\xdc",
+ "Uuml;": "\xdc",
+ "VDash;": "\u22ab",
+ "Vbar;": "\u2aeb",
+ "Vcy;": "\u0412",
+ "Vdash;": "\u22a9",
+ "Vdashl;": "\u2ae6",
+ "Vee;": "\u22c1",
+ "Verbar;": "\u2016",
+ "Vert;": "\u2016",
+ "VerticalBar;": "\u2223",
+ "VerticalLine;": "|",
+ "VerticalSeparator;": "\u2758",
+ "VerticalTilde;": "\u2240",
+ "VeryThinSpace;": "\u200a",
+ "Vfr;": "\U0001d519",
+ "Vopf;": "\U0001d54d",
+ "Vscr;": "\U0001d4b1",
+ "Vvdash;": "\u22aa",
+ "Wcirc;": "\u0174",
+ "Wedge;": "\u22c0",
+ "Wfr;": "\U0001d51a",
+ "Wopf;": "\U0001d54e",
+ "Wscr;": "\U0001d4b2",
+ "Xfr;": "\U0001d51b",
+ "Xi;": "\u039e",
+ "Xopf;": "\U0001d54f",
+ "Xscr;": "\U0001d4b3",
+ "YAcy;": "\u042f",
+ "YIcy;": "\u0407",
+ "YUcy;": "\u042e",
+ "Yacute": "\xdd",
+ "Yacute;": "\xdd",
+ "Ycirc;": "\u0176",
+ "Ycy;": "\u042b",
+ "Yfr;": "\U0001d51c",
+ "Yopf;": "\U0001d550",
+ "Yscr;": "\U0001d4b4",
+ "Yuml;": "\u0178",
+ "ZHcy;": "\u0416",
+ "Zacute;": "\u0179",
+ "Zcaron;": "\u017d",
+ "Zcy;": "\u0417",
+ "Zdot;": "\u017b",
+ "ZeroWidthSpace;": "\u200b",
+ "Zeta;": "\u0396",
+ "Zfr;": "\u2128",
+ "Zopf;": "\u2124",
+ "Zscr;": "\U0001d4b5",
+ "aacute": "\xe1",
+ "aacute;": "\xe1",
+ "abreve;": "\u0103",
+ "ac;": "\u223e",
+ "acE;": "\u223e\u0333",
+ "acd;": "\u223f",
+ "acirc": "\xe2",
+ "acirc;": "\xe2",
+ "acute": "\xb4",
+ "acute;": "\xb4",
+ "acy;": "\u0430",
+ "aelig": "\xe6",
+ "aelig;": "\xe6",
+ "af;": "\u2061",
+ "afr;": "\U0001d51e",
+ "agrave": "\xe0",
+ "agrave;": "\xe0",
+ "alefsym;": "\u2135",
+ "aleph;": "\u2135",
+ "alpha;": "\u03b1",
+ "amacr;": "\u0101",
+ "amalg;": "\u2a3f",
+ "amp": "&",
+ "amp;": "&",
+ "and;": "\u2227",
+ "andand;": "\u2a55",
+ "andd;": "\u2a5c",
+ "andslope;": "\u2a58",
+ "andv;": "\u2a5a",
+ "ang;": "\u2220",
+ "ange;": "\u29a4",
+ "angle;": "\u2220",
+ "angmsd;": "\u2221",
+ "angmsdaa;": "\u29a8",
+ "angmsdab;": "\u29a9",
+ "angmsdac;": "\u29aa",
+ "angmsdad;": "\u29ab",
+ "angmsdae;": "\u29ac",
+ "angmsdaf;": "\u29ad",
+ "angmsdag;": "\u29ae",
+ "angmsdah;": "\u29af",
+ "angrt;": "\u221f",
+ "angrtvb;": "\u22be",
+ "angrtvbd;": "\u299d",
+ "angsph;": "\u2222",
+ "angst;": "\xc5",
+ "angzarr;": "\u237c",
+ "aogon;": "\u0105",
+ "aopf;": "\U0001d552",
+ "ap;": "\u2248",
+ "apE;": "\u2a70",
+ "apacir;": "\u2a6f",
+ "ape;": "\u224a",
+ "apid;": "\u224b",
+ "apos;": "'",
+ "approx;": "\u2248",
+ "approxeq;": "\u224a",
+ "aring": "\xe5",
+ "aring;": "\xe5",
+ "ascr;": "\U0001d4b6",
+ "ast;": "*",
+ "asymp;": "\u2248",
+ "asympeq;": "\u224d",
+ "atilde": "\xe3",
+ "atilde;": "\xe3",
+ "auml": "\xe4",
+ "auml;": "\xe4",
+ "awconint;": "\u2233",
+ "awint;": "\u2a11",
+ "bNot;": "\u2aed",
+ "backcong;": "\u224c",
+ "backepsilon;": "\u03f6",
+ "backprime;": "\u2035",
+ "backsim;": "\u223d",
+ "backsimeq;": "\u22cd",
+ "barvee;": "\u22bd",
+ "barwed;": "\u2305",
+ "barwedge;": "\u2305",
+ "bbrk;": "\u23b5",
+ "bbrktbrk;": "\u23b6",
+ "bcong;": "\u224c",
+ "bcy;": "\u0431",
+ "bdquo;": "\u201e",
+ "becaus;": "\u2235",
+ "because;": "\u2235",
+ "bemptyv;": "\u29b0",
+ "bepsi;": "\u03f6",
+ "bernou;": "\u212c",
+ "beta;": "\u03b2",
+ "beth;": "\u2136",
+ "between;": "\u226c",
+ "bfr;": "\U0001d51f",
+ "bigcap;": "\u22c2",
+ "bigcirc;": "\u25ef",
+ "bigcup;": "\u22c3",
+ "bigodot;": "\u2a00",
+ "bigoplus;": "\u2a01",
+ "bigotimes;": "\u2a02",
+ "bigsqcup;": "\u2a06",
+ "bigstar;": "\u2605",
+ "bigtriangledown;": "\u25bd",
+ "bigtriangleup;": "\u25b3",
+ "biguplus;": "\u2a04",
+ "bigvee;": "\u22c1",
+ "bigwedge;": "\u22c0",
+ "bkarow;": "\u290d",
+ "blacklozenge;": "\u29eb",
+ "blacksquare;": "\u25aa",
+ "blacktriangle;": "\u25b4",
+ "blacktriangledown;": "\u25be",
+ "blacktriangleleft;": "\u25c2",
+ "blacktriangleright;": "\u25b8",
+ "blank;": "\u2423",
+ "blk12;": "\u2592",
+ "blk14;": "\u2591",
+ "blk34;": "\u2593",
+ "block;": "\u2588",
+ "bne;": "=\u20e5",
+ "bnequiv;": "\u2261\u20e5",
+ "bnot;": "\u2310",
+ "bopf;": "\U0001d553",
+ "bot;": "\u22a5",
+ "bottom;": "\u22a5",
+ "bowtie;": "\u22c8",
+ "boxDL;": "\u2557",
+ "boxDR;": "\u2554",
+ "boxDl;": "\u2556",
+ "boxDr;": "\u2553",
+ "boxH;": "\u2550",
+ "boxHD;": "\u2566",
+ "boxHU;": "\u2569",
+ "boxHd;": "\u2564",
+ "boxHu;": "\u2567",
+ "boxUL;": "\u255d",
+ "boxUR;": "\u255a",
+ "boxUl;": "\u255c",
+ "boxUr;": "\u2559",
+ "boxV;": "\u2551",
+ "boxVH;": "\u256c",
+ "boxVL;": "\u2563",
+ "boxVR;": "\u2560",
+ "boxVh;": "\u256b",
+ "boxVl;": "\u2562",
+ "boxVr;": "\u255f",
+ "boxbox;": "\u29c9",
+ "boxdL;": "\u2555",
+ "boxdR;": "\u2552",
+ "boxdl;": "\u2510",
+ "boxdr;": "\u250c",
+ "boxh;": "\u2500",
+ "boxhD;": "\u2565",
+ "boxhU;": "\u2568",
+ "boxhd;": "\u252c",
+ "boxhu;": "\u2534",
+ "boxminus;": "\u229f",
+ "boxplus;": "\u229e",
+ "boxtimes;": "\u22a0",
+ "boxuL;": "\u255b",
+ "boxuR;": "\u2558",
+ "boxul;": "\u2518",
+ "boxur;": "\u2514",
+ "boxv;": "\u2502",
+ "boxvH;": "\u256a",
+ "boxvL;": "\u2561",
+ "boxvR;": "\u255e",
+ "boxvh;": "\u253c",
+ "boxvl;": "\u2524",
+ "boxvr;": "\u251c",
+ "bprime;": "\u2035",
+ "breve;": "\u02d8",
+ "brvbar": "\xa6",
+ "brvbar;": "\xa6",
+ "bscr;": "\U0001d4b7",
+ "bsemi;": "\u204f",
+ "bsim;": "\u223d",
+ "bsime;": "\u22cd",
+ "bsol;": "\\",
+ "bsolb;": "\u29c5",
+ "bsolhsub;": "\u27c8",
+ "bull;": "\u2022",
+ "bullet;": "\u2022",
+ "bump;": "\u224e",
+ "bumpE;": "\u2aae",
+ "bumpe;": "\u224f",
+ "bumpeq;": "\u224f",
+ "cacute;": "\u0107",
+ "cap;": "\u2229",
+ "capand;": "\u2a44",
+ "capbrcup;": "\u2a49",
+ "capcap;": "\u2a4b",
+ "capcup;": "\u2a47",
+ "capdot;": "\u2a40",
+ "caps;": "\u2229\ufe00",
+ "caret;": "\u2041",
+ "caron;": "\u02c7",
+ "ccaps;": "\u2a4d",
+ "ccaron;": "\u010d",
+ "ccedil": "\xe7",
+ "ccedil;": "\xe7",
+ "ccirc;": "\u0109",
+ "ccups;": "\u2a4c",
+ "ccupssm;": "\u2a50",
+ "cdot;": "\u010b",
+ "cedil": "\xb8",
+ "cedil;": "\xb8",
+ "cemptyv;": "\u29b2",
+ "cent": "\xa2",
+ "cent;": "\xa2",
+ "centerdot;": "\xb7",
+ "cfr;": "\U0001d520",
+ "chcy;": "\u0447",
+ "check;": "\u2713",
+ "checkmark;": "\u2713",
+ "chi;": "\u03c7",
+ "cir;": "\u25cb",
+ "cirE;": "\u29c3",
+ "circ;": "\u02c6",
+ "circeq;": "\u2257",
+ "circlearrowleft;": "\u21ba",
+ "circlearrowright;": "\u21bb",
+ "circledR;": "\xae",
+ "circledS;": "\u24c8",
+ "circledast;": "\u229b",
+ "circledcirc;": "\u229a",
+ "circleddash;": "\u229d",
+ "cire;": "\u2257",
+ "cirfnint;": "\u2a10",
+ "cirmid;": "\u2aef",
+ "cirscir;": "\u29c2",
+ "clubs;": "\u2663",
+ "clubsuit;": "\u2663",
+ "colon;": ":",
+ "colone;": "\u2254",
+ "coloneq;": "\u2254",
+ "comma;": ",",
+ "commat;": "@",
+ "comp;": "\u2201",
+ "compfn;": "\u2218",
+ "complement;": "\u2201",
+ "complexes;": "\u2102",
+ "cong;": "\u2245",
+ "congdot;": "\u2a6d",
+ "conint;": "\u222e",
+ "copf;": "\U0001d554",
+ "coprod;": "\u2210",
+ "copy": "\xa9",
+ "copy;": "\xa9",
+ "copysr;": "\u2117",
+ "crarr;": "\u21b5",
+ "cross;": "\u2717",
+ "cscr;": "\U0001d4b8",
+ "csub;": "\u2acf",
+ "csube;": "\u2ad1",
+ "csup;": "\u2ad0",
+ "csupe;": "\u2ad2",
+ "ctdot;": "\u22ef",
+ "cudarrl;": "\u2938",
+ "cudarrr;": "\u2935",
+ "cuepr;": "\u22de",
+ "cuesc;": "\u22df",
+ "cularr;": "\u21b6",
+ "cularrp;": "\u293d",
+ "cup;": "\u222a",
+ "cupbrcap;": "\u2a48",
+ "cupcap;": "\u2a46",
+ "cupcup;": "\u2a4a",
+ "cupdot;": "\u228d",
+ "cupor;": "\u2a45",
+ "cups;": "\u222a\ufe00",
+ "curarr;": "\u21b7",
+ "curarrm;": "\u293c",
+ "curlyeqprec;": "\u22de",
+ "curlyeqsucc;": "\u22df",
+ "curlyvee;": "\u22ce",
+ "curlywedge;": "\u22cf",
+ "curren": "\xa4",
+ "curren;": "\xa4",
+ "curvearrowleft;": "\u21b6",
+ "curvearrowright;": "\u21b7",
+ "cuvee;": "\u22ce",
+ "cuwed;": "\u22cf",
+ "cwconint;": "\u2232",
+ "cwint;": "\u2231",
+ "cylcty;": "\u232d",
+ "dArr;": "\u21d3",
+ "dHar;": "\u2965",
+ "dagger;": "\u2020",
+ "daleth;": "\u2138",
+ "darr;": "\u2193",
+ "dash;": "\u2010",
+ "dashv;": "\u22a3",
+ "dbkarow;": "\u290f",
+ "dblac;": "\u02dd",
+ "dcaron;": "\u010f",
+ "dcy;": "\u0434",
+ "dd;": "\u2146",
+ "ddagger;": "\u2021",
+ "ddarr;": "\u21ca",
+ "ddotseq;": "\u2a77",
+ "deg": "\xb0",
+ "deg;": "\xb0",
+ "delta;": "\u03b4",
+ "demptyv;": "\u29b1",
+ "dfisht;": "\u297f",
+ "dfr;": "\U0001d521",
+ "dharl;": "\u21c3",
+ "dharr;": "\u21c2",
+ "diam;": "\u22c4",
+ "diamond;": "\u22c4",
+ "diamondsuit;": "\u2666",
+ "diams;": "\u2666",
+ "die;": "\xa8",
+ "digamma;": "\u03dd",
+ "disin;": "\u22f2",
+ "div;": "\xf7",
+ "divide": "\xf7",
+ "divide;": "\xf7",
+ "divideontimes;": "\u22c7",
+ "divonx;": "\u22c7",
+ "djcy;": "\u0452",
+ "dlcorn;": "\u231e",
+ "dlcrop;": "\u230d",
+ "dollar;": "$",
+ "dopf;": "\U0001d555",
+ "dot;": "\u02d9",
+ "doteq;": "\u2250",
+ "doteqdot;": "\u2251",
+ "dotminus;": "\u2238",
+ "dotplus;": "\u2214",
+ "dotsquare;": "\u22a1",
+ "doublebarwedge;": "\u2306",
+ "downarrow;": "\u2193",
+ "downdownarrows;": "\u21ca",
+ "downharpoonleft;": "\u21c3",
+ "downharpoonright;": "\u21c2",
+ "drbkarow;": "\u2910",
+ "drcorn;": "\u231f",
+ "drcrop;": "\u230c",
+ "dscr;": "\U0001d4b9",
+ "dscy;": "\u0455",
+ "dsol;": "\u29f6",
+ "dstrok;": "\u0111",
+ "dtdot;": "\u22f1",
+ "dtri;": "\u25bf",
+ "dtrif;": "\u25be",
+ "duarr;": "\u21f5",
+ "duhar;": "\u296f",
+ "dwangle;": "\u29a6",
+ "dzcy;": "\u045f",
+ "dzigrarr;": "\u27ff",
+ "eDDot;": "\u2a77",
+ "eDot;": "\u2251",
+ "eacute": "\xe9",
+ "eacute;": "\xe9",
+ "easter;": "\u2a6e",
+ "ecaron;": "\u011b",
+ "ecir;": "\u2256",
+ "ecirc": "\xea",
+ "ecirc;": "\xea",
+ "ecolon;": "\u2255",
+ "ecy;": "\u044d",
+ "edot;": "\u0117",
+ "ee;": "\u2147",
+ "efDot;": "\u2252",
+ "efr;": "\U0001d522",
+ "eg;": "\u2a9a",
+ "egrave": "\xe8",
+ "egrave;": "\xe8",
+ "egs;": "\u2a96",
+ "egsdot;": "\u2a98",
+ "el;": "\u2a99",
+ "elinters;": "\u23e7",
+ "ell;": "\u2113",
+ "els;": "\u2a95",
+ "elsdot;": "\u2a97",
+ "emacr;": "\u0113",
+ "empty;": "\u2205",
+ "emptyset;": "\u2205",
+ "emptyv;": "\u2205",
+ "emsp13;": "\u2004",
+ "emsp14;": "\u2005",
+ "emsp;": "\u2003",
+ "eng;": "\u014b",
+ "ensp;": "\u2002",
+ "eogon;": "\u0119",
+ "eopf;": "\U0001d556",
+ "epar;": "\u22d5",
+ "eparsl;": "\u29e3",
+ "eplus;": "\u2a71",
+ "epsi;": "\u03b5",
+ "epsilon;": "\u03b5",
+ "epsiv;": "\u03f5",
+ "eqcirc;": "\u2256",
+ "eqcolon;": "\u2255",
+ "eqsim;": "\u2242",
+ "eqslantgtr;": "\u2a96",
+ "eqslantless;": "\u2a95",
+ "equals;": "=",
+ "equest;": "\u225f",
+ "equiv;": "\u2261",
+ "equivDD;": "\u2a78",
+ "eqvparsl;": "\u29e5",
+ "erDot;": "\u2253",
+ "erarr;": "\u2971",
+ "escr;": "\u212f",
+ "esdot;": "\u2250",
+ "esim;": "\u2242",
+ "eta;": "\u03b7",
+ "eth": "\xf0",
+ "eth;": "\xf0",
+ "euml": "\xeb",
+ "euml;": "\xeb",
+ "euro;": "\u20ac",
+ "excl;": "!",
+ "exist;": "\u2203",
+ "expectation;": "\u2130",
+ "exponentiale;": "\u2147",
+ "fallingdotseq;": "\u2252",
+ "fcy;": "\u0444",
+ "female;": "\u2640",
+ "ffilig;": "\ufb03",
+ "fflig;": "\ufb00",
+ "ffllig;": "\ufb04",
+ "ffr;": "\U0001d523",
+ "filig;": "\ufb01",
+ "fjlig;": "fj",
+ "flat;": "\u266d",
+ "fllig;": "\ufb02",
+ "fltns;": "\u25b1",
+ "fnof;": "\u0192",
+ "fopf;": "\U0001d557",
+ "forall;": "\u2200",
+ "fork;": "\u22d4",
+ "forkv;": "\u2ad9",
+ "fpartint;": "\u2a0d",
+ "frac12": "\xbd",
+ "frac12;": "\xbd",
+ "frac13;": "\u2153",
+ "frac14": "\xbc",
+ "frac14;": "\xbc",
+ "frac15;": "\u2155",
+ "frac16;": "\u2159",
+ "frac18;": "\u215b",
+ "frac23;": "\u2154",
+ "frac25;": "\u2156",
+ "frac34": "\xbe",
+ "frac34;": "\xbe",
+ "frac35;": "\u2157",
+ "frac38;": "\u215c",
+ "frac45;": "\u2158",
+ "frac56;": "\u215a",
+ "frac58;": "\u215d",
+ "frac78;": "\u215e",
+ "frasl;": "\u2044",
+ "frown;": "\u2322",
+ "fscr;": "\U0001d4bb",
+ "gE;": "\u2267",
+ "gEl;": "\u2a8c",
+ "gacute;": "\u01f5",
+ "gamma;": "\u03b3",
+ "gammad;": "\u03dd",
+ "gap;": "\u2a86",
+ "gbreve;": "\u011f",
+ "gcirc;": "\u011d",
+ "gcy;": "\u0433",
+ "gdot;": "\u0121",
+ "ge;": "\u2265",
+ "gel;": "\u22db",
+ "geq;": "\u2265",
+ "geqq;": "\u2267",
+ "geqslant;": "\u2a7e",
+ "ges;": "\u2a7e",
+ "gescc;": "\u2aa9",
+ "gesdot;": "\u2a80",
+ "gesdoto;": "\u2a82",
+ "gesdotol;": "\u2a84",
+ "gesl;": "\u22db\ufe00",
+ "gesles;": "\u2a94",
+ "gfr;": "\U0001d524",
+ "gg;": "\u226b",
+ "ggg;": "\u22d9",
+ "gimel;": "\u2137",
+ "gjcy;": "\u0453",
+ "gl;": "\u2277",
+ "glE;": "\u2a92",
+ "gla;": "\u2aa5",
+ "glj;": "\u2aa4",
+ "gnE;": "\u2269",
+ "gnap;": "\u2a8a",
+ "gnapprox;": "\u2a8a",
+ "gne;": "\u2a88",
+ "gneq;": "\u2a88",
+ "gneqq;": "\u2269",
+ "gnsim;": "\u22e7",
+ "gopf;": "\U0001d558",
+ "grave;": "`",
+ "gscr;": "\u210a",
+ "gsim;": "\u2273",
+ "gsime;": "\u2a8e",
+ "gsiml;": "\u2a90",
+ "gt": ">",
+ "gt;": ">",
+ "gtcc;": "\u2aa7",
+ "gtcir;": "\u2a7a",
+ "gtdot;": "\u22d7",
+ "gtlPar;": "\u2995",
+ "gtquest;": "\u2a7c",
+ "gtrapprox;": "\u2a86",
+ "gtrarr;": "\u2978",
+ "gtrdot;": "\u22d7",
+ "gtreqless;": "\u22db",
+ "gtreqqless;": "\u2a8c",
+ "gtrless;": "\u2277",
+ "gtrsim;": "\u2273",
+ "gvertneqq;": "\u2269\ufe00",
+ "gvnE;": "\u2269\ufe00",
+ "hArr;": "\u21d4",
+ "hairsp;": "\u200a",
+ "half;": "\xbd",
+ "hamilt;": "\u210b",
+ "hardcy;": "\u044a",
+ "harr;": "\u2194",
+ "harrcir;": "\u2948",
+ "harrw;": "\u21ad",
+ "hbar;": "\u210f",
+ "hcirc;": "\u0125",
+ "hearts;": "\u2665",
+ "heartsuit;": "\u2665",
+ "hellip;": "\u2026",
+ "hercon;": "\u22b9",
+ "hfr;": "\U0001d525",
+ "hksearow;": "\u2925",
+ "hkswarow;": "\u2926",
+ "hoarr;": "\u21ff",
+ "homtht;": "\u223b",
+ "hookleftarrow;": "\u21a9",
+ "hookrightarrow;": "\u21aa",
+ "hopf;": "\U0001d559",
+ "horbar;": "\u2015",
+ "hscr;": "\U0001d4bd",
+ "hslash;": "\u210f",
+ "hstrok;": "\u0127",
+ "hybull;": "\u2043",
+ "hyphen;": "\u2010",
+ "iacute": "\xed",
+ "iacute;": "\xed",
+ "ic;": "\u2063",
+ "icirc": "\xee",
+ "icirc;": "\xee",
+ "icy;": "\u0438",
+ "iecy;": "\u0435",
+ "iexcl": "\xa1",
+ "iexcl;": "\xa1",
+ "iff;": "\u21d4",
+ "ifr;": "\U0001d526",
+ "igrave": "\xec",
+ "igrave;": "\xec",
+ "ii;": "\u2148",
+ "iiiint;": "\u2a0c",
+ "iiint;": "\u222d",
+ "iinfin;": "\u29dc",
+ "iiota;": "\u2129",
+ "ijlig;": "\u0133",
+ "imacr;": "\u012b",
+ "image;": "\u2111",
+ "imagline;": "\u2110",
+ "imagpart;": "\u2111",
+ "imath;": "\u0131",
+ "imof;": "\u22b7",
+ "imped;": "\u01b5",
+ "in;": "\u2208",
+ "incare;": "\u2105",
+ "infin;": "\u221e",
+ "infintie;": "\u29dd",
+ "inodot;": "\u0131",
+ "int;": "\u222b",
+ "intcal;": "\u22ba",
+ "integers;": "\u2124",
+ "intercal;": "\u22ba",
+ "intlarhk;": "\u2a17",
+ "intprod;": "\u2a3c",
+ "iocy;": "\u0451",
+ "iogon;": "\u012f",
+ "iopf;": "\U0001d55a",
+ "iota;": "\u03b9",
+ "iprod;": "\u2a3c",
+ "iquest": "\xbf",
+ "iquest;": "\xbf",
+ "iscr;": "\U0001d4be",
+ "isin;": "\u2208",
+ "isinE;": "\u22f9",
+ "isindot;": "\u22f5",
+ "isins;": "\u22f4",
+ "isinsv;": "\u22f3",
+ "isinv;": "\u2208",
+ "it;": "\u2062",
+ "itilde;": "\u0129",
+ "iukcy;": "\u0456",
+ "iuml": "\xef",
+ "iuml;": "\xef",
+ "jcirc;": "\u0135",
+ "jcy;": "\u0439",
+ "jfr;": "\U0001d527",
+ "jmath;": "\u0237",
+ "jopf;": "\U0001d55b",
+ "jscr;": "\U0001d4bf",
+ "jsercy;": "\u0458",
+ "jukcy;": "\u0454",
+ "kappa;": "\u03ba",
+ "kappav;": "\u03f0",
+ "kcedil;": "\u0137",
+ "kcy;": "\u043a",
+ "kfr;": "\U0001d528",
+ "kgreen;": "\u0138",
+ "khcy;": "\u0445",
+ "kjcy;": "\u045c",
+ "kopf;": "\U0001d55c",
+ "kscr;": "\U0001d4c0",
+ "lAarr;": "\u21da",
+ "lArr;": "\u21d0",
+ "lAtail;": "\u291b",
+ "lBarr;": "\u290e",
+ "lE;": "\u2266",
+ "lEg;": "\u2a8b",
+ "lHar;": "\u2962",
+ "lacute;": "\u013a",
+ "laemptyv;": "\u29b4",
+ "lagran;": "\u2112",
+ "lambda;": "\u03bb",
+ "lang;": "\u27e8",
+ "langd;": "\u2991",
+ "langle;": "\u27e8",
+ "lap;": "\u2a85",
+ "laquo": "\xab",
+ "laquo;": "\xab",
+ "larr;": "\u2190",
+ "larrb;": "\u21e4",
+ "larrbfs;": "\u291f",
+ "larrfs;": "\u291d",
+ "larrhk;": "\u21a9",
+ "larrlp;": "\u21ab",
+ "larrpl;": "\u2939",
+ "larrsim;": "\u2973",
+ "larrtl;": "\u21a2",
+ "lat;": "\u2aab",
+ "latail;": "\u2919",
+ "late;": "\u2aad",
+ "lates;": "\u2aad\ufe00",
+ "lbarr;": "\u290c",
+ "lbbrk;": "\u2772",
+ "lbrace;": "{",
+ "lbrack;": "[",
+ "lbrke;": "\u298b",
+ "lbrksld;": "\u298f",
+ "lbrkslu;": "\u298d",
+ "lcaron;": "\u013e",
+ "lcedil;": "\u013c",
+ "lceil;": "\u2308",
+ "lcub;": "{",
+ "lcy;": "\u043b",
+ "ldca;": "\u2936",
+ "ldquo;": "\u201c",
+ "ldquor;": "\u201e",
+ "ldrdhar;": "\u2967",
+ "ldrushar;": "\u294b",
+ "ldsh;": "\u21b2",
+ "le;": "\u2264",
+ "leftarrow;": "\u2190",
+ "leftarrowtail;": "\u21a2",
+ "leftharpoondown;": "\u21bd",
+ "leftharpoonup;": "\u21bc",
+ "leftleftarrows;": "\u21c7",
+ "leftrightarrow;": "\u2194",
+ "leftrightarrows;": "\u21c6",
+ "leftrightharpoons;": "\u21cb",
+ "leftrightsquigarrow;": "\u21ad",
+ "leftthreetimes;": "\u22cb",
+ "leg;": "\u22da",
+ "leq;": "\u2264",
+ "leqq;": "\u2266",
+ "leqslant;": "\u2a7d",
+ "les;": "\u2a7d",
+ "lescc;": "\u2aa8",
+ "lesdot;": "\u2a7f",
+ "lesdoto;": "\u2a81",
+ "lesdotor;": "\u2a83",
+ "lesg;": "\u22da\ufe00",
+ "lesges;": "\u2a93",
+ "lessapprox;": "\u2a85",
+ "lessdot;": "\u22d6",
+ "lesseqgtr;": "\u22da",
+ "lesseqqgtr;": "\u2a8b",
+ "lessgtr;": "\u2276",
+ "lesssim;": "\u2272",
+ "lfisht;": "\u297c",
+ "lfloor;": "\u230a",
+ "lfr;": "\U0001d529",
+ "lg;": "\u2276",
+ "lgE;": "\u2a91",
+ "lhard;": "\u21bd",
+ "lharu;": "\u21bc",
+ "lharul;": "\u296a",
+ "lhblk;": "\u2584",
+ "ljcy;": "\u0459",
+ "ll;": "\u226a",
+ "llarr;": "\u21c7",
+ "llcorner;": "\u231e",
+ "llhard;": "\u296b",
+ "lltri;": "\u25fa",
+ "lmidot;": "\u0140",
+ "lmoust;": "\u23b0",
+ "lmoustache;": "\u23b0",
+ "lnE;": "\u2268",
+ "lnap;": "\u2a89",
+ "lnapprox;": "\u2a89",
+ "lne;": "\u2a87",
+ "lneq;": "\u2a87",
+ "lneqq;": "\u2268",
+ "lnsim;": "\u22e6",
+ "loang;": "\u27ec",
+ "loarr;": "\u21fd",
+ "lobrk;": "\u27e6",
+ "longleftarrow;": "\u27f5",
+ "longleftrightarrow;": "\u27f7",
+ "longmapsto;": "\u27fc",
+ "longrightarrow;": "\u27f6",
+ "looparrowleft;": "\u21ab",
+ "looparrowright;": "\u21ac",
+ "lopar;": "\u2985",
+ "lopf;": "\U0001d55d",
+ "loplus;": "\u2a2d",
+ "lotimes;": "\u2a34",
+ "lowast;": "\u2217",
+ "lowbar;": "_",
+ "loz;": "\u25ca",
+ "lozenge;": "\u25ca",
+ "lozf;": "\u29eb",
+ "lpar;": "(",
+ "lparlt;": "\u2993",
+ "lrarr;": "\u21c6",
+ "lrcorner;": "\u231f",
+ "lrhar;": "\u21cb",
+ "lrhard;": "\u296d",
+ "lrm;": "\u200e",
+ "lrtri;": "\u22bf",
+ "lsaquo;": "\u2039",
+ "lscr;": "\U0001d4c1",
+ "lsh;": "\u21b0",
+ "lsim;": "\u2272",
+ "lsime;": "\u2a8d",
+ "lsimg;": "\u2a8f",
+ "lsqb;": "[",
+ "lsquo;": "\u2018",
+ "lsquor;": "\u201a",
+ "lstrok;": "\u0142",
+ "lt": "<",
+ "lt;": "<",
+ "ltcc;": "\u2aa6",
+ "ltcir;": "\u2a79",
+ "ltdot;": "\u22d6",
+ "lthree;": "\u22cb",
+ "ltimes;": "\u22c9",
+ "ltlarr;": "\u2976",
+ "ltquest;": "\u2a7b",
+ "ltrPar;": "\u2996",
+ "ltri;": "\u25c3",
+ "ltrie;": "\u22b4",
+ "ltrif;": "\u25c2",
+ "lurdshar;": "\u294a",
+ "luruhar;": "\u2966",
+ "lvertneqq;": "\u2268\ufe00",
+ "lvnE;": "\u2268\ufe00",
+ "mDDot;": "\u223a",
+ "macr": "\xaf",
+ "macr;": "\xaf",
+ "male;": "\u2642",
+ "malt;": "\u2720",
+ "maltese;": "\u2720",
+ "map;": "\u21a6",
+ "mapsto;": "\u21a6",
+ "mapstodown;": "\u21a7",
+ "mapstoleft;": "\u21a4",
+ "mapstoup;": "\u21a5",
+ "marker;": "\u25ae",
+ "mcomma;": "\u2a29",
+ "mcy;": "\u043c",
+ "mdash;": "\u2014",
+ "measuredangle;": "\u2221",
+ "mfr;": "\U0001d52a",
+ "mho;": "\u2127",
+ "micro": "\xb5",
+ "micro;": "\xb5",
+ "mid;": "\u2223",
+ "midast;": "*",
+ "midcir;": "\u2af0",
+ "middot": "\xb7",
+ "middot;": "\xb7",
+ "minus;": "\u2212",
+ "minusb;": "\u229f",
+ "minusd;": "\u2238",
+ "minusdu;": "\u2a2a",
+ "mlcp;": "\u2adb",
+ "mldr;": "\u2026",
+ "mnplus;": "\u2213",
+ "models;": "\u22a7",
+ "mopf;": "\U0001d55e",
+ "mp;": "\u2213",
+ "mscr;": "\U0001d4c2",
+ "mstpos;": "\u223e",
+ "mu;": "\u03bc",
+ "multimap;": "\u22b8",
+ "mumap;": "\u22b8",
+ "nGg;": "\u22d9\u0338",
+ "nGt;": "\u226b\u20d2",
+ "nGtv;": "\u226b\u0338",
+ "nLeftarrow;": "\u21cd",
+ "nLeftrightarrow;": "\u21ce",
+ "nLl;": "\u22d8\u0338",
+ "nLt;": "\u226a\u20d2",
+ "nLtv;": "\u226a\u0338",
+ "nRightarrow;": "\u21cf",
+ "nVDash;": "\u22af",
+ "nVdash;": "\u22ae",
+ "nabla;": "\u2207",
+ "nacute;": "\u0144",
+ "nang;": "\u2220\u20d2",
+ "nap;": "\u2249",
+ "napE;": "\u2a70\u0338",
+ "napid;": "\u224b\u0338",
+ "napos;": "\u0149",
+ "napprox;": "\u2249",
+ "natur;": "\u266e",
+ "natural;": "\u266e",
+ "naturals;": "\u2115",
+ "nbsp": "\xa0",
+ "nbsp;": "\xa0",
+ "nbump;": "\u224e\u0338",
+ "nbumpe;": "\u224f\u0338",
+ "ncap;": "\u2a43",
+ "ncaron;": "\u0148",
+ "ncedil;": "\u0146",
+ "ncong;": "\u2247",
+ "ncongdot;": "\u2a6d\u0338",
+ "ncup;": "\u2a42",
+ "ncy;": "\u043d",
+ "ndash;": "\u2013",
+ "ne;": "\u2260",
+ "neArr;": "\u21d7",
+ "nearhk;": "\u2924",
+ "nearr;": "\u2197",
+ "nearrow;": "\u2197",
+ "nedot;": "\u2250\u0338",
+ "nequiv;": "\u2262",
+ "nesear;": "\u2928",
+ "nesim;": "\u2242\u0338",
+ "nexist;": "\u2204",
+ "nexists;": "\u2204",
+ "nfr;": "\U0001d52b",
+ "ngE;": "\u2267\u0338",
+ "nge;": "\u2271",
+ "ngeq;": "\u2271",
+ "ngeqq;": "\u2267\u0338",
+ "ngeqslant;": "\u2a7e\u0338",
+ "nges;": "\u2a7e\u0338",
+ "ngsim;": "\u2275",
+ "ngt;": "\u226f",
+ "ngtr;": "\u226f",
+ "nhArr;": "\u21ce",
+ "nharr;": "\u21ae",
+ "nhpar;": "\u2af2",
+ "ni;": "\u220b",
+ "nis;": "\u22fc",
+ "nisd;": "\u22fa",
+ "niv;": "\u220b",
+ "njcy;": "\u045a",
+ "nlArr;": "\u21cd",
+ "nlE;": "\u2266\u0338",
+ "nlarr;": "\u219a",
+ "nldr;": "\u2025",
+ "nle;": "\u2270",
+ "nleftarrow;": "\u219a",
+ "nleftrightarrow;": "\u21ae",
+ "nleq;": "\u2270",
+ "nleqq;": "\u2266\u0338",
+ "nleqslant;": "\u2a7d\u0338",
+ "nles;": "\u2a7d\u0338",
+ "nless;": "\u226e",
+ "nlsim;": "\u2274",
+ "nlt;": "\u226e",
+ "nltri;": "\u22ea",
+ "nltrie;": "\u22ec",
+ "nmid;": "\u2224",
+ "nopf;": "\U0001d55f",
+ "not": "\xac",
+ "not;": "\xac",
+ "notin;": "\u2209",
+ "notinE;": "\u22f9\u0338",
+ "notindot;": "\u22f5\u0338",
+ "notinva;": "\u2209",
+ "notinvb;": "\u22f7",
+ "notinvc;": "\u22f6",
+ "notni;": "\u220c",
+ "notniva;": "\u220c",
+ "notnivb;": "\u22fe",
+ "notnivc;": "\u22fd",
+ "npar;": "\u2226",
+ "nparallel;": "\u2226",
+ "nparsl;": "\u2afd\u20e5",
+ "npart;": "\u2202\u0338",
+ "npolint;": "\u2a14",
+ "npr;": "\u2280",
+ "nprcue;": "\u22e0",
+ "npre;": "\u2aaf\u0338",
+ "nprec;": "\u2280",
+ "npreceq;": "\u2aaf\u0338",
+ "nrArr;": "\u21cf",
+ "nrarr;": "\u219b",
+ "nrarrc;": "\u2933\u0338",
+ "nrarrw;": "\u219d\u0338",
+ "nrightarrow;": "\u219b",
+ "nrtri;": "\u22eb",
+ "nrtrie;": "\u22ed",
+ "nsc;": "\u2281",
+ "nsccue;": "\u22e1",
+ "nsce;": "\u2ab0\u0338",
+ "nscr;": "\U0001d4c3",
+ "nshortmid;": "\u2224",
+ "nshortparallel;": "\u2226",
+ "nsim;": "\u2241",
+ "nsime;": "\u2244",
+ "nsimeq;": "\u2244",
+ "nsmid;": "\u2224",
+ "nspar;": "\u2226",
+ "nsqsube;": "\u22e2",
+ "nsqsupe;": "\u22e3",
+ "nsub;": "\u2284",
+ "nsubE;": "\u2ac5\u0338",
+ "nsube;": "\u2288",
+ "nsubset;": "\u2282\u20d2",
+ "nsubseteq;": "\u2288",
+ "nsubseteqq;": "\u2ac5\u0338",
+ "nsucc;": "\u2281",
+ "nsucceq;": "\u2ab0\u0338",
+ "nsup;": "\u2285",
+ "nsupE;": "\u2ac6\u0338",
+ "nsupe;": "\u2289",
+ "nsupset;": "\u2283\u20d2",
+ "nsupseteq;": "\u2289",
+ "nsupseteqq;": "\u2ac6\u0338",
+ "ntgl;": "\u2279",
+ "ntilde": "\xf1",
+ "ntilde;": "\xf1",
+ "ntlg;": "\u2278",
+ "ntriangleleft;": "\u22ea",
+ "ntrianglelefteq;": "\u22ec",
+ "ntriangleright;": "\u22eb",
+ "ntrianglerighteq;": "\u22ed",
+ "nu;": "\u03bd",
+ "num;": "#",
+ "numero;": "\u2116",
+ "numsp;": "\u2007",
+ "nvDash;": "\u22ad",
+ "nvHarr;": "\u2904",
+ "nvap;": "\u224d\u20d2",
+ "nvdash;": "\u22ac",
+ "nvge;": "\u2265\u20d2",
+ "nvgt;": ">\u20d2",
+ "nvinfin;": "\u29de",
+ "nvlArr;": "\u2902",
+ "nvle;": "\u2264\u20d2",
+ "nvlt;": "<\u20d2",
+ "nvltrie;": "\u22b4\u20d2",
+ "nvrArr;": "\u2903",
+ "nvrtrie;": "\u22b5\u20d2",
+ "nvsim;": "\u223c\u20d2",
+ "nwArr;": "\u21d6",
+ "nwarhk;": "\u2923",
+ "nwarr;": "\u2196",
+ "nwarrow;": "\u2196",
+ "nwnear;": "\u2927",
+ "oS;": "\u24c8",
+ "oacute": "\xf3",
+ "oacute;": "\xf3",
+ "oast;": "\u229b",
+ "ocir;": "\u229a",
+ "ocirc": "\xf4",
+ "ocirc;": "\xf4",
+ "ocy;": "\u043e",
+ "odash;": "\u229d",
+ "odblac;": "\u0151",
+ "odiv;": "\u2a38",
+ "odot;": "\u2299",
+ "odsold;": "\u29bc",
+ "oelig;": "\u0153",
+ "ofcir;": "\u29bf",
+ "ofr;": "\U0001d52c",
+ "ogon;": "\u02db",
+ "ograve": "\xf2",
+ "ograve;": "\xf2",
+ "ogt;": "\u29c1",
+ "ohbar;": "\u29b5",
+ "ohm;": "\u03a9",
+ "oint;": "\u222e",
+ "olarr;": "\u21ba",
+ "olcir;": "\u29be",
+ "olcross;": "\u29bb",
+ "oline;": "\u203e",
+ "olt;": "\u29c0",
+ "omacr;": "\u014d",
+ "omega;": "\u03c9",
+ "omicron;": "\u03bf",
+ "omid;": "\u29b6",
+ "ominus;": "\u2296",
+ "oopf;": "\U0001d560",
+ "opar;": "\u29b7",
+ "operp;": "\u29b9",
+ "oplus;": "\u2295",
+ "or;": "\u2228",
+ "orarr;": "\u21bb",
+ "ord;": "\u2a5d",
+ "order;": "\u2134",
+ "orderof;": "\u2134",
+ "ordf": "\xaa",
+ "ordf;": "\xaa",
+ "ordm": "\xba",
+ "ordm;": "\xba",
+ "origof;": "\u22b6",
+ "oror;": "\u2a56",
+ "orslope;": "\u2a57",
+ "orv;": "\u2a5b",
+ "oscr;": "\u2134",
+ "oslash": "\xf8",
+ "oslash;": "\xf8",
+ "osol;": "\u2298",
+ "otilde": "\xf5",
+ "otilde;": "\xf5",
+ "otimes;": "\u2297",
+ "otimesas;": "\u2a36",
+ "ouml": "\xf6",
+ "ouml;": "\xf6",
+ "ovbar;": "\u233d",
+ "par;": "\u2225",
+ "para": "\xb6",
+ "para;": "\xb6",
+ "parallel;": "\u2225",
+ "parsim;": "\u2af3",
+ "parsl;": "\u2afd",
+ "part;": "\u2202",
+ "pcy;": "\u043f",
+ "percnt;": "%",
+ "period;": ".",
+ "permil;": "\u2030",
+ "perp;": "\u22a5",
+ "pertenk;": "\u2031",
+ "pfr;": "\U0001d52d",
+ "phi;": "\u03c6",
+ "phiv;": "\u03d5",
+ "phmmat;": "\u2133",
+ "phone;": "\u260e",
+ "pi;": "\u03c0",
+ "pitchfork;": "\u22d4",
+ "piv;": "\u03d6",
+ "planck;": "\u210f",
+ "planckh;": "\u210e",
+ "plankv;": "\u210f",
+ "plus;": "+",
+ "plusacir;": "\u2a23",
+ "plusb;": "\u229e",
+ "pluscir;": "\u2a22",
+ "plusdo;": "\u2214",
+ "plusdu;": "\u2a25",
+ "pluse;": "\u2a72",
+ "plusmn": "\xb1",
+ "plusmn;": "\xb1",
+ "plussim;": "\u2a26",
+ "plustwo;": "\u2a27",
+ "pm;": "\xb1",
+ "pointint;": "\u2a15",
+ "popf;": "\U0001d561",
+ "pound": "\xa3",
+ "pound;": "\xa3",
+ "pr;": "\u227a",
+ "prE;": "\u2ab3",
+ "prap;": "\u2ab7",
+ "prcue;": "\u227c",
+ "pre;": "\u2aaf",
+ "prec;": "\u227a",
+ "precapprox;": "\u2ab7",
+ "preccurlyeq;": "\u227c",
+ "preceq;": "\u2aaf",
+ "precnapprox;": "\u2ab9",
+ "precneqq;": "\u2ab5",
+ "precnsim;": "\u22e8",
+ "precsim;": "\u227e",
+ "prime;": "\u2032",
+ "primes;": "\u2119",
+ "prnE;": "\u2ab5",
+ "prnap;": "\u2ab9",
+ "prnsim;": "\u22e8",
+ "prod;": "\u220f",
+ "profalar;": "\u232e",
+ "profline;": "\u2312",
+ "profsurf;": "\u2313",
+ "prop;": "\u221d",
+ "propto;": "\u221d",
+ "prsim;": "\u227e",
+ "prurel;": "\u22b0",
+ "pscr;": "\U0001d4c5",
+ "psi;": "\u03c8",
+ "puncsp;": "\u2008",
+ "qfr;": "\U0001d52e",
+ "qint;": "\u2a0c",
+ "qopf;": "\U0001d562",
+ "qprime;": "\u2057",
+ "qscr;": "\U0001d4c6",
+ "quaternions;": "\u210d",
+ "quatint;": "\u2a16",
+ "quest;": "?",
+ "questeq;": "\u225f",
+ "quot": "\"",
+ "quot;": "\"",
+ "rAarr;": "\u21db",
+ "rArr;": "\u21d2",
+ "rAtail;": "\u291c",
+ "rBarr;": "\u290f",
+ "rHar;": "\u2964",
+ "race;": "\u223d\u0331",
+ "racute;": "\u0155",
+ "radic;": "\u221a",
+ "raemptyv;": "\u29b3",
+ "rang;": "\u27e9",
+ "rangd;": "\u2992",
+ "range;": "\u29a5",
+ "rangle;": "\u27e9",
+ "raquo": "\xbb",
+ "raquo;": "\xbb",
+ "rarr;": "\u2192",
+ "rarrap;": "\u2975",
+ "rarrb;": "\u21e5",
+ "rarrbfs;": "\u2920",
+ "rarrc;": "\u2933",
+ "rarrfs;": "\u291e",
+ "rarrhk;": "\u21aa",
+ "rarrlp;": "\u21ac",
+ "rarrpl;": "\u2945",
+ "rarrsim;": "\u2974",
+ "rarrtl;": "\u21a3",
+ "rarrw;": "\u219d",
+ "ratail;": "\u291a",
+ "ratio;": "\u2236",
+ "rationals;": "\u211a",
+ "rbarr;": "\u290d",
+ "rbbrk;": "\u2773",
+ "rbrace;": "}",
+ "rbrack;": "]",
+ "rbrke;": "\u298c",
+ "rbrksld;": "\u298e",
+ "rbrkslu;": "\u2990",
+ "rcaron;": "\u0159",
+ "rcedil;": "\u0157",
+ "rceil;": "\u2309",
+ "rcub;": "}",
+ "rcy;": "\u0440",
+ "rdca;": "\u2937",
+ "rdldhar;": "\u2969",
+ "rdquo;": "\u201d",
+ "rdquor;": "\u201d",
+ "rdsh;": "\u21b3",
+ "real;": "\u211c",
+ "realine;": "\u211b",
+ "realpart;": "\u211c",
+ "reals;": "\u211d",
+ "rect;": "\u25ad",
+ "reg": "\xae",
+ "reg;": "\xae",
+ "rfisht;": "\u297d",
+ "rfloor;": "\u230b",
+ "rfr;": "\U0001d52f",
+ "rhard;": "\u21c1",
+ "rharu;": "\u21c0",
+ "rharul;": "\u296c",
+ "rho;": "\u03c1",
+ "rhov;": "\u03f1",
+ "rightarrow;": "\u2192",
+ "rightarrowtail;": "\u21a3",
+ "rightharpoondown;": "\u21c1",
+ "rightharpoonup;": "\u21c0",
+ "rightleftarrows;": "\u21c4",
+ "rightleftharpoons;": "\u21cc",
+ "rightrightarrows;": "\u21c9",
+ "rightsquigarrow;": "\u219d",
+ "rightthreetimes;": "\u22cc",
+ "ring;": "\u02da",
+ "risingdotseq;": "\u2253",
+ "rlarr;": "\u21c4",
+ "rlhar;": "\u21cc",
+ "rlm;": "\u200f",
+ "rmoust;": "\u23b1",
+ "rmoustache;": "\u23b1",
+ "rnmid;": "\u2aee",
+ "roang;": "\u27ed",
+ "roarr;": "\u21fe",
+ "robrk;": "\u27e7",
+ "ropar;": "\u2986",
+ "ropf;": "\U0001d563",
+ "roplus;": "\u2a2e",
+ "rotimes;": "\u2a35",
+ "rpar;": ")",
+ "rpargt;": "\u2994",
+ "rppolint;": "\u2a12",
+ "rrarr;": "\u21c9",
+ "rsaquo;": "\u203a",
+ "rscr;": "\U0001d4c7",
+ "rsh;": "\u21b1",
+ "rsqb;": "]",
+ "rsquo;": "\u2019",
+ "rsquor;": "\u2019",
+ "rthree;": "\u22cc",
+ "rtimes;": "\u22ca",
+ "rtri;": "\u25b9",
+ "rtrie;": "\u22b5",
+ "rtrif;": "\u25b8",
+ "rtriltri;": "\u29ce",
+ "ruluhar;": "\u2968",
+ "rx;": "\u211e",
+ "sacute;": "\u015b",
+ "sbquo;": "\u201a",
+ "sc;": "\u227b",
+ "scE;": "\u2ab4",
+ "scap;": "\u2ab8",
+ "scaron;": "\u0161",
+ "sccue;": "\u227d",
+ "sce;": "\u2ab0",
+ "scedil;": "\u015f",
+ "scirc;": "\u015d",
+ "scnE;": "\u2ab6",
+ "scnap;": "\u2aba",
+ "scnsim;": "\u22e9",
+ "scpolint;": "\u2a13",
+ "scsim;": "\u227f",
+ "scy;": "\u0441",
+ "sdot;": "\u22c5",
+ "sdotb;": "\u22a1",
+ "sdote;": "\u2a66",
+ "seArr;": "\u21d8",
+ "searhk;": "\u2925",
+ "searr;": "\u2198",
+ "searrow;": "\u2198",
+ "sect": "\xa7",
+ "sect;": "\xa7",
+ "semi;": ";",
+ "seswar;": "\u2929",
+ "setminus;": "\u2216",
+ "setmn;": "\u2216",
+ "sext;": "\u2736",
+ "sfr;": "\U0001d530",
+ "sfrown;": "\u2322",
+ "sharp;": "\u266f",
+ "shchcy;": "\u0449",
+ "shcy;": "\u0448",
+ "shortmid;": "\u2223",
+ "shortparallel;": "\u2225",
+ "shy": "\xad",
+ "shy;": "\xad",
+ "sigma;": "\u03c3",
+ "sigmaf;": "\u03c2",
+ "sigmav;": "\u03c2",
+ "sim;": "\u223c",
+ "simdot;": "\u2a6a",
+ "sime;": "\u2243",
+ "simeq;": "\u2243",
+ "simg;": "\u2a9e",
+ "simgE;": "\u2aa0",
+ "siml;": "\u2a9d",
+ "simlE;": "\u2a9f",
+ "simne;": "\u2246",
+ "simplus;": "\u2a24",
+ "simrarr;": "\u2972",
+ "slarr;": "\u2190",
+ "smallsetminus;": "\u2216",
+ "smashp;": "\u2a33",
+ "smeparsl;": "\u29e4",
+ "smid;": "\u2223",
+ "smile;": "\u2323",
+ "smt;": "\u2aaa",
+ "smte;": "\u2aac",
+ "smtes;": "\u2aac\ufe00",
+ "softcy;": "\u044c",
+ "sol;": "/",
+ "solb;": "\u29c4",
+ "solbar;": "\u233f",
+ "sopf;": "\U0001d564",
+ "spades;": "\u2660",
+ "spadesuit;": "\u2660",
+ "spar;": "\u2225",
+ "sqcap;": "\u2293",
+ "sqcaps;": "\u2293\ufe00",
+ "sqcup;": "\u2294",
+ "sqcups;": "\u2294\ufe00",
+ "sqsub;": "\u228f",
+ "sqsube;": "\u2291",
+ "sqsubset;": "\u228f",
+ "sqsubseteq;": "\u2291",
+ "sqsup;": "\u2290",
+ "sqsupe;": "\u2292",
+ "sqsupset;": "\u2290",
+ "sqsupseteq;": "\u2292",
+ "squ;": "\u25a1",
+ "square;": "\u25a1",
+ "squarf;": "\u25aa",
+ "squf;": "\u25aa",
+ "srarr;": "\u2192",
+ "sscr;": "\U0001d4c8",
+ "ssetmn;": "\u2216",
+ "ssmile;": "\u2323",
+ "sstarf;": "\u22c6",
+ "star;": "\u2606",
+ "starf;": "\u2605",
+ "straightepsilon;": "\u03f5",
+ "straightphi;": "\u03d5",
+ "strns;": "\xaf",
+ "sub;": "\u2282",
+ "subE;": "\u2ac5",
+ "subdot;": "\u2abd",
+ "sube;": "\u2286",
+ "subedot;": "\u2ac3",
+ "submult;": "\u2ac1",
+ "subnE;": "\u2acb",
+ "subne;": "\u228a",
+ "subplus;": "\u2abf",
+ "subrarr;": "\u2979",
+ "subset;": "\u2282",
+ "subseteq;": "\u2286",
+ "subseteqq;": "\u2ac5",
+ "subsetneq;": "\u228a",
+ "subsetneqq;": "\u2acb",
+ "subsim;": "\u2ac7",
+ "subsub;": "\u2ad5",
+ "subsup;": "\u2ad3",
+ "succ;": "\u227b",
+ "succapprox;": "\u2ab8",
+ "succcurlyeq;": "\u227d",
+ "succeq;": "\u2ab0",
+ "succnapprox;": "\u2aba",
+ "succneqq;": "\u2ab6",
+ "succnsim;": "\u22e9",
+ "succsim;": "\u227f",
+ "sum;": "\u2211",
+ "sung;": "\u266a",
+ "sup1": "\xb9",
+ "sup1;": "\xb9",
+ "sup2": "\xb2",
+ "sup2;": "\xb2",
+ "sup3": "\xb3",
+ "sup3;": "\xb3",
+ "sup;": "\u2283",
+ "supE;": "\u2ac6",
+ "supdot;": "\u2abe",
+ "supdsub;": "\u2ad8",
+ "supe;": "\u2287",
+ "supedot;": "\u2ac4",
+ "suphsol;": "\u27c9",
+ "suphsub;": "\u2ad7",
+ "suplarr;": "\u297b",
+ "supmult;": "\u2ac2",
+ "supnE;": "\u2acc",
+ "supne;": "\u228b",
+ "supplus;": "\u2ac0",
+ "supset;": "\u2283",
+ "supseteq;": "\u2287",
+ "supseteqq;": "\u2ac6",
+ "supsetneq;": "\u228b",
+ "supsetneqq;": "\u2acc",
+ "supsim;": "\u2ac8",
+ "supsub;": "\u2ad4",
+ "supsup;": "\u2ad6",
+ "swArr;": "\u21d9",
+ "swarhk;": "\u2926",
+ "swarr;": "\u2199",
+ "swarrow;": "\u2199",
+ "swnwar;": "\u292a",
+ "szlig": "\xdf",
+ "szlig;": "\xdf",
+ "target;": "\u2316",
+ "tau;": "\u03c4",
+ "tbrk;": "\u23b4",
+ "tcaron;": "\u0165",
+ "tcedil;": "\u0163",
+ "tcy;": "\u0442",
+ "tdot;": "\u20db",
+ "telrec;": "\u2315",
+ "tfr;": "\U0001d531",
+ "there4;": "\u2234",
+ "therefore;": "\u2234",
+ "theta;": "\u03b8",
+ "thetasym;": "\u03d1",
+ "thetav;": "\u03d1",
+ "thickapprox;": "\u2248",
+ "thicksim;": "\u223c",
+ "thinsp;": "\u2009",
+ "thkap;": "\u2248",
+ "thksim;": "\u223c",
+ "thorn": "\xfe",
+ "thorn;": "\xfe",
+ "tilde;": "\u02dc",
+ "times": "\xd7",
+ "times;": "\xd7",
+ "timesb;": "\u22a0",
+ "timesbar;": "\u2a31",
+ "timesd;": "\u2a30",
+ "tint;": "\u222d",
+ "toea;": "\u2928",
+ "top;": "\u22a4",
+ "topbot;": "\u2336",
+ "topcir;": "\u2af1",
+ "topf;": "\U0001d565",
+ "topfork;": "\u2ada",
+ "tosa;": "\u2929",
+ "tprime;": "\u2034",
+ "trade;": "\u2122",
+ "triangle;": "\u25b5",
+ "triangledown;": "\u25bf",
+ "triangleleft;": "\u25c3",
+ "trianglelefteq;": "\u22b4",
+ "triangleq;": "\u225c",
+ "triangleright;": "\u25b9",
+ "trianglerighteq;": "\u22b5",
+ "tridot;": "\u25ec",
+ "trie;": "\u225c",
+ "triminus;": "\u2a3a",
+ "triplus;": "\u2a39",
+ "trisb;": "\u29cd",
+ "tritime;": "\u2a3b",
+ "trpezium;": "\u23e2",
+ "tscr;": "\U0001d4c9",
+ "tscy;": "\u0446",
+ "tshcy;": "\u045b",
+ "tstrok;": "\u0167",
+ "twixt;": "\u226c",
+ "twoheadleftarrow;": "\u219e",
+ "twoheadrightarrow;": "\u21a0",
+ "uArr;": "\u21d1",
+ "uHar;": "\u2963",
+ "uacute": "\xfa",
+ "uacute;": "\xfa",
+ "uarr;": "\u2191",
+ "ubrcy;": "\u045e",
+ "ubreve;": "\u016d",
+ "ucirc": "\xfb",
+ "ucirc;": "\xfb",
+ "ucy;": "\u0443",
+ "udarr;": "\u21c5",
+ "udblac;": "\u0171",
+ "udhar;": "\u296e",
+ "ufisht;": "\u297e",
+ "ufr;": "\U0001d532",
+ "ugrave": "\xf9",
+ "ugrave;": "\xf9",
+ "uharl;": "\u21bf",
+ "uharr;": "\u21be",
+ "uhblk;": "\u2580",
+ "ulcorn;": "\u231c",
+ "ulcorner;": "\u231c",
+ "ulcrop;": "\u230f",
+ "ultri;": "\u25f8",
+ "umacr;": "\u016b",
+ "uml": "\xa8",
+ "uml;": "\xa8",
+ "uogon;": "\u0173",
+ "uopf;": "\U0001d566",
+ "uparrow;": "\u2191",
+ "updownarrow;": "\u2195",
+ "upharpoonleft;": "\u21bf",
+ "upharpoonright;": "\u21be",
+ "uplus;": "\u228e",
+ "upsi;": "\u03c5",
+ "upsih;": "\u03d2",
+ "upsilon;": "\u03c5",
+ "upuparrows;": "\u21c8",
+ "urcorn;": "\u231d",
+ "urcorner;": "\u231d",
+ "urcrop;": "\u230e",
+ "uring;": "\u016f",
+ "urtri;": "\u25f9",
+ "uscr;": "\U0001d4ca",
+ "utdot;": "\u22f0",
+ "utilde;": "\u0169",
+ "utri;": "\u25b5",
+ "utrif;": "\u25b4",
+ "uuarr;": "\u21c8",
+ "uuml": "\xfc",
+ "uuml;": "\xfc",
+ "uwangle;": "\u29a7",
+ "vArr;": "\u21d5",
+ "vBar;": "\u2ae8",
+ "vBarv;": "\u2ae9",
+ "vDash;": "\u22a8",
+ "vangrt;": "\u299c",
+ "varepsilon;": "\u03f5",
+ "varkappa;": "\u03f0",
+ "varnothing;": "\u2205",
+ "varphi;": "\u03d5",
+ "varpi;": "\u03d6",
+ "varpropto;": "\u221d",
+ "varr;": "\u2195",
+ "varrho;": "\u03f1",
+ "varsigma;": "\u03c2",
+ "varsubsetneq;": "\u228a\ufe00",
+ "varsubsetneqq;": "\u2acb\ufe00",
+ "varsupsetneq;": "\u228b\ufe00",
+ "varsupsetneqq;": "\u2acc\ufe00",
+ "vartheta;": "\u03d1",
+ "vartriangleleft;": "\u22b2",
+ "vartriangleright;": "\u22b3",
+ "vcy;": "\u0432",
+ "vdash;": "\u22a2",
+ "vee;": "\u2228",
+ "veebar;": "\u22bb",
+ "veeeq;": "\u225a",
+ "vellip;": "\u22ee",
+ "verbar;": "|",
+ "vert;": "|",
+ "vfr;": "\U0001d533",
+ "vltri;": "\u22b2",
+ "vnsub;": "\u2282\u20d2",
+ "vnsup;": "\u2283\u20d2",
+ "vopf;": "\U0001d567",
+ "vprop;": "\u221d",
+ "vrtri;": "\u22b3",
+ "vscr;": "\U0001d4cb",
+ "vsubnE;": "\u2acb\ufe00",
+ "vsubne;": "\u228a\ufe00",
+ "vsupnE;": "\u2acc\ufe00",
+ "vsupne;": "\u228b\ufe00",
+ "vzigzag;": "\u299a",
+ "wcirc;": "\u0175",
+ "wedbar;": "\u2a5f",
+ "wedge;": "\u2227",
+ "wedgeq;": "\u2259",
+ "weierp;": "\u2118",
+ "wfr;": "\U0001d534",
+ "wopf;": "\U0001d568",
+ "wp;": "\u2118",
+ "wr;": "\u2240",
+ "wreath;": "\u2240",
+ "wscr;": "\U0001d4cc",
+ "xcap;": "\u22c2",
+ "xcirc;": "\u25ef",
+ "xcup;": "\u22c3",
+ "xdtri;": "\u25bd",
+ "xfr;": "\U0001d535",
+ "xhArr;": "\u27fa",
+ "xharr;": "\u27f7",
+ "xi;": "\u03be",
+ "xlArr;": "\u27f8",
+ "xlarr;": "\u27f5",
+ "xmap;": "\u27fc",
+ "xnis;": "\u22fb",
+ "xodot;": "\u2a00",
+ "xopf;": "\U0001d569",
+ "xoplus;": "\u2a01",
+ "xotime;": "\u2a02",
+ "xrArr;": "\u27f9",
+ "xrarr;": "\u27f6",
+ "xscr;": "\U0001d4cd",
+ "xsqcup;": "\u2a06",
+ "xuplus;": "\u2a04",
+ "xutri;": "\u25b3",
+ "xvee;": "\u22c1",
+ "xwedge;": "\u22c0",
+ "yacute": "\xfd",
+ "yacute;": "\xfd",
+ "yacy;": "\u044f",
+ "ycirc;": "\u0177",
+ "ycy;": "\u044b",
+ "yen": "\xa5",
+ "yen;": "\xa5",
+ "yfr;": "\U0001d536",
+ "yicy;": "\u0457",
+ "yopf;": "\U0001d56a",
+ "yscr;": "\U0001d4ce",
+ "yucy;": "\u044e",
+ "yuml": "\xff",
+ "yuml;": "\xff",
+ "zacute;": "\u017a",
+ "zcaron;": "\u017e",
+ "zcy;": "\u0437",
+ "zdot;": "\u017c",
+ "zeetrf;": "\u2128",
+ "zeta;": "\u03b6",
+ "zfr;": "\U0001d537",
+ "zhcy;": "\u0436",
+ "zigrarr;": "\u21dd",
+ "zopf;": "\U0001d56b",
+ "zscr;": "\U0001d4cf",
+ "zwj;": "\u200d",
+ "zwnj;": "\u200c",
+}
+
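Note the key convention in the entities table: names keep their trailing ";", and only a legacy subset also appears without one, so lookups use the raw name as consumed from the input. A quick illustration straight from the entries above:

    from pip._vendor.html5lib.constants import entities

    assert entities["amp;"] == "&"
    assert entities["amp"] == "&"            # legacy form, no semicolon
    assert entities["Longrightarrow;"] == "\u27f9"
    assert "Longrightarrow" not in entities  # newer names require the ";"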
+replacementCharacters = {
+ 0x0: "\uFFFD",
+ 0x0d: "\u000D",
+ 0x80: "\u20AC",
+ 0x81: "\u0081",
+ 0x82: "\u201A",
+ 0x83: "\u0192",
+ 0x84: "\u201E",
+ 0x85: "\u2026",
+ 0x86: "\u2020",
+ 0x87: "\u2021",
+ 0x88: "\u02C6",
+ 0x89: "\u2030",
+ 0x8A: "\u0160",
+ 0x8B: "\u2039",
+ 0x8C: "\u0152",
+ 0x8D: "\u008D",
+ 0x8E: "\u017D",
+ 0x8F: "\u008F",
+ 0x90: "\u0090",
+ 0x91: "\u2018",
+ 0x92: "\u2019",
+ 0x93: "\u201C",
+ 0x94: "\u201D",
+ 0x95: "\u2022",
+ 0x96: "\u2013",
+ 0x97: "\u2014",
+ 0x98: "\u02DC",
+ 0x99: "\u2122",
+ 0x9A: "\u0161",
+ 0x9B: "\u203A",
+ 0x9C: "\u0153",
+ 0x9D: "\u009D",
+ 0x9E: "\u017E",
+ 0x9F: "\u0178",
+}
+
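replacementCharacters covers the same 0x80-0x9F range as entitiesWindows1252 but is keyed by codepoint rather than by index, and adds the NUL-to-U+FFFD rule. Two lookups taken directly from the table:

    from pip._vendor.html5lib.constants import replacementCharacters

    assert replacementCharacters[0x00] == "\ufffd"  # NUL never passes through
    assert replacementCharacters[0x93] == "\u201c"  # LEFT DOUBLE QUOTATION MARK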
+tokenTypes = {
+ "Doctype": 0,
+ "Characters": 1,
+ "SpaceCharacters": 2,
+ "StartTag": 3,
+ "EndTag": 4,
+ "EmptyTag": 5,
+ "Comment": 6,
+ "ParseError": 7
+}
+
+tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"],
+ tokenTypes["EmptyTag"]])
+
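Token types are small integers so that consumers can dispatch cheaply, and tagTokenTypes collapses "is this any kind of tag?" into one set test. A sketch with a hand-built stand-in token (real tokens carry more fields):

    from pip._vendor.html5lib.constants import tokenTypes, tagTokenTypes

    token = {"type": tokenTypes["StartTag"], "name": "br", "data": {}}
    assert token["type"] in tagTokenTypes          # start, end, or empty tag
    assert tokenTypes["Comment"] not in tagTokenTypes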
+
+prefixes = dict([(v, k) for k, v in namespaces.items()])
+prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
+
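The explicit MathML override is needed because a plain inversion of the namespaces table (defined earlier in this module, which, assuming the upstream layout, keys that URL under "mathml") would surface the wrong spelling; the patch pins it back to the conventional "math" prefix. A small check, under that assumption:

    from pip._vendor.html5lib.constants import namespaces, prefixes

    assert prefixes[namespaces["svg"]] == "svg"
    assert prefixes["http://www.w3.org/1998/Math/MathML"] == "math"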
+
+class DataLossWarning(UserWarning):
+ """Raised when the current tree is unable to represent the input data"""
+ pass
+
+
+class _ReparseException(Exception):
+ pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..742a8027
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc
new file mode 100644
index 00000000..6f9a0050
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc
new file mode 100644
index 00000000..75c79173
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc
new file mode 100644
index 00000000..e98a89f6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc
new file mode 100644
index 00000000..63b7ddb5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc
new file mode 100644
index 00000000..c6090666
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc
new file mode 100644
index 00000000..985e92e0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc
new file mode 100644
index 00000000..86dc6bd7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py
new file mode 100644
index 00000000..5ba926e3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py
@@ -0,0 +1,29 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import base
+
+from collections import OrderedDict
+
+
+def _attr_key(attr):
+ """Return an appropriate key for an attribute for sorting
+
+ Attributes have a namespace that can be either ``None`` or a string. We
+ can't compare the two because they're different types, so we convert
+ ``None`` to an empty string first.
+
+ """
+ return (attr[0][0] or ''), attr[0][1]
+
+
+class Filter(base.Filter):
+ """Alphabetizes attributes for elements"""
+ def __iter__(self):
+ for token in base.Filter.__iter__(self):
+ if token["type"] in ("StartTag", "EmptyTag"):
+ attrs = OrderedDict()
+ for name, value in sorted(token["data"].items(),
+ key=_attr_key):
+ attrs[name] = value
+ token["data"] = attrs
+ yield token
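
In a full pipeline this filter sits between a tree walker and the serializer. A minimal usage sketch (written against the de-vendored `html5lib` import path for brevity; inside pip the prefix is `pip._vendor.html5lib`):

    import html5lib
    from html5lib.filters.alphabeticalattributes import Filter as AlphaFilter
    from html5lib.serializer import HTMLSerializer

    dom = html5lib.parse('<p id="b" class="a">hi</p>')
    stream = AlphaFilter(html5lib.getTreeWalker("etree")(dom))
    # attributes now serialize in sorted order: class before id
    print(HTMLSerializer().render(stream))

The serializer can also enable this itself via its `alphabetical_attributes` option.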
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py
new file mode 100644
index 00000000..c7dbaed0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py
@@ -0,0 +1,12 @@
+from __future__ import absolute_import, division, unicode_literals
+
+
+class Filter(object):
+ def __init__(self, source):
+ self.source = source
+
+ def __iter__(self):
+ return iter(self.source)
+
+ def __getattr__(self, name):
+ return getattr(self.source, name)
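
This tiny base class is what makes the filters composable: it wraps any token stream, `__iter__` delegates to the source, and `__getattr__` forwards everything else, so stacked filters still expose the underlying tree walker's attributes. A toy subclass, as a sketch:

    from html5lib.filters import base

    class DropComments(base.Filter):
        """Toy filter: discards Comment tokens, passes the rest through."""
        def __iter__(self):
            for token in base.Filter.__iter__(self):
                if token["type"] != "Comment":
                    yield token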
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py
new file mode 100644
index 00000000..aefb5c84
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py
@@ -0,0 +1,73 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import base
+
+
+class Filter(base.Filter):
+ """Injects ``<meta charset=ENCODING>`` tag into head of document"""
+ def __init__(self, source, encoding):
+ """Creates a Filter
+
+ :arg source: the source token stream
+
+ :arg encoding: the encoding to set
+
+ """
+ base.Filter.__init__(self, source)
+ self.encoding = encoding
+
+ def __iter__(self):
+ state = "pre_head"
+ meta_found = (self.encoding is None)
+ pending = []
+
+ for token in base.Filter.__iter__(self):
+ type = token["type"]
+ if type == "StartTag":
+ if token["name"].lower() == "head":
+ state = "in_head"
+
+ elif type == "EmptyTag":
+ if token["name"].lower() == "meta":
+ # replace charset with actual encoding
+ has_http_equiv_content_type = False
+ for (namespace, name), value in token["data"].items():
+ if namespace is not None:
+ continue
+ elif name.lower() == 'charset':
+ token["data"][(namespace, name)] = self.encoding
+ meta_found = True
+ break
+ elif name == 'http-equiv' and value.lower() == 'content-type':
+ has_http_equiv_content_type = True
+ else:
+ if has_http_equiv_content_type and (None, "content") in token["data"]:
+ token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
+ meta_found = True
+
+ elif token["name"].lower() == "head" and not meta_found:
+ # insert meta into empty head
+ yield {"type": "StartTag", "name": "head",
+ "data": token["data"]}
+ yield {"type": "EmptyTag", "name": "meta",
+ "data": {(None, "charset"): self.encoding}}
+ yield {"type": "EndTag", "name": "head"}
+ meta_found = True
+ continue
+
+ elif type == "EndTag":
+ if token["name"].lower() == "head" and pending:
+ # insert meta into head (if necessary) and flush pending queue
+ yield pending.pop(0)
+ if not meta_found:
+ yield {"type": "EmptyTag", "name": "meta",
+ "data": {(None, "charset"): self.encoding}}
+ while pending:
+ yield pending.pop(0)
+ meta_found = True
+ state = "post_head"
+
+ if state == "in_head":
+ pending.append(token)
+ else:
+ yield token
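
A minimal sketch of the state machine in use (de-vendored import path): parse a document with an empty head, inject an encoding, and serialize without omitting optional tags so the injected tag is visible:

    import html5lib
    from html5lib.filters.inject_meta_charset import Filter as MetaFilter
    from html5lib.serializer import HTMLSerializer

    dom = html5lib.parse("<html><head></head><body>hi</body></html>")
    stream = MetaFilter(html5lib.getTreeWalker("etree")(dom), "utf-8")
    # the rendered head should now contain <meta charset=utf-8>
    print(HTMLSerializer(omit_optional_tags=False).render(stream))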
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py
new file mode 100644
index 00000000..fcc07eec
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py
@@ -0,0 +1,93 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from pip._vendor.six import text_type
+
+from . import base
+from ..constants import namespaces, voidElements
+
+from ..constants import spaceCharacters
+spaceCharacters = "".join(spaceCharacters)
+
+
+class Filter(base.Filter):
+ """Lints the token stream for errors
+
+ If it finds any errors, it'll raise an ``AssertionError``.
+
+ """
+ def __init__(self, source, require_matching_tags=True):
+ """Creates a Filter
+
+ :arg source: the source token stream
+
+ :arg require_matching_tags: whether or not to require matching tags
+
+ """
+ super(Filter, self).__init__(source)
+ self.require_matching_tags = require_matching_tags
+
+ def __iter__(self):
+ open_elements = []
+ for token in base.Filter.__iter__(self):
+ type = token["type"]
+ if type in ("StartTag", "EmptyTag"):
+ namespace = token["namespace"]
+ name = token["name"]
+ assert namespace is None or isinstance(namespace, text_type)
+ assert namespace != ""
+ assert isinstance(name, text_type)
+ assert name != ""
+ assert isinstance(token["data"], dict)
+ if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+ assert type == "EmptyTag"
+ else:
+ assert type == "StartTag"
+ if type == "StartTag" and self.require_matching_tags:
+ open_elements.append((namespace, name))
+ for (namespace, name), value in token["data"].items():
+ assert namespace is None or isinstance(namespace, text_type)
+ assert namespace != ""
+ assert isinstance(name, text_type)
+ assert name != ""
+ assert isinstance(value, text_type)
+
+ elif type == "EndTag":
+ namespace = token["namespace"]
+ name = token["name"]
+ assert namespace is None or isinstance(namespace, text_type)
+ assert namespace != ""
+ assert isinstance(name, text_type)
+ assert name != ""
+ if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+ assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
+ elif self.require_matching_tags:
+ start = open_elements.pop()
+ assert start == (namespace, name)
+
+ elif type == "Comment":
+ data = token["data"]
+ assert isinstance(data, text_type)
+
+ elif type in ("Characters", "SpaceCharacters"):
+ data = token["data"]
+ assert isinstance(data, text_type)
+ assert data != ""
+ if type == "SpaceCharacters":
+ assert data.strip(spaceCharacters) == ""
+
+ elif type == "Doctype":
+ name = token["name"]
+ assert name is None or isinstance(name, text_type)
+ assert token["publicId"] is None or isinstance(name, text_type)
+ assert token["systemId"] is None or isinstance(name, text_type)
+
+ elif type == "Entity":
+ assert isinstance(token["name"], text_type)
+
+ elif type == "SerializerError":
+ assert isinstance(token["data"], text_type)
+
+ else:
+ assert False, "Unknown token type: %(type)s" % {"type": type}
+
+ yield token
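
Because the checks are plain `assert` statements, this filter is a development aid: wrap a hand-built token stream with it and malformed tokens surface as `AssertionError`. A sketch:

    from html5lib.filters.lint import Filter as LintFilter

    def bad_stream():
        yield {"type": "StartTag", "namespace": None, "name": "div", "data": {}}
        # end tag does not match the open element; lint should trip on this
        yield {"type": "EndTag", "namespace": None, "name": "span"}

    try:
        list(LintFilter(bad_stream()))
    except AssertionError:
        print("lint rejected the mismatched end tag")

Note the checks vanish under `python -O`, which strips asserts.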
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py
new file mode 100644
index 00000000..4a865012
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py
@@ -0,0 +1,207 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from . import base
+
+
+class Filter(base.Filter):
+ """Removes optional tags from the token stream"""
+ def slider(self):
+ previous1 = previous2 = None
+ for token in self.source:
+ if previous1 is not None:
+ yield previous2, previous1, token
+ previous2 = previous1
+ previous1 = token
+ if previous1 is not None:
+ yield previous2, previous1, None
+
+ def __iter__(self):
+ for previous, token, next in self.slider():
+ type = token["type"]
+ if type == "StartTag":
+ if (token["data"] or
+ not self.is_optional_start(token["name"], previous, next)):
+ yield token
+ elif type == "EndTag":
+ if not self.is_optional_end(token["name"], next):
+ yield token
+ else:
+ yield token
+
+ def is_optional_start(self, tagname, previous, next):
+ type = next and next["type"] or None
+ if tagname == 'html':
+ # An html element's start tag may be omitted if the first thing
+ # inside the html element is not a space character or a comment.
+ return type not in ("Comment", "SpaceCharacters")
+ elif tagname == 'head':
+ # A head element's start tag may be omitted if the first thing
+ # inside the head element is an element.
+ # XXX: we also omit the start tag if the head element is empty
+ if type in ("StartTag", "EmptyTag"):
+ return True
+ elif type == "EndTag":
+ return next["name"] == "head"
+ elif tagname == 'body':
+ # A body element's start tag may be omitted if the first thing
+ # inside the body element is not a space character or a comment,
+ # except if the first thing inside the body element is a script
+ # or style element and the node immediately preceding the body
+ # element is a head element whose end tag has been omitted.
+ if type in ("Comment", "SpaceCharacters"):
+ return False
+ elif type == "StartTag":
+ # XXX: we do not look at the preceding event, so we never omit
+ # the body element's start tag if it's followed by a script or
+ # a style element.
+ return next["name"] not in ('script', 'style')
+ else:
+ return True
+ elif tagname == 'colgroup':
+ # A colgroup element's start tag may be omitted if the first thing
+ # inside the colgroup element is a col element, and if the element
+ # is not immediately preceded by another colgroup element whose
+ # end tag has been omitted.
+ if type in ("StartTag", "EmptyTag"):
+ # XXX: we do not look at the preceding event, so instead we never
+ # omit the colgroup element's end tag when it is immediately
+ # followed by another colgroup element. See is_optional_end.
+ return next["name"] == "col"
+ else:
+ return False
+ elif tagname == 'tbody':
+ # A tbody element's start tag may be omitted if the first thing
+ # inside the tbody element is a tr element, and if the element is
+ # not immediately preceded by a tbody, thead, or tfoot element
+ # whose end tag has been omitted.
+ if type == "StartTag":
+ # omit the thead and tfoot elements' end tag when they are
+ # immediately followed by a tbody element. See is_optional_end.
+ if previous and previous['type'] == 'EndTag' and \
+ previous['name'] in ('tbody', 'thead', 'tfoot'):
+ return False
+ return next["name"] == 'tr'
+ else:
+ return False
+ return False
+
+ def is_optional_end(self, tagname, next):
+ type = next and next["type"] or None
+ if tagname in ('html', 'head', 'body'):
+ # An html element's end tag may be omitted if the html element
+ # is not immediately followed by a space character or a comment.
+ return type not in ("Comment", "SpaceCharacters")
+ elif tagname in ('li', 'optgroup', 'tr'):
+ # A li element's end tag may be omitted if the li element is
+ # immediately followed by another li element or if there is
+ # no more content in the parent element.
+ # An optgroup element's end tag may be omitted if the optgroup
+ # element is immediately followed by another optgroup element,
+ # or if there is no more content in the parent element.
+ # A tr element's end tag may be omitted if the tr element is
+ # immediately followed by another tr element, or if there is
+ # no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] == tagname
+ else:
+ return type == "EndTag" or type is None
+ elif tagname in ('dt', 'dd'):
+ # A dt element's end tag may be omitted if the dt element is
+ # immediately followed by another dt element or a dd element.
+ # A dd element's end tag may be omitted if the dd element is
+ # immediately followed by another dd element or a dt element,
+ # or if there is no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] in ('dt', 'dd')
+ elif tagname == 'dd':
+ return type == "EndTag" or type is None
+ else:
+ return False
+ elif tagname == 'p':
+ # A p element's end tag may be omitted if the p element is
+ # immediately followed by an address, article, aside,
+ # blockquote, datagrid, dialog, dir, div, dl, fieldset,
+ # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
+ # nav, ol, p, pre, section, table, or ul, element, or if
+ # there is no more content in the parent element.
+ if type in ("StartTag", "EmptyTag"):
+ return next["name"] in ('address', 'article', 'aside',
+ 'blockquote', 'datagrid', 'dialog',
+ 'dir', 'div', 'dl', 'fieldset', 'footer',
+ 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
+ 'header', 'hr', 'menu', 'nav', 'ol',
+ 'p', 'pre', 'section', 'table', 'ul')
+ else:
+ return type == "EndTag" or type is None
+ elif tagname == 'option':
+ # An option element's end tag may be omitted if the option
+ # element is immediately followed by another option element,
+ # or if it is immediately followed by an optgroup
+ # element, or if there is no more content in the parent
+ # element.
+ if type == "StartTag":
+ return next["name"] in ('option', 'optgroup')
+ else:
+ return type == "EndTag" or type is None
+ elif tagname in ('rt', 'rp'):
+ # An rt element's end tag may be omitted if the rt element is
+ # immediately followed by an rt or rp element, or if there is
+ # no more content in the parent element.
+ # An rp element's end tag may be omitted if the rp element is
+ # immediately followed by an rt or rp element, or if there is
+ # no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] in ('rt', 'rp')
+ else:
+ return type == "EndTag" or type is None
+ elif tagname == 'colgroup':
+ # A colgroup element's end tag may be omitted if the colgroup
+ # element is not immediately followed by a space character or
+ # a comment.
+ if type in ("Comment", "SpaceCharacters"):
+ return False
+ elif type == "StartTag":
+ # XXX: we also look for an immediately following colgroup
+ # element. See is_optional_start.
+ return next["name"] != 'colgroup'
+ else:
+ return True
+ elif tagname in ('thead', 'tbody'):
+ # A thead element's end tag may be omitted if the thead element
+ # is immediately followed by a tbody or tfoot element.
+ # A tbody element's end tag may be omitted if the tbody element
+ # is immediately followed by a tbody or tfoot element, or if
+ # there is no more content in the parent element.
+ # A tfoot element's end tag may be omitted if the tfoot element
+ # is immediately followed by a tbody element, or if there is no
+ # more content in the parent element.
+ # XXX: we never omit the end tag when the following element is
+ # a tbody. See is_optional_start.
+ if type == "StartTag":
+ return next["name"] in ['tbody', 'tfoot']
+ elif tagname == 'tbody':
+ return type == "EndTag" or type is None
+ else:
+ return False
+ elif tagname == 'tfoot':
+ # A tfoot element's end tag may be omitted if the tfoot element
+ # is immediately followed by a tbody element, or if there is no
+ # more content in the parent element.
+ # XXX: we never omit the end tag when the following element is
+ # a tbody. See is_optional_start.
+ if type == "StartTag":
+ return next["name"] == 'tbody'
+ else:
+ return type == "EndTag" or type is None
+ elif tagname in ('td', 'th'):
+ # A td element's end tag may be omitted if the td element is
+ # immediately followed by a td or th element, or if there is
+ # no more content in the parent element.
+ # A th element's end tag may be omitted if the th element is
+ # immediately followed by a td or th element, or if there is
+ # no more content in the parent element.
+ if type == "StartTag":
+ return next["name"] in ('td', 'th')
+ else:
+ return type == "EndTag" or type is None
+ return False
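
The serializer applies this filter automatically when its `omit_optional_tags` option is true (the default), so round-tripping a table through html5lib drops the end tags the spec allows to be implied. A sketch (de-vendored import path; output shown approximately):

    import html5lib
    from html5lib.serializer import HTMLSerializer

    frag = html5lib.parseFragment(
        "<table><tbody><tr><td>1</td><td>2</td></tr></tbody></table>")
    walker = html5lib.getTreeWalker("etree")
    # roughly: <table><tr><td>1<td>2</table>
    print(HTMLSerializer(omit_optional_tags=True).render(walker(frag)))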
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py
new file mode 100644
index 00000000..af8e77b8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py
@@ -0,0 +1,896 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import re
+from xml.sax.saxutils import escape, unescape
+
+from pip._vendor.six.moves import urllib_parse as urlparse
+
+from . import base
+from ..constants import namespaces, prefixes
+
+__all__ = ["Filter"]
+
+
+allowed_elements = frozenset((
+ (namespaces['html'], 'a'),
+ (namespaces['html'], 'abbr'),
+ (namespaces['html'], 'acronym'),
+ (namespaces['html'], 'address'),
+ (namespaces['html'], 'area'),
+ (namespaces['html'], 'article'),
+ (namespaces['html'], 'aside'),
+ (namespaces['html'], 'audio'),
+ (namespaces['html'], 'b'),
+ (namespaces['html'], 'big'),
+ (namespaces['html'], 'blockquote'),
+ (namespaces['html'], 'br'),
+ (namespaces['html'], 'button'),
+ (namespaces['html'], 'canvas'),
+ (namespaces['html'], 'caption'),
+ (namespaces['html'], 'center'),
+ (namespaces['html'], 'cite'),
+ (namespaces['html'], 'code'),
+ (namespaces['html'], 'col'),
+ (namespaces['html'], 'colgroup'),
+ (namespaces['html'], 'command'),
+ (namespaces['html'], 'datagrid'),
+ (namespaces['html'], 'datalist'),
+ (namespaces['html'], 'dd'),
+ (namespaces['html'], 'del'),
+ (namespaces['html'], 'details'),
+ (namespaces['html'], 'dfn'),
+ (namespaces['html'], 'dialog'),
+ (namespaces['html'], 'dir'),
+ (namespaces['html'], 'div'),
+ (namespaces['html'], 'dl'),
+ (namespaces['html'], 'dt'),
+ (namespaces['html'], 'em'),
+ (namespaces['html'], 'event-source'),
+ (namespaces['html'], 'fieldset'),
+ (namespaces['html'], 'figcaption'),
+ (namespaces['html'], 'figure'),
+ (namespaces['html'], 'footer'),
+ (namespaces['html'], 'font'),
+ (namespaces['html'], 'form'),
+ (namespaces['html'], 'header'),
+ (namespaces['html'], 'h1'),
+ (namespaces['html'], 'h2'),
+ (namespaces['html'], 'h3'),
+ (namespaces['html'], 'h4'),
+ (namespaces['html'], 'h5'),
+ (namespaces['html'], 'h6'),
+ (namespaces['html'], 'hr'),
+ (namespaces['html'], 'i'),
+ (namespaces['html'], 'img'),
+ (namespaces['html'], 'input'),
+ (namespaces['html'], 'ins'),
+ (namespaces['html'], 'keygen'),
+ (namespaces['html'], 'kbd'),
+ (namespaces['html'], 'label'),
+ (namespaces['html'], 'legend'),
+ (namespaces['html'], 'li'),
+ (namespaces['html'], 'm'),
+ (namespaces['html'], 'map'),
+ (namespaces['html'], 'menu'),
+ (namespaces['html'], 'meter'),
+ (namespaces['html'], 'multicol'),
+ (namespaces['html'], 'nav'),
+ (namespaces['html'], 'nextid'),
+ (namespaces['html'], 'ol'),
+ (namespaces['html'], 'output'),
+ (namespaces['html'], 'optgroup'),
+ (namespaces['html'], 'option'),
+ (namespaces['html'], 'p'),
+ (namespaces['html'], 'pre'),
+ (namespaces['html'], 'progress'),
+ (namespaces['html'], 'q'),
+ (namespaces['html'], 's'),
+ (namespaces['html'], 'samp'),
+ (namespaces['html'], 'section'),
+ (namespaces['html'], 'select'),
+ (namespaces['html'], 'small'),
+ (namespaces['html'], 'sound'),
+ (namespaces['html'], 'source'),
+ (namespaces['html'], 'spacer'),
+ (namespaces['html'], 'span'),
+ (namespaces['html'], 'strike'),
+ (namespaces['html'], 'strong'),
+ (namespaces['html'], 'sub'),
+ (namespaces['html'], 'sup'),
+ (namespaces['html'], 'table'),
+ (namespaces['html'], 'tbody'),
+ (namespaces['html'], 'td'),
+ (namespaces['html'], 'textarea'),
+ (namespaces['html'], 'time'),
+ (namespaces['html'], 'tfoot'),
+ (namespaces['html'], 'th'),
+ (namespaces['html'], 'thead'),
+ (namespaces['html'], 'tr'),
+ (namespaces['html'], 'tt'),
+ (namespaces['html'], 'u'),
+ (namespaces['html'], 'ul'),
+ (namespaces['html'], 'var'),
+ (namespaces['html'], 'video'),
+ (namespaces['mathml'], 'maction'),
+ (namespaces['mathml'], 'math'),
+ (namespaces['mathml'], 'merror'),
+ (namespaces['mathml'], 'mfrac'),
+ (namespaces['mathml'], 'mi'),
+ (namespaces['mathml'], 'mmultiscripts'),
+ (namespaces['mathml'], 'mn'),
+ (namespaces['mathml'], 'mo'),
+ (namespaces['mathml'], 'mover'),
+ (namespaces['mathml'], 'mpadded'),
+ (namespaces['mathml'], 'mphantom'),
+ (namespaces['mathml'], 'mprescripts'),
+ (namespaces['mathml'], 'mroot'),
+ (namespaces['mathml'], 'mrow'),
+ (namespaces['mathml'], 'mspace'),
+ (namespaces['mathml'], 'msqrt'),
+ (namespaces['mathml'], 'mstyle'),
+ (namespaces['mathml'], 'msub'),
+ (namespaces['mathml'], 'msubsup'),
+ (namespaces['mathml'], 'msup'),
+ (namespaces['mathml'], 'mtable'),
+ (namespaces['mathml'], 'mtd'),
+ (namespaces['mathml'], 'mtext'),
+ (namespaces['mathml'], 'mtr'),
+ (namespaces['mathml'], 'munder'),
+ (namespaces['mathml'], 'munderover'),
+ (namespaces['mathml'], 'none'),
+ (namespaces['svg'], 'a'),
+ (namespaces['svg'], 'animate'),
+ (namespaces['svg'], 'animateColor'),
+ (namespaces['svg'], 'animateMotion'),
+ (namespaces['svg'], 'animateTransform'),
+ (namespaces['svg'], 'clipPath'),
+ (namespaces['svg'], 'circle'),
+ (namespaces['svg'], 'defs'),
+ (namespaces['svg'], 'desc'),
+ (namespaces['svg'], 'ellipse'),
+ (namespaces['svg'], 'font-face'),
+ (namespaces['svg'], 'font-face-name'),
+ (namespaces['svg'], 'font-face-src'),
+ (namespaces['svg'], 'g'),
+ (namespaces['svg'], 'glyph'),
+ (namespaces['svg'], 'hkern'),
+ (namespaces['svg'], 'linearGradient'),
+ (namespaces['svg'], 'line'),
+ (namespaces['svg'], 'marker'),
+ (namespaces['svg'], 'metadata'),
+ (namespaces['svg'], 'missing-glyph'),
+ (namespaces['svg'], 'mpath'),
+ (namespaces['svg'], 'path'),
+ (namespaces['svg'], 'polygon'),
+ (namespaces['svg'], 'polyline'),
+ (namespaces['svg'], 'radialGradient'),
+ (namespaces['svg'], 'rect'),
+ (namespaces['svg'], 'set'),
+ (namespaces['svg'], 'stop'),
+ (namespaces['svg'], 'svg'),
+ (namespaces['svg'], 'switch'),
+ (namespaces['svg'], 'text'),
+ (namespaces['svg'], 'title'),
+ (namespaces['svg'], 'tspan'),
+ (namespaces['svg'], 'use'),
+))
+
+allowed_attributes = frozenset((
+ # HTML attributes
+ (None, 'abbr'),
+ (None, 'accept'),
+ (None, 'accept-charset'),
+ (None, 'accesskey'),
+ (None, 'action'),
+ (None, 'align'),
+ (None, 'alt'),
+ (None, 'autocomplete'),
+ (None, 'autofocus'),
+ (None, 'axis'),
+ (None, 'background'),
+ (None, 'balance'),
+ (None, 'bgcolor'),
+ (None, 'bgproperties'),
+ (None, 'border'),
+ (None, 'bordercolor'),
+ (None, 'bordercolordark'),
+ (None, 'bordercolorlight'),
+ (None, 'bottompadding'),
+ (None, 'cellpadding'),
+ (None, 'cellspacing'),
+ (None, 'ch'),
+ (None, 'challenge'),
+ (None, 'char'),
+ (None, 'charoff'),
+ (None, 'choff'),
+ (None, 'charset'),
+ (None, 'checked'),
+ (None, 'cite'),
+ (None, 'class'),
+ (None, 'clear'),
+ (None, 'color'),
+ (None, 'cols'),
+ (None, 'colspan'),
+ (None, 'compact'),
+ (None, 'contenteditable'),
+ (None, 'controls'),
+ (None, 'coords'),
+ (None, 'data'),
+ (None, 'datafld'),
+ (None, 'datapagesize'),
+ (None, 'datasrc'),
+ (None, 'datetime'),
+ (None, 'default'),
+ (None, 'delay'),
+ (None, 'dir'),
+ (None, 'disabled'),
+ (None, 'draggable'),
+ (None, 'dynsrc'),
+ (None, 'enctype'),
+ (None, 'end'),
+ (None, 'face'),
+ (None, 'for'),
+ (None, 'form'),
+ (None, 'frame'),
+ (None, 'galleryimg'),
+ (None, 'gutter'),
+ (None, 'headers'),
+ (None, 'height'),
+ (None, 'hidefocus'),
+ (None, 'hidden'),
+ (None, 'high'),
+ (None, 'href'),
+ (None, 'hreflang'),
+ (None, 'hspace'),
+ (None, 'icon'),
+ (None, 'id'),
+ (None, 'inputmode'),
+ (None, 'ismap'),
+ (None, 'keytype'),
+ (None, 'label'),
+ (None, 'leftspacing'),
+ (None, 'lang'),
+ (None, 'list'),
+ (None, 'longdesc'),
+ (None, 'loop'),
+ (None, 'loopcount'),
+ (None, 'loopend'),
+ (None, 'loopstart'),
+ (None, 'low'),
+ (None, 'lowsrc'),
+ (None, 'max'),
+ (None, 'maxlength'),
+ (None, 'media'),
+ (None, 'method'),
+ (None, 'min'),
+ (None, 'multiple'),
+ (None, 'name'),
+ (None, 'nohref'),
+ (None, 'noshade'),
+ (None, 'nowrap'),
+ (None, 'open'),
+ (None, 'optimum'),
+ (None, 'pattern'),
+ (None, 'ping'),
+ (None, 'point-size'),
+ (None, 'poster'),
+ (None, 'pqg'),
+ (None, 'preload'),
+ (None, 'prompt'),
+ (None, 'radiogroup'),
+ (None, 'readonly'),
+ (None, 'rel'),
+ (None, 'repeat-max'),
+ (None, 'repeat-min'),
+ (None, 'replace'),
+ (None, 'required'),
+ (None, 'rev'),
+ (None, 'rightspacing'),
+ (None, 'rows'),
+ (None, 'rowspan'),
+ (None, 'rules'),
+ (None, 'scope'),
+ (None, 'selected'),
+ (None, 'shape'),
+ (None, 'size'),
+ (None, 'span'),
+ (None, 'src'),
+ (None, 'start'),
+ (None, 'step'),
+ (None, 'style'),
+ (None, 'summary'),
+ (None, 'suppress'),
+ (None, 'tabindex'),
+ (None, 'target'),
+ (None, 'template'),
+ (None, 'title'),
+ (None, 'toppadding'),
+ (None, 'type'),
+ (None, 'unselectable'),
+ (None, 'usemap'),
+ (None, 'urn'),
+ (None, 'valign'),
+ (None, 'value'),
+ (None, 'variable'),
+ (None, 'volume'),
+ (None, 'vspace'),
+ (None, 'vrml'),
+ (None, 'width'),
+ (None, 'wrap'),
+ (namespaces['xml'], 'lang'),
+ # MathML attributes
+ (None, 'actiontype'),
+ (None, 'align'),
+ (None, 'columnalign'),
+ (None, 'columnlines'),
+ (None, 'columnspacing'),
+ (None, 'columnspan'),
+ (None, 'depth'),
+ (None, 'display'),
+ (None, 'displaystyle'),
+ (None, 'equalcolumns'),
+ (None, 'equalrows'),
+ (None, 'fence'),
+ (None, 'fontstyle'),
+ (None, 'fontweight'),
+ (None, 'frame'),
+ (None, 'height'),
+ (None, 'linethickness'),
+ (None, 'lspace'),
+ (None, 'mathbackground'),
+ (None, 'mathcolor'),
+ (None, 'mathvariant'),
+ (None, 'maxsize'),
+ (None, 'minsize'),
+ (None, 'other'),
+ (None, 'rowalign'),
+ (None, 'rowlines'),
+ (None, 'rowspacing'),
+ (None, 'rowspan'),
+ (None, 'rspace'),
+ (None, 'scriptlevel'),
+ (None, 'selection'),
+ (None, 'separator'),
+ (None, 'stretchy'),
+ (None, 'width'),
+ (namespaces['xlink'], 'href'),
+ (namespaces['xlink'], 'show'),
+ (namespaces['xlink'], 'type'),
+ # SVG attributes
+ (None, 'accent-height'),
+ (None, 'accumulate'),
+ (None, 'additive'),
+ (None, 'alphabetic'),
+ (None, 'arabic-form'),
+ (None, 'ascent'),
+ (None, 'attributeName'),
+ (None, 'attributeType'),
+ (None, 'baseProfile'),
+ (None, 'bbox'),
+ (None, 'begin'),
+ (None, 'by'),
+ (None, 'calcMode'),
+ (None, 'cap-height'),
+ (None, 'class'),
+ (None, 'clip-path'),
+ (None, 'color'),
+ (None, 'color-rendering'),
+ (None, 'content'),
+ (None, 'cx'),
+ (None, 'cy'),
+ (None, 'd'),
+ (None, 'dx'),
+ (None, 'dy'),
+ (None, 'descent'),
+ (None, 'display'),
+ (None, 'dur'),
+ (None, 'end'),
+ (None, 'fill'),
+ (None, 'fill-opacity'),
+ (None, 'fill-rule'),
+ (None, 'font-family'),
+ (None, 'font-size'),
+ (None, 'font-stretch'),
+ (None, 'font-style'),
+ (None, 'font-variant'),
+ (None, 'font-weight'),
+ (None, 'from'),
+ (None, 'fx'),
+ (None, 'fy'),
+ (None, 'g1'),
+ (None, 'g2'),
+ (None, 'glyph-name'),
+ (None, 'gradientUnits'),
+ (None, 'hanging'),
+ (None, 'height'),
+ (None, 'horiz-adv-x'),
+ (None, 'horiz-origin-x'),
+ (None, 'id'),
+ (None, 'ideographic'),
+ (None, 'k'),
+ (None, 'keyPoints'),
+ (None, 'keySplines'),
+ (None, 'keyTimes'),
+ (None, 'lang'),
+ (None, 'marker-end'),
+ (None, 'marker-mid'),
+ (None, 'marker-start'),
+ (None, 'markerHeight'),
+ (None, 'markerUnits'),
+ (None, 'markerWidth'),
+ (None, 'mathematical'),
+ (None, 'max'),
+ (None, 'min'),
+ (None, 'name'),
+ (None, 'offset'),
+ (None, 'opacity'),
+ (None, 'orient'),
+ (None, 'origin'),
+ (None, 'overline-position'),
+ (None, 'overline-thickness'),
+ (None, 'panose-1'),
+ (None, 'path'),
+ (None, 'pathLength'),
+ (None, 'points'),
+ (None, 'preserveAspectRatio'),
+ (None, 'r'),
+ (None, 'refX'),
+ (None, 'refY'),
+ (None, 'repeatCount'),
+ (None, 'repeatDur'),
+ (None, 'requiredExtensions'),
+ (None, 'requiredFeatures'),
+ (None, 'restart'),
+ (None, 'rotate'),
+ (None, 'rx'),
+ (None, 'ry'),
+ (None, 'slope'),
+ (None, 'stemh'),
+ (None, 'stemv'),
+ (None, 'stop-color'),
+ (None, 'stop-opacity'),
+ (None, 'strikethrough-position'),
+ (None, 'strikethrough-thickness'),
+ (None, 'stroke'),
+ (None, 'stroke-dasharray'),
+ (None, 'stroke-dashoffset'),
+ (None, 'stroke-linecap'),
+ (None, 'stroke-linejoin'),
+ (None, 'stroke-miterlimit'),
+ (None, 'stroke-opacity'),
+ (None, 'stroke-width'),
+ (None, 'systemLanguage'),
+ (None, 'target'),
+ (None, 'text-anchor'),
+ (None, 'to'),
+ (None, 'transform'),
+ (None, 'type'),
+ (None, 'u1'),
+ (None, 'u2'),
+ (None, 'underline-position'),
+ (None, 'underline-thickness'),
+ (None, 'unicode'),
+ (None, 'unicode-range'),
+ (None, 'units-per-em'),
+ (None, 'values'),
+ (None, 'version'),
+ (None, 'viewBox'),
+ (None, 'visibility'),
+ (None, 'width'),
+ (None, 'widths'),
+ (None, 'x'),
+ (None, 'x-height'),
+ (None, 'x1'),
+ (None, 'x2'),
+ (namespaces['xlink'], 'actuate'),
+ (namespaces['xlink'], 'arcrole'),
+ (namespaces['xlink'], 'href'),
+ (namespaces['xlink'], 'role'),
+ (namespaces['xlink'], 'show'),
+ (namespaces['xlink'], 'title'),
+ (namespaces['xlink'], 'type'),
+ (namespaces['xml'], 'base'),
+ (namespaces['xml'], 'lang'),
+ (namespaces['xml'], 'space'),
+ (None, 'y'),
+ (None, 'y1'),
+ (None, 'y2'),
+ (None, 'zoomAndPan'),
+))
+
+attr_val_is_uri = frozenset((
+ (None, 'href'),
+ (None, 'src'),
+ (None, 'cite'),
+ (None, 'action'),
+ (None, 'longdesc'),
+ (None, 'poster'),
+ (None, 'background'),
+ (None, 'datasrc'),
+ (None, 'dynsrc'),
+ (None, 'lowsrc'),
+ (None, 'ping'),
+ (namespaces['xlink'], 'href'),
+ (namespaces['xml'], 'base'),
+))
+
+svg_attr_val_allows_ref = frozenset((
+ (None, 'clip-path'),
+ (None, 'color-profile'),
+ (None, 'cursor'),
+ (None, 'fill'),
+ (None, 'filter'),
+ (None, 'marker'),
+ (None, 'marker-start'),
+ (None, 'marker-mid'),
+ (None, 'marker-end'),
+ (None, 'mask'),
+ (None, 'stroke'),
+))
+
+svg_allow_local_href = frozenset((
+ (None, 'altGlyph'),
+ (None, 'animate'),
+ (None, 'animateColor'),
+ (None, 'animateMotion'),
+ (None, 'animateTransform'),
+ (None, 'cursor'),
+ (None, 'feImage'),
+ (None, 'filter'),
+ (None, 'linearGradient'),
+ (None, 'pattern'),
+ (None, 'radialGradient'),
+ (None, 'textpath'),
+ (None, 'tref'),
+ (None, 'set'),
+ (None, 'use')
+))
+
+allowed_css_properties = frozenset((
+ 'azimuth',
+ 'background-color',
+ 'border-bottom-color',
+ 'border-collapse',
+ 'border-color',
+ 'border-left-color',
+ 'border-right-color',
+ 'border-top-color',
+ 'clear',
+ 'color',
+ 'cursor',
+ 'direction',
+ 'display',
+ 'elevation',
+ 'float',
+ 'font',
+ 'font-family',
+ 'font-size',
+ 'font-style',
+ 'font-variant',
+ 'font-weight',
+ 'height',
+ 'letter-spacing',
+ 'line-height',
+ 'overflow',
+ 'pause',
+ 'pause-after',
+ 'pause-before',
+ 'pitch',
+ 'pitch-range',
+ 'richness',
+ 'speak',
+ 'speak-header',
+ 'speak-numeral',
+ 'speak-punctuation',
+ 'speech-rate',
+ 'stress',
+ 'text-align',
+ 'text-decoration',
+ 'text-indent',
+ 'unicode-bidi',
+ 'vertical-align',
+ 'voice-family',
+ 'volume',
+ 'white-space',
+ 'width',
+))
+
+allowed_css_keywords = frozenset((
+ 'auto',
+ 'aqua',
+ 'black',
+ 'block',
+ 'blue',
+ 'bold',
+ 'both',
+ 'bottom',
+ 'brown',
+ 'center',
+ 'collapse',
+ 'dashed',
+ 'dotted',
+ 'fuchsia',
+ 'gray',
+ 'green',
+ '!important',
+ 'italic',
+ 'left',
+ 'lime',
+ 'maroon',
+ 'medium',
+ 'none',
+ 'navy',
+ 'normal',
+ 'nowrap',
+ 'olive',
+ 'pointer',
+ 'purple',
+ 'red',
+ 'right',
+ 'solid',
+ 'silver',
+ 'teal',
+ 'top',
+ 'transparent',
+ 'underline',
+ 'white',
+ 'yellow',
+))
+
+allowed_svg_properties = frozenset((
+ 'fill',
+ 'fill-opacity',
+ 'fill-rule',
+ 'stroke',
+ 'stroke-width',
+ 'stroke-linecap',
+ 'stroke-linejoin',
+ 'stroke-opacity',
+))
+
+allowed_protocols = frozenset((
+ 'ed2k',
+ 'ftp',
+ 'http',
+ 'https',
+ 'irc',
+ 'mailto',
+ 'news',
+ 'gopher',
+ 'nntp',
+ 'telnet',
+ 'webcal',
+ 'xmpp',
+ 'callto',
+ 'feed',
+ 'urn',
+ 'aim',
+ 'rsync',
+ 'tag',
+ 'ssh',
+ 'sftp',
+ 'rtsp',
+ 'afs',
+ 'data',
+))
+
+allowed_content_types = frozenset((
+ 'image/png',
+ 'image/jpeg',
+ 'image/gif',
+ 'image/webp',
+ 'image/bmp',
+ 'text/plain',
+))
+
+
+data_content_type = re.compile(r'''
+ ^
+ # Match a content type <application>/<type>
+ (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
+ # Match any character set and encoding
+ (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
+ |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
+ # Assume the rest is data
+ ,.*
+ $
+ ''',
+ re.VERBOSE)
+
+
+class Filter(base.Filter):
+ """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes"""
+ def __init__(self,
+ source,
+ allowed_elements=allowed_elements,
+ allowed_attributes=allowed_attributes,
+ allowed_css_properties=allowed_css_properties,
+ allowed_css_keywords=allowed_css_keywords,
+ allowed_svg_properties=allowed_svg_properties,
+ allowed_protocols=allowed_protocols,
+ allowed_content_types=allowed_content_types,
+ attr_val_is_uri=attr_val_is_uri,
+ svg_attr_val_allows_ref=svg_attr_val_allows_ref,
+ svg_allow_local_href=svg_allow_local_href):
+ """Creates a Filter
+
+ :arg allowed_elements: set of elements to allow--everything else will
+ be escaped
+
+ :arg allowed_attributes: set of attributes to allow in
+ elements--everything else will be stripped
+
+ :arg allowed_css_properties: set of CSS properties to allow--everything
+ else will be stripped
+
+ :arg allowed_css_keywords: set of CSS keywords to allow--everything
+ else will be stripped
+
+ :arg allowed_svg_properties: set of SVG properties to allow--everything
+ else will be removed
+
+ :arg allowed_protocols: set of allowed protocols for URIs
+
+ :arg allowed_content_types: set of allowed content types for ``data`` URIs.
+
+ :arg attr_val_is_uri: set of attributes that have URI values--values
+ that have a scheme not listed in ``allowed_protocols`` are removed
+
+ :arg svg_attr_val_allows_ref: set of SVG attributes that can have
+ references
+
+ :arg svg_allow_local_href: set of SVG elements that can have local
+ hrefs--these are removed
+
+ """
+ super(Filter, self).__init__(source)
+ self.allowed_elements = allowed_elements
+ self.allowed_attributes = allowed_attributes
+ self.allowed_css_properties = allowed_css_properties
+ self.allowed_css_keywords = allowed_css_keywords
+ self.allowed_svg_properties = allowed_svg_properties
+ self.allowed_protocols = allowed_protocols
+ self.allowed_content_types = allowed_content_types
+ self.attr_val_is_uri = attr_val_is_uri
+ self.svg_attr_val_allows_ref = svg_attr_val_allows_ref
+ self.svg_allow_local_href = svg_allow_local_href
+
+ def __iter__(self):
+ for token in base.Filter.__iter__(self):
+ token = self.sanitize_token(token)
+ if token:
+ yield token
+
+ # Sanitize the HTML, escaping all elements not in ALLOWED_ELEMENTS, and
+ # stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes
+ # are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and
+ # ALLOWED_CSS_KEYWORDS, is allowed through. Attributes in ATTR_VAL_IS_URI
+ # are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are
+ # allowed.
+ #
+ # sanitize_html('<script> do_nasty_stuff() </script>')
+ # => &lt;script> do_nasty_stuff() &lt;/script>
+ # sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
+ # => <a>Click here for $100</a>
+ def sanitize_token(self, token):
+
+ # accommodate filters which use token_type differently
+ token_type = token["type"]
+ if token_type in ("StartTag", "EndTag", "EmptyTag"):
+ name = token["name"]
+ namespace = token["namespace"]
+ if ((namespace, name) in self.allowed_elements or
+ (namespace is None and
+ (namespaces["html"], name) in self.allowed_elements)):
+ return self.allowed_token(token)
+ else:
+ return self.disallowed_token(token)
+ elif token_type == "Comment":
+ pass
+ else:
+ return token
+
+ def allowed_token(self, token):
+ if "data" in token:
+ attrs = token["data"]
+ attr_names = set(attrs.keys())
+
+ # Remove forbidden attributes
+ for to_remove in (attr_names - self.allowed_attributes):
+ del token["data"][to_remove]
+ attr_names.remove(to_remove)
+
+ # Remove attributes with disallowed URL values
+ for attr in (attr_names & self.attr_val_is_uri):
+ assert attr in attrs
+ # I don't have a clue where this regexp comes from or why it matches those
+ # characters, nor why we call unescape. I just know it's always been here.
+ # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all
+ # this will do is remove *more* than it otherwise would.
+ val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '',
+ unescape(attrs[attr])).lower()
+ # remove replacement characters from unescaped characters
+ val_unescaped = val_unescaped.replace("\ufffd", "")
+ try:
+ uri = urlparse.urlparse(val_unescaped)
+ except ValueError:
+ uri = None
+ del attrs[attr]
+ if uri and uri.scheme:
+ if uri.scheme not in self.allowed_protocols:
+ del attrs[attr]
+ if uri.scheme == 'data':
+ m = data_content_type.match(uri.path)
+ if not m:
+ del attrs[attr]
+ elif m.group('content_type') not in self.allowed_content_types:
+ del attrs[attr]
+
+ for attr in self.svg_attr_val_allows_ref:
+ if attr in attrs:
+ attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
+ ' ',
+ unescape(attrs[attr]))
+ if (token["name"] in self.svg_allow_local_href and
+ (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*',
+ attrs[(namespaces['xlink'], 'href')])):
+ del attrs[(namespaces['xlink'], 'href')]
+ if (None, 'style') in attrs:
+ attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')])
+ token["data"] = attrs
+ return token
+
+ def disallowed_token(self, token):
+ token_type = token["type"]
+ if token_type == "EndTag":
+ token["data"] = "</%s>" % token["name"]
+ elif token["data"]:
+ assert token_type in ("StartTag", "EmptyTag")
+ attrs = []
+ for (ns, name), v in token["data"].items():
+ attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v)))
+ token["data"] = "<%s%s>" % (token["name"], ''.join(attrs))
+ else:
+ token["data"] = "<%s>" % token["name"]
+ if token.get("selfClosing"):
+ token["data"] = token["data"][:-1] + "/>"
+
+ token["type"] = "Characters"
+
+ del token["name"]
+ return token
+
+ def sanitize_css(self, style):
+ # disallow urls
+ style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
+
+ # gauntlet
+ if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
+ return ''
+ if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
+ return ''
+
+ clean = []
+ for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
+ if not value:
+ continue
+ if prop.lower() in self.allowed_css_properties:
+ clean.append(prop + ': ' + value + ';')
+ elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
+ 'padding']:
+ for keyword in value.split():
+ if keyword not in self.allowed_css_keywords and \
+ not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa
+ break
+ else:
+ clean.append(prop + ': ' + value + ';')
+ elif prop.lower() in self.allowed_svg_properties:
+ clean.append(prop + ': ' + value + ';')
+
+ return ' '.join(clean)
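
End to end, the sanitizer escapes disallowed markup rather than deleting it and strips attribute values with unsafe schemes. A sketch (de-vendored import path; output shown approximately):

    import html5lib
    from html5lib.filters.sanitizer import Filter as SanitizerFilter
    from html5lib.serializer import HTMLSerializer

    frag = html5lib.parseFragment(
        '<a href="javascript:evil()">$100</a><script>evil()</script>')
    stream = SanitizerFilter(html5lib.getTreeWalker("etree")(frag))
    # roughly: <a>$100</a>&lt;script&gt;evil()&lt;/script&gt;
    print(HTMLSerializer().render(stream))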
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py
new file mode 100644
index 00000000..0d12584b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py
@@ -0,0 +1,38 @@
+from __future__ import absolute_import, division, unicode_literals
+
+import re
+
+from . import base
+from ..constants import rcdataElements, spaceCharacters
+spaceCharacters = "".join(spaceCharacters)
+
+SPACES_REGEX = re.compile("[%s]+" % spaceCharacters)
+
+
+class Filter(base.Filter):
+ """Collapses whitespace except in pre, textarea, and script elements"""
+ spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements))
+
+ def __iter__(self):
+ preserve = 0
+ for token in base.Filter.__iter__(self):
+ type = token["type"]
+ if type == "StartTag" \
+ and (preserve or token["name"] in self.spacePreserveElements):
+ preserve += 1
+
+ elif type == "EndTag" and preserve:
+ preserve -= 1
+
+ elif not preserve and type == "SpaceCharacters" and token["data"]:
+ # Test on token["data"] above to not introduce spaces where there were not
+ token["data"] = " "
+
+ elif not preserve and type == "Characters":
+ token["data"] = collapse_spaces(token["data"])
+
+ yield token
+
+
+def collapse_spaces(text):
+ return SPACES_REGEX.sub(' ', text)
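
The filter operates on bare token dicts, so it is easy to exercise in isolation. A sketch:

    from html5lib.filters.whitespace import Filter as WhitespaceFilter

    tokens = [
        {"type": "StartTag", "name": "p", "data": {}},
        {"type": "Characters", "data": "too    much    space"},
        {"type": "EndTag", "name": "p", "data": {}},
    ]
    out = [t["data"] for t in WhitespaceFilter(iter(tokens))
           if t["type"] == "Characters"]
    print(out)  # ['too much space']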
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py
new file mode 100644
index 00000000..ae41a133
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py
@@ -0,0 +1,2791 @@
+from __future__ import absolute_import, division, unicode_literals
+from pip._vendor.six import with_metaclass, viewkeys
+
+import types
+from collections import OrderedDict
+
+from . import _inputstream
+from . import _tokenizer
+
+from . import treebuilders
+from .treebuilders.base import Marker
+
+from . import _utils
+from .constants import (
+ spaceCharacters, asciiUpper2Lower,
+ specialElements, headingElements, cdataElements, rcdataElements,
+ tokenTypes, tagTokenTypes,
+ namespaces,
+ htmlIntegrationPointElements, mathmlTextIntegrationPointElements,
+ adjustForeignAttributes as adjustForeignAttributesMap,
+ adjustMathMLAttributes, adjustSVGAttributes,
+ E,
+ _ReparseException
+)
+
+
+def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
+ """Parse an HTML document as a string or file-like object into a tree
+
+ :arg doc: the document to parse as a string or file-like object
+
+ :arg treebuilder: the treebuilder to use when parsing
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import parse
+ >>> parse('<html><body><p>This is a doc</p></body></html>')
+ <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0>
+
+ """
+ tb = treebuilders.getTreeBuilder(treebuilder)
+ p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
+ return p.parse(doc, **kwargs)
+
+
+def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):
+ """Parse an HTML fragment as a string or file-like object into a tree
+
+ :arg doc: the fragment to parse as a string or file-like object
+
+ :arg container: the container context to parse the fragment in
+
+ :arg treebuilder: the treebuilder to use when parsing
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import parseFragment
+ >>> parseFragment('<b>this is a fragment</b>')
+ <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090>
+
+ """
+ tb = treebuilders.getTreeBuilder(treebuilder)
+ p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
+ return p.parseFragment(doc, container=container, **kwargs)
+
+
+def method_decorator_metaclass(function):
+ class Decorated(type):
+ def __new__(meta, classname, bases, classDict):
+ for attributeName, attribute in classDict.items():
+ if isinstance(attribute, types.FunctionType):
+ attribute = function(attribute)
+
+ classDict[attributeName] = attribute
+ return type.__new__(meta, classname, bases, classDict)
+ return Decorated
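
`method_decorator_metaclass` is the hook `getPhases` uses further down to wrap every `Phase` method with its `log` decorator when `debug=True`. A toy sketch of the same mechanism (assumes `method_decorator_metaclass` is in scope):

    from pip._vendor.six import with_metaclass

    def trace(fn):
        def wrapped(*args, **kwargs):
            print("call", fn.__name__)
            return fn(*args, **kwargs)
        return wrapped

    class Traced(with_metaclass(method_decorator_metaclass(trace))):
        def hello(self):
            return "hi"

    Traced().hello()  # prints "call hello" before returning "hi"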
+
+
+class HTMLParser(object):
+ """HTML parser
+
+ Generates a tree structure from a stream of (possibly malformed) HTML.
+
+ """
+
+ def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
+ """
+ :arg tree: a treebuilder class controlling the type of tree that will be
+ returned. Built in treebuilders can be accessed through
+ html5lib.treebuilders.getTreeBuilder(treeType)
+
+ :arg strict: raise an exception when a parse error is encountered
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ :arg debug: whether or not to enable debug mode which logs things
+
+ Example:
+
+ >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser() # generates parser with etree builder
+ >>> from html5lib import treebuilders
+ >>> parser = HTMLParser(treebuilders.getTreeBuilder('lxml'), strict=True) # lxml builder, strict mode
+
+ """
+
+ # Raise an exception on the first error encountered
+ self.strict = strict
+
+ if tree is None:
+ tree = treebuilders.getTreeBuilder("etree")
+ self.tree = tree(namespaceHTMLElements)
+ self.errors = []
+
+ self.phases = dict([(name, cls(self, self.tree)) for name, cls in
+ getPhases(debug).items()])
+
+ def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
+
+ self.innerHTMLMode = innerHTML
+ self.container = container
+ self.scripting = scripting
+ self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)
+ self.reset()
+
+ try:
+ self.mainLoop()
+ except _ReparseException:
+ self.reset()
+ self.mainLoop()
+
+ def reset(self):
+ self.tree.reset()
+ self.firstStartTag = False
+ self.errors = []
+ self.log = [] # only used with debug mode
+ # "quirks" / "limited quirks" / "no quirks"
+ self.compatMode = "no quirks"
+
+ if self.innerHTMLMode:
+ self.innerHTML = self.container.lower()
+
+ if self.innerHTML in cdataElements:
+ self.tokenizer.state = self.tokenizer.rcdataState
+ elif self.innerHTML in rcdataElements:
+ self.tokenizer.state = self.tokenizer.rawtextState
+ elif self.innerHTML == 'plaintext':
+ self.tokenizer.state = self.tokenizer.plaintextState
+ else:
+ # state already is data state
+ # self.tokenizer.state = self.tokenizer.dataState
+ pass
+ self.phase = self.phases["beforeHtml"]
+ self.phase.insertHtmlElement()
+ self.resetInsertionMode()
+ else:
+ self.innerHTML = False # pylint:disable=redefined-variable-type
+ self.phase = self.phases["initial"]
+
+ self.lastPhase = None
+
+ self.beforeRCDataPhase = None
+
+ self.framesetOK = True
+
+ @property
+ def documentEncoding(self):
+ """Name of the character encoding that was used to decode the input stream, or
+ :obj:`None` if that is not determined yet
+
+ """
+ if not hasattr(self, 'tokenizer'):
+ return None
+ return self.tokenizer.stream.charEncoding[0].name
+
+ def isHTMLIntegrationPoint(self, element):
+ if (element.name == "annotation-xml" and
+ element.namespace == namespaces["mathml"]):
+ return ("encoding" in element.attributes and
+ element.attributes["encoding"].translate(
+ asciiUpper2Lower) in
+ ("text/html", "application/xhtml+xml"))
+ else:
+ return (element.namespace, element.name) in htmlIntegrationPointElements
+
+ def isMathMLTextIntegrationPoint(self, element):
+ return (element.namespace, element.name) in mathmlTextIntegrationPointElements
+
+ def mainLoop(self):
+ CharactersToken = tokenTypes["Characters"]
+ SpaceCharactersToken = tokenTypes["SpaceCharacters"]
+ StartTagToken = tokenTypes["StartTag"]
+ EndTagToken = tokenTypes["EndTag"]
+ CommentToken = tokenTypes["Comment"]
+ DoctypeToken = tokenTypes["Doctype"]
+ ParseErrorToken = tokenTypes["ParseError"]
+
+ for token in self.normalizedTokens():
+ prev_token = None
+ new_token = token
+ while new_token is not None:
+ prev_token = new_token
+ currentNode = self.tree.openElements[-1] if self.tree.openElements else None
+ currentNodeNamespace = currentNode.namespace if currentNode else None
+ currentNodeName = currentNode.name if currentNode else None
+
+ type = new_token["type"]
+
+ if type == ParseErrorToken:
+ self.parseError(new_token["data"], new_token.get("datavars", {}))
+ new_token = None
+ else:
+ if (len(self.tree.openElements) == 0 or
+ currentNodeNamespace == self.tree.defaultNamespace or
+ (self.isMathMLTextIntegrationPoint(currentNode) and
+ ((type == StartTagToken and
+ token["name"] not in frozenset(["mglyph", "malignmark"])) or
+ type in (CharactersToken, SpaceCharactersToken))) or
+ (currentNodeNamespace == namespaces["mathml"] and
+ currentNodeName == "annotation-xml" and
+ type == StartTagToken and
+ token["name"] == "svg") or
+ (self.isHTMLIntegrationPoint(currentNode) and
+ type in (StartTagToken, CharactersToken, SpaceCharactersToken))):
+ phase = self.phase
+ else:
+ phase = self.phases["inForeignContent"]
+
+ if type == CharactersToken:
+ new_token = phase.processCharacters(new_token)
+ elif type == SpaceCharactersToken:
+ new_token = phase.processSpaceCharacters(new_token)
+ elif type == StartTagToken:
+ new_token = phase.processStartTag(new_token)
+ elif type == EndTagToken:
+ new_token = phase.processEndTag(new_token)
+ elif type == CommentToken:
+ new_token = phase.processComment(new_token)
+ elif type == DoctypeToken:
+ new_token = phase.processDoctype(new_token)
+
+ if (type == StartTagToken and prev_token["selfClosing"] and
+ not prev_token["selfClosingAcknowledged"]):
+ self.parseError("non-void-element-with-trailing-solidus",
+ {"name": prev_token["name"]})
+
+ # When the loop finishes it's EOF
+ reprocess = True
+ phases = []
+ while reprocess:
+ phases.append(self.phase)
+ reprocess = self.phase.processEOF()
+ if reprocess:
+ assert self.phase not in phases
+
+ def normalizedTokens(self):
+ for token in self.tokenizer:
+ yield self.normalizeToken(token)
+
+ def parse(self, stream, *args, **kwargs):
+ """Parse a HTML document into a well-formed tree
+
+ :arg stream: a file-like object or string containing the HTML to be parsed
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element).
+
+ :arg scripting: treat noscript elements as if JavaScript was turned on
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser()
+ >>> parser.parse('<html><body><p>This is a doc</p></body></html>')
+ <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0>
+
+ """
+ self._parse(stream, False, None, *args, **kwargs)
+ return self.tree.getDocument()
+
+ def parseFragment(self, stream, *args, **kwargs):
+ """Parse a HTML fragment into a well-formed tree fragment
+
+ :arg container: name of the element to use as the container of the
+ fragment; defaults to 'div' when set to None
+
+ :arg stream: a file-like object or string containing the HTML to be parsed
+
+ The optional encoding parameter must be a string that indicates
+ the encoding. If specified, that encoding will be used,
+ regardless of any BOM or later declaration (such as in a meta
+ element)
+
+ :arg scripting: treat noscript elements as if JavaScript was turned on
+
+ :returns: parsed tree
+
+ Example:
+
+ >>> from html5lib.html5parser import HTMLParser
+ >>> parser = HTMLParser()
+ >>> parser.parseFragment('<b>this is a fragment</b>')
+ <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b090>
+
+ """
+ self._parse(stream, True, *args, **kwargs)
+ return self.tree.getFragment()
+
+ def parseError(self, errorcode="XXX-undefined-error", datavars=None):
+ # XXX The idea is to make errorcode mandatory.
+ if datavars is None:
+ datavars = {}
+ self.errors.append((self.tokenizer.stream.position(), errorcode, datavars))
+ if self.strict:
+ raise ParseError(E[errorcode] % datavars)
+
+ def normalizeToken(self, token):
+ # HTML5 specific normalizations to the token stream
+ if token["type"] == tokenTypes["StartTag"]:
+ raw = token["data"]
+ token["data"] = OrderedDict(raw)
+ if len(raw) > len(token["data"]):
+ # we had some duplicated attribute, fix so first wins
+ token["data"].update(raw[::-1])
+
+ return token
+
+ def adjustMathMLAttributes(self, token):
+ adjust_attributes(token, adjustMathMLAttributes)
+
+ def adjustSVGAttributes(self, token):
+ adjust_attributes(token, adjustSVGAttributes)
+
+ def adjustForeignAttributes(self, token):
+ adjust_attributes(token, adjustForeignAttributesMap)
+
+ def reparseTokenNormal(self, token):
+ # pylint:disable=unused-argument
+ self.parser.phase()
+
+ def resetInsertionMode(self):
+ # The name of this method is mostly historical. (It's also used in the
+ # specification.)
+ last = False
+ newModes = {
+ "select": "inSelect",
+ "td": "inCell",
+ "th": "inCell",
+ "tr": "inRow",
+ "tbody": "inTableBody",
+ "thead": "inTableBody",
+ "tfoot": "inTableBody",
+ "caption": "inCaption",
+ "colgroup": "inColumnGroup",
+ "table": "inTable",
+ "head": "inBody",
+ "body": "inBody",
+ "frameset": "inFrameset",
+ "html": "beforeHead"
+ }
+ for node in self.tree.openElements[::-1]:
+ nodeName = node.name
+ new_phase = None
+ if node == self.tree.openElements[0]:
+ assert self.innerHTML
+ last = True
+ nodeName = self.innerHTML
+ # Check for conditions that should only happen in the innerHTML
+ # case
+ if nodeName in ("select", "colgroup", "head", "html"):
+ assert self.innerHTML
+
+ if not last and node.namespace != self.tree.defaultNamespace:
+ continue
+
+ if nodeName in newModes:
+ new_phase = self.phases[newModes[nodeName]]
+ break
+ elif last:
+ new_phase = self.phases["inBody"]
+ break
+
+ self.phase = new_phase
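+        # (The loop walks the stack of open elements from the innermost node
+        # outwards and switches to the first phase whose tag name matches;
+        # "last" marks the fragment case, where the lookup falls back to the
+        # innerHTML container name and ultimately to "inBody".)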
+
+ def parseRCDataRawtext(self, token, contentType):
+ # Generic RCDATA/RAWTEXT Parsing algorithm
+ assert contentType in ("RAWTEXT", "RCDATA")
+
+ self.tree.insertElement(token)
+
+ if contentType == "RAWTEXT":
+ self.tokenizer.state = self.tokenizer.rawtextState
+ else:
+ self.tokenizer.state = self.tokenizer.rcdataState
+
+ self.originalPhase = self.phase
+
+ self.phase = self.phases["text"]
+
+
+@_utils.memoize
+def getPhases(debug):
+ def log(function):
+ """Logger that records which phase processes each token"""
+ type_names = dict((value, key) for key, value in
+ tokenTypes.items())
+
+ def wrapped(self, *args, **kwargs):
+ if function.__name__.startswith("process") and len(args) > 0:
+ token = args[0]
+                info = {"type": type_names[token['type']]}
+ if token['type'] in tagTokenTypes:
+ info["name"] = token['name']
+
+ self.parser.log.append((self.parser.tokenizer.state.__name__,
+ self.parser.phase.__class__.__name__,
+ self.__class__.__name__,
+ function.__name__,
+ info))
+ return function(self, *args, **kwargs)
+ else:
+ return function(self, *args, **kwargs)
+ return wrapped
+
+ def getMetaclass(use_metaclass, metaclass_func):
+ if use_metaclass:
+ return method_decorator_metaclass(metaclass_func)
+ else:
+ return type
+
+ # pylint:disable=unused-argument
+ class Phase(with_metaclass(getMetaclass(debug, log))):
+ """Base class for helper object that implements each phase of processing
+ """
+
+ def __init__(self, parser, tree):
+ self.parser = parser
+ self.tree = tree
+
+ def processEOF(self):
+ raise NotImplementedError
+
+ def processComment(self, token):
+ # For most phases the following is correct. Where it's not it will be
+ # overridden.
+ self.tree.insertComment(token, self.tree.openElements[-1])
+
+ def processDoctype(self, token):
+ self.parser.parseError("unexpected-doctype")
+
+ def processCharacters(self, token):
+ self.tree.insertText(token["data"])
+
+ def processSpaceCharacters(self, token):
+ self.tree.insertText(token["data"])
+
+ def processStartTag(self, token):
+ return self.startTagHandler[token["name"]](token)
+
+ def startTagHtml(self, token):
+ if not self.parser.firstStartTag and token["name"] == "html":
+ self.parser.parseError("non-html-root")
+ # XXX Need a check here to see if the first start tag token emitted is
+ # this token... If it's not, invoke self.parser.parseError().
+ for attr, value in token["data"].items():
+ if attr not in self.tree.openElements[0].attributes:
+ self.tree.openElements[0].attributes[attr] = value
+ self.parser.firstStartTag = False
+
+ def processEndTag(self, token):
+ return self.endTagHandler[token["name"]](token)
+
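+    # Each phase below registers its handlers in a _utils.MethodDispatcher,
+    # essentially a dictionary keyed by tag name with a .default fallback,
+    # so processStartTag/processEndTag reduce to a single lookup, e.g.:
+    #
+    #     handler = self.startTagHandler[token["name"]]  # or .default
+    #     return handler(token)
+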
+ class InitialPhase(Phase):
+ def processSpaceCharacters(self, token):
+ pass
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+ correct = token["correct"]
+
+ if (name != "html" or publicId is not None or
+ systemId is not None and systemId != "about:legacy-compat"):
+ self.parser.parseError("unknown-doctype")
+
+ if publicId is None:
+ publicId = ""
+
+ self.tree.insertDoctype(token)
+
+ if publicId != "":
+ publicId = publicId.translate(asciiUpper2Lower)
+
+ if (not correct or token["name"] != "html" or
+ publicId.startswith(
+ ("+//silmaril//dtd html pro v0r11 19970101//",
+ "-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
+ "-//as//dtd html 3.0 aswedit + extensions//",
+ "-//ietf//dtd html 2.0 level 1//",
+ "-//ietf//dtd html 2.0 level 2//",
+ "-//ietf//dtd html 2.0 strict level 1//",
+ "-//ietf//dtd html 2.0 strict level 2//",
+ "-//ietf//dtd html 2.0 strict//",
+ "-//ietf//dtd html 2.0//",
+ "-//ietf//dtd html 2.1e//",
+ "-//ietf//dtd html 3.0//",
+ "-//ietf//dtd html 3.2 final//",
+ "-//ietf//dtd html 3.2//",
+ "-//ietf//dtd html 3//",
+ "-//ietf//dtd html level 0//",
+ "-//ietf//dtd html level 1//",
+ "-//ietf//dtd html level 2//",
+ "-//ietf//dtd html level 3//",
+ "-//ietf//dtd html strict level 0//",
+ "-//ietf//dtd html strict level 1//",
+ "-//ietf//dtd html strict level 2//",
+ "-//ietf//dtd html strict level 3//",
+ "-//ietf//dtd html strict//",
+ "-//ietf//dtd html//",
+ "-//metrius//dtd metrius presentational//",
+ "-//microsoft//dtd internet explorer 2.0 html strict//",
+ "-//microsoft//dtd internet explorer 2.0 html//",
+ "-//microsoft//dtd internet explorer 2.0 tables//",
+ "-//microsoft//dtd internet explorer 3.0 html strict//",
+ "-//microsoft//dtd internet explorer 3.0 html//",
+ "-//microsoft//dtd internet explorer 3.0 tables//",
+ "-//netscape comm. corp.//dtd html//",
+ "-//netscape comm. corp.//dtd strict html//",
+ "-//o'reilly and associates//dtd html 2.0//",
+ "-//o'reilly and associates//dtd html extended 1.0//",
+ "-//o'reilly and associates//dtd html extended relaxed 1.0//",
+ "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
+ "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
+ "-//spyglass//dtd html 2.0 extended//",
+ "-//sq//dtd html 2.0 hotmetal + extensions//",
+ "-//sun microsystems corp.//dtd hotjava html//",
+ "-//sun microsystems corp.//dtd hotjava strict html//",
+ "-//w3c//dtd html 3 1995-03-24//",
+ "-//w3c//dtd html 3.2 draft//",
+ "-//w3c//dtd html 3.2 final//",
+ "-//w3c//dtd html 3.2//",
+ "-//w3c//dtd html 3.2s draft//",
+ "-//w3c//dtd html 4.0 frameset//",
+ "-//w3c//dtd html 4.0 transitional//",
+ "-//w3c//dtd html experimental 19960712//",
+ "-//w3c//dtd html experimental 970421//",
+ "-//w3c//dtd w3 html//",
+ "-//w3o//dtd w3 html 3.0//",
+ "-//webtechs//dtd mozilla html 2.0//",
+ "-//webtechs//dtd mozilla html//")) or
+ publicId in ("-//w3o//dtd w3 html strict 3.0//en//",
+ "-/w3c/dtd html 4.0 transitional/en",
+ "html") or
+ publicId.startswith(
+ ("-//w3c//dtd html 4.01 frameset//",
+ "-//w3c//dtd html 4.01 transitional//")) and
+ systemId is None or
+ systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"):
+ self.parser.compatMode = "quirks"
+ elif (publicId.startswith(
+ ("-//w3c//dtd xhtml 1.0 frameset//",
+ "-//w3c//dtd xhtml 1.0 transitional//")) or
+ publicId.startswith(
+ ("-//w3c//dtd html 4.01 frameset//",
+ "-//w3c//dtd html 4.01 transitional//")) and
+ systemId is not None):
+ self.parser.compatMode = "limited quirks"
+
+ self.parser.phase = self.parser.phases["beforeHtml"]
+
+ def anythingElse(self):
+ self.parser.compatMode = "quirks"
+ self.parser.phase = self.parser.phases["beforeHtml"]
+
+ def processCharacters(self, token):
+ self.parser.parseError("expected-doctype-but-got-chars")
+ self.anythingElse()
+ return token
+
+ def processStartTag(self, token):
+ self.parser.parseError("expected-doctype-but-got-start-tag",
+ {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def processEndTag(self, token):
+ self.parser.parseError("expected-doctype-but-got-end-tag",
+ {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def processEOF(self):
+ self.parser.parseError("expected-doctype-but-got-eof")
+ self.anythingElse()
+ return True
+
+ class BeforeHtmlPhase(Phase):
+ # helper methods
+ def insertHtmlElement(self):
+ self.tree.insertRoot(impliedTagToken("html", "StartTag"))
+ self.parser.phase = self.parser.phases["beforeHead"]
+
+ # other
+ def processEOF(self):
+ self.insertHtmlElement()
+ return True
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processSpaceCharacters(self, token):
+ pass
+
+ def processCharacters(self, token):
+ self.insertHtmlElement()
+ return token
+
+ def processStartTag(self, token):
+ if token["name"] == "html":
+ self.parser.firstStartTag = True
+ self.insertHtmlElement()
+ return token
+
+ def processEndTag(self, token):
+ if token["name"] not in ("head", "body", "html", "br"):
+ self.parser.parseError("unexpected-end-tag-before-html",
+ {"name": token["name"]})
+ else:
+ self.insertHtmlElement()
+ return token
+
+ class BeforeHeadPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("head", self.startTagHead)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ (("head", "body", "html", "br"), self.endTagImplyHead)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def processEOF(self):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return True
+
+ def processSpaceCharacters(self, token):
+ pass
+
+ def processCharacters(self, token):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagHead(self, token):
+ self.tree.insertElement(token)
+ self.tree.headPointer = self.tree.openElements[-1]
+ self.parser.phase = self.parser.phases["inHead"]
+
+ def startTagOther(self, token):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return token
+
+ def endTagImplyHead(self, token):
+ self.startTagHead(impliedTagToken("head", "StartTag"))
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("end-tag-after-implied-root",
+ {"name": token["name"]})
+
+ class InHeadPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("title", self.startTagTitle),
+ (("noframes", "style"), self.startTagNoFramesStyle),
+ ("noscript", self.startTagNoscript),
+ ("script", self.startTagScript),
+ (("base", "basefont", "bgsound", "command", "link"),
+ self.startTagBaseLinkCommand),
+ ("meta", self.startTagMeta),
+ ("head", self.startTagHead)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("head", self.endTagHead),
+ (("br", "html", "body"), self.endTagHtmlBodyBr)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ # the real thing
+ def processEOF(self):
+ self.anythingElse()
+ return True
+
+ def processCharacters(self, token):
+ self.anythingElse()
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagHead(self, token):
+ self.parser.parseError("two-heads-are-not-better-than-one")
+
+ def startTagBaseLinkCommand(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagMeta(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ attributes = token["data"]
+ if self.parser.tokenizer.stream.charEncoding[1] == "tentative":
+ if "charset" in attributes:
+ self.parser.tokenizer.stream.changeEncoding(attributes["charset"])
+ elif ("content" in attributes and
+ "http-equiv" in attributes and
+ attributes["http-equiv"].lower() == "content-type"):
+                    # Encoding it as UTF-8 here is a hack; really we should
+                    # pass the abstract Unicode string and just run the
+                    # ContentAttrParser on that, but UTF-8 can encode all
+                    # characters and, as an ASCII superset, works here.
+ data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8"))
+ parser = _inputstream.ContentAttrParser(data)
+ codec = parser.parse()
+ self.parser.tokenizer.stream.changeEncoding(codec)
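+            # (e.g. both <meta charset=utf-8> and
+            # <meta http-equiv=content-type content="text/html; charset=utf-8">
+            # reach changeEncoding() while the sniffed encoding is still
+            # tentative.)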
+
+ def startTagTitle(self, token):
+ self.parser.parseRCDataRawtext(token, "RCDATA")
+
+ def startTagNoFramesStyle(self, token):
+ # Need to decide whether to implement the scripting-disabled case
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+
+ def startTagNoscript(self, token):
+ if self.parser.scripting:
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+ else:
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inHeadNoscript"]
+
+ def startTagScript(self, token):
+ self.tree.insertElement(token)
+ self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState
+ self.parser.originalPhase = self.parser.phase
+ self.parser.phase = self.parser.phases["text"]
+
+ def startTagOther(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagHead(self, token):
+ node = self.parser.tree.openElements.pop()
+ assert node.name == "head", "Expected head got %s" % node.name
+ self.parser.phase = self.parser.phases["afterHead"]
+
+ def endTagHtmlBodyBr(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def anythingElse(self):
+ self.endTagHead(impliedTagToken("head"))
+
+ class InHeadNoscriptPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand),
+ (("head", "noscript"), self.startTagHeadNoscript),
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("noscript", self.endTagNoscript),
+ ("br", self.endTagBr),
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def processEOF(self):
+ self.parser.parseError("eof-in-head-noscript")
+ self.anythingElse()
+ return True
+
+ def processComment(self, token):
+ return self.parser.phases["inHead"].processComment(token)
+
+ def processCharacters(self, token):
+ self.parser.parseError("char-in-head-noscript")
+ self.anythingElse()
+ return token
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inHead"].processSpaceCharacters(token)
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagBaseLinkCommand(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagHeadNoscript(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def endTagNoscript(self, token):
+ node = self.parser.tree.openElements.pop()
+ assert node.name == "noscript", "Expected noscript got %s" % node.name
+ self.parser.phase = self.parser.phases["inHead"]
+
+ def endTagBr(self, token):
+ self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]})
+ self.anythingElse()
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def anythingElse(self):
+ # Caller must raise parse error first!
+ self.endTagNoscript(impliedTagToken("noscript"))
+
+ class AfterHeadPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("body", self.startTagBody),
+ ("frameset", self.startTagFrameset),
+ (("base", "basefont", "bgsound", "link", "meta", "noframes", "script",
+ "style", "title"),
+ self.startTagFromHead),
+ ("head", self.startTagHead)
+ ])
+ self.startTagHandler.default = self.startTagOther
+ self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"),
+ self.endTagHtmlBodyBr)])
+ self.endTagHandler.default = self.endTagOther
+
+ def processEOF(self):
+ self.anythingElse()
+ return True
+
+ def processCharacters(self, token):
+ self.anythingElse()
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagBody(self, token):
+ self.parser.framesetOK = False
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inBody"]
+
+ def startTagFrameset(self, token):
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inFrameset"]
+
+ def startTagFromHead(self, token):
+ self.parser.parseError("unexpected-start-tag-out-of-my-head",
+ {"name": token["name"]})
+ self.tree.openElements.append(self.tree.headPointer)
+ self.parser.phases["inHead"].processStartTag(token)
+ for node in self.tree.openElements[::-1]:
+ if node.name == "head":
+ self.tree.openElements.remove(node)
+ break
+
+ def startTagHead(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
+
+ def startTagOther(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagHtmlBodyBr(self, token):
+ self.anythingElse()
+ return token
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def anythingElse(self):
+ self.tree.insertElement(impliedTagToken("body", "StartTag"))
+ self.parser.phase = self.parser.phases["inBody"]
+ self.parser.framesetOK = True
+
+ class InBodyPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody
+ # the really-really-really-very crazy mode
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ # Set this to the default handler
+ self.processSpaceCharacters = self.processSpaceCharactersNonPre
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ (("base", "basefont", "bgsound", "command", "link", "meta",
+ "script", "style", "title"),
+ self.startTagProcessInHead),
+ ("body", self.startTagBody),
+ ("frameset", self.startTagFrameset),
+ (("address", "article", "aside", "blockquote", "center", "details",
+ "dir", "div", "dl", "fieldset", "figcaption", "figure",
+ "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p",
+ "section", "summary", "ul"),
+ self.startTagCloseP),
+ (headingElements, self.startTagHeading),
+ (("pre", "listing"), self.startTagPreListing),
+ ("form", self.startTagForm),
+ (("li", "dd", "dt"), self.startTagListItem),
+ ("plaintext", self.startTagPlaintext),
+ ("a", self.startTagA),
+ (("b", "big", "code", "em", "font", "i", "s", "small", "strike",
+ "strong", "tt", "u"), self.startTagFormatting),
+ ("nobr", self.startTagNobr),
+ ("button", self.startTagButton),
+ (("applet", "marquee", "object"), self.startTagAppletMarqueeObject),
+ ("xmp", self.startTagXmp),
+ ("table", self.startTagTable),
+ (("area", "br", "embed", "img", "keygen", "wbr"),
+ self.startTagVoidFormatting),
+ (("param", "source", "track"), self.startTagParamSource),
+ ("input", self.startTagInput),
+ ("hr", self.startTagHr),
+ ("image", self.startTagImage),
+ ("isindex", self.startTagIsIndex),
+ ("textarea", self.startTagTextarea),
+ ("iframe", self.startTagIFrame),
+ ("noscript", self.startTagNoscript),
+ (("noembed", "noframes"), self.startTagRawtext),
+ ("select", self.startTagSelect),
+ (("rp", "rt"), self.startTagRpRt),
+ (("option", "optgroup"), self.startTagOpt),
+ (("math"), self.startTagMath),
+ (("svg"), self.startTagSvg),
+ (("caption", "col", "colgroup", "frame", "head",
+ "tbody", "td", "tfoot", "th", "thead",
+ "tr"), self.startTagMisplaced)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("body", self.endTagBody),
+ ("html", self.endTagHtml),
+ (("address", "article", "aside", "blockquote", "button", "center",
+ "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure",
+ "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre",
+ "section", "summary", "ul"), self.endTagBlock),
+ ("form", self.endTagForm),
+ ("p", self.endTagP),
+ (("dd", "dt", "li"), self.endTagListItem),
+ (headingElements, self.endTagHeading),
+ (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small",
+ "strike", "strong", "tt", "u"), self.endTagFormatting),
+ (("applet", "marquee", "object"), self.endTagAppletMarqueeObject),
+ ("br", self.endTagBr),
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def isMatchingFormattingElement(self, node1, node2):
+ return (node1.name == node2.name and
+ node1.namespace == node2.namespace and
+ node1.attributes == node2.attributes)
+
+ # helper
+ def addFormattingElement(self, token):
+ self.tree.insertElement(token)
+ element = self.tree.openElements[-1]
+
+ matchingElements = []
+ for node in self.tree.activeFormattingElements[::-1]:
+ if node is Marker:
+ break
+ elif self.isMatchingFormattingElement(node, element):
+ matchingElements.append(node)
+
+ assert len(matchingElements) <= 3
+ if len(matchingElements) == 3:
+ self.tree.activeFormattingElements.remove(matchingElements[-1])
+ self.tree.activeFormattingElements.append(element)
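+            # ("Noah's Ark" clause: only three matching elements may sit
+            # between scope markers in the active formatting list, so the
+            # oldest match is evicted before the new element is appended.)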
+
+ # the real deal
+ def processEOF(self):
+ allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td",
+ "tfoot", "th", "thead", "tr", "body",
+ "html"))
+ for node in self.tree.openElements[::-1]:
+ if node.name not in allowed_elements:
+ self.parser.parseError("expected-closing-tag-but-got-eof")
+ break
+ # Stop parsing
+
+ def processSpaceCharactersDropNewline(self, token):
+ # Sometimes (start of <pre>, <listing>, and <textarea> blocks) we
+ # want to drop leading newlines
+ data = token["data"]
+ self.processSpaceCharacters = self.processSpaceCharactersNonPre
+ if (data.startswith("\n") and
+ self.tree.openElements[-1].name in ("pre", "listing", "textarea") and
+ not self.tree.openElements[-1].hasContent()):
+ data = data[1:]
+ if data:
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertText(data)
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ # The tokenizer should always emit null on its own
+ return
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertText(token["data"])
+ # This must be bad for performance
+ if (self.parser.framesetOK and
+ any([char not in spaceCharacters
+ for char in token["data"]])):
+ self.parser.framesetOK = False
+
+ def processSpaceCharactersNonPre(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertText(token["data"])
+
+ def startTagProcessInHead(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagBody(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": "body"})
+ if (len(self.tree.openElements) == 1 or
+ self.tree.openElements[1].name != "body"):
+ assert self.parser.innerHTML
+ else:
+ self.parser.framesetOK = False
+ for attr, value in token["data"].items():
+ if attr not in self.tree.openElements[1].attributes:
+ self.tree.openElements[1].attributes[attr] = value
+
+ def startTagFrameset(self, token):
+ self.parser.parseError("unexpected-start-tag", {"name": "frameset"})
+ if (len(self.tree.openElements) == 1 or self.tree.openElements[1].name != "body"):
+ assert self.parser.innerHTML
+ elif not self.parser.framesetOK:
+ pass
+ else:
+ if self.tree.openElements[1].parent:
+ self.tree.openElements[1].parent.removeChild(self.tree.openElements[1])
+ while self.tree.openElements[-1].name != "html":
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inFrameset"]
+
+ def startTagCloseP(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+
+ def startTagPreListing(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+ self.processSpaceCharacters = self.processSpaceCharactersDropNewline
+
+ def startTagForm(self, token):
+ if self.tree.formPointer:
+ self.parser.parseError("unexpected-start-tag", {"name": "form"})
+ else:
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.tree.formPointer = self.tree.openElements[-1]
+
+ def startTagListItem(self, token):
+ self.parser.framesetOK = False
+
+ stopNamesMap = {"li": ["li"],
+ "dt": ["dt", "dd"],
+ "dd": ["dt", "dd"]}
+ stopNames = stopNamesMap[token["name"]]
+ for node in reversed(self.tree.openElements):
+ if node.name in stopNames:
+ self.parser.phase.processEndTag(
+ impliedTagToken(node.name, "EndTag"))
+ break
+ if (node.nameTuple in specialElements and
+ node.name not in ("address", "div", "p")):
+ break
+
+ if self.tree.elementInScope("p", variant="button"):
+ self.parser.phase.processEndTag(
+ impliedTagToken("p", "EndTag"))
+
+ self.tree.insertElement(token)
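+            # (Spec rule: an already-open li (or dd/dt) sibling is closed
+            # first, and the walk up the stack stops at any special element
+            # other than address, div, or p.)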
+
+ def startTagPlaintext(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.parser.tokenizer.state = self.parser.tokenizer.plaintextState
+
+ def startTagHeading(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ if self.tree.openElements[-1].name in headingElements:
+ self.parser.parseError("unexpected-start-tag", {"name": token["name"]})
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+
+ def startTagA(self, token):
+ afeAElement = self.tree.elementInActiveFormattingElements("a")
+ if afeAElement:
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "a", "endName": "a"})
+ self.endTagFormatting(impliedTagToken("a"))
+ if afeAElement in self.tree.openElements:
+ self.tree.openElements.remove(afeAElement)
+ if afeAElement in self.tree.activeFormattingElements:
+ self.tree.activeFormattingElements.remove(afeAElement)
+ self.tree.reconstructActiveFormattingElements()
+ self.addFormattingElement(token)
+
+ def startTagFormatting(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.addFormattingElement(token)
+
+ def startTagNobr(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ if self.tree.elementInScope("nobr"):
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "nobr", "endName": "nobr"})
+ self.processEndTag(impliedTagToken("nobr"))
+ # XXX Need tests that trigger the following
+ self.tree.reconstructActiveFormattingElements()
+ self.addFormattingElement(token)
+
+ def startTagButton(self, token):
+ if self.tree.elementInScope("button"):
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "button", "endName": "button"})
+ self.processEndTag(impliedTagToken("button"))
+ return token
+ else:
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+
+ def startTagAppletMarqueeObject(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.tree.activeFormattingElements.append(Marker)
+ self.parser.framesetOK = False
+
+ def startTagXmp(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.framesetOK = False
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+
+ def startTagTable(self, token):
+ if self.parser.compatMode != "quirks":
+ if self.tree.elementInScope("p", variant="button"):
+ self.processEndTag(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+ self.parser.phase = self.parser.phases["inTable"]
+
+ def startTagVoidFormatting(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+ self.parser.framesetOK = False
+
+ def startTagInput(self, token):
+ framesetOK = self.parser.framesetOK
+ self.startTagVoidFormatting(token)
+ if ("type" in token["data"] and
+ token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
+ # input type=hidden doesn't change framesetOK
+ self.parser.framesetOK = framesetOK
+
+ def startTagParamSource(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagHr(self, token):
+ if self.tree.elementInScope("p", variant="button"):
+ self.endTagP(impliedTagToken("p"))
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+ self.parser.framesetOK = False
+
+ def startTagImage(self, token):
+ # No really...
+ self.parser.parseError("unexpected-start-tag-treated-as",
+ {"originalName": "image", "newName": "img"})
+ self.processStartTag(impliedTagToken("img", "StartTag",
+ attributes=token["data"],
+ selfClosing=token["selfClosing"]))
+
+ def startTagIsIndex(self, token):
+ self.parser.parseError("deprecated-tag", {"name": "isindex"})
+ if self.tree.formPointer:
+ return
+ form_attrs = {}
+ if "action" in token["data"]:
+ form_attrs["action"] = token["data"]["action"]
+ self.processStartTag(impliedTagToken("form", "StartTag",
+ attributes=form_attrs))
+ self.processStartTag(impliedTagToken("hr", "StartTag"))
+ self.processStartTag(impliedTagToken("label", "StartTag"))
+ # XXX Localization ...
+ if "prompt" in token["data"]:
+ prompt = token["data"]["prompt"]
+ else:
+ prompt = "This is a searchable index. Enter search keywords: "
+ self.processCharacters(
+ {"type": tokenTypes["Characters"], "data": prompt})
+ attributes = token["data"].copy()
+ if "action" in attributes:
+ del attributes["action"]
+ if "prompt" in attributes:
+ del attributes["prompt"]
+ attributes["name"] = "isindex"
+ self.processStartTag(impliedTagToken("input", "StartTag",
+ attributes=attributes,
+ selfClosing=token["selfClosing"]))
+ self.processEndTag(impliedTagToken("label"))
+ self.processStartTag(impliedTagToken("hr", "StartTag"))
+ self.processEndTag(impliedTagToken("form"))
+
+ def startTagTextarea(self, token):
+ self.tree.insertElement(token)
+ self.parser.tokenizer.state = self.parser.tokenizer.rcdataState
+ self.processSpaceCharacters = self.processSpaceCharactersDropNewline
+ self.parser.framesetOK = False
+
+ def startTagIFrame(self, token):
+ self.parser.framesetOK = False
+ self.startTagRawtext(token)
+
+ def startTagNoscript(self, token):
+ if self.parser.scripting:
+ self.startTagRawtext(token)
+ else:
+ self.startTagOther(token)
+
+ def startTagRawtext(self, token):
+ """iframe, noembed noframes, noscript(if scripting enabled)"""
+ self.parser.parseRCDataRawtext(token, "RAWTEXT")
+
+ def startTagOpt(self, token):
+ if self.tree.openElements[-1].name == "option":
+ self.parser.phase.processEndTag(impliedTagToken("option"))
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.tree.insertElement(token)
+
+ def startTagSelect(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+ self.parser.framesetOK = False
+ if self.parser.phase in (self.parser.phases["inTable"],
+ self.parser.phases["inCaption"],
+ self.parser.phases["inColumnGroup"],
+ self.parser.phases["inTableBody"],
+ self.parser.phases["inRow"],
+ self.parser.phases["inCell"]):
+ self.parser.phase = self.parser.phases["inSelectInTable"]
+ else:
+ self.parser.phase = self.parser.phases["inSelect"]
+
+ def startTagRpRt(self, token):
+ if self.tree.elementInScope("ruby"):
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != "ruby":
+ self.parser.parseError()
+ self.tree.insertElement(token)
+
+ def startTagMath(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.adjustMathMLAttributes(token)
+ self.parser.adjustForeignAttributes(token)
+ token["namespace"] = namespaces["mathml"]
+ self.tree.insertElement(token)
+ # Need to get the parse error right for the case where the token
+ # has a namespace not equal to the xmlns attribute
+ if token["selfClosing"]:
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagSvg(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.parser.adjustSVGAttributes(token)
+ self.parser.adjustForeignAttributes(token)
+ token["namespace"] = namespaces["svg"]
+ self.tree.insertElement(token)
+ # Need to get the parse error right for the case where the token
+ # has a namespace not equal to the xmlns attribute
+ if token["selfClosing"]:
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagMisplaced(self, token):
+ """ Elements that should be children of other elements that have a
+ different insertion mode; here they are ignored
+ "caption", "col", "colgroup", "frame", "frameset", "head",
+ "option", "optgroup", "tbody", "td", "tfoot", "th", "thead",
+ "tr", "noscript"
+ """
+ self.parser.parseError("unexpected-start-tag-ignored", {"name": token["name"]})
+
+ def startTagOther(self, token):
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(token)
+
+ def endTagP(self, token):
+ if not self.tree.elementInScope("p", variant="button"):
+ self.startTagCloseP(impliedTagToken("p", "StartTag"))
+ self.parser.parseError("unexpected-end-tag", {"name": "p"})
+ self.endTagP(impliedTagToken("p", "EndTag"))
+ else:
+ self.tree.generateImpliedEndTags("p")
+ if self.tree.openElements[-1].name != "p":
+ self.parser.parseError("unexpected-end-tag", {"name": "p"})
+ node = self.tree.openElements.pop()
+ while node.name != "p":
+ node = self.tree.openElements.pop()
+
+ def endTagBody(self, token):
+ if not self.tree.elementInScope("body"):
+ self.parser.parseError()
+ return
+ elif self.tree.openElements[-1].name != "body":
+ for node in self.tree.openElements[2:]:
+ if node.name not in frozenset(("dd", "dt", "li", "optgroup",
+ "option", "p", "rp", "rt",
+ "tbody", "td", "tfoot",
+ "th", "thead", "tr", "body",
+ "html")):
+ # Not sure this is the correct name for the parse error
+ self.parser.parseError(
+ "expected-one-end-tag-but-got-another",
+ {"gotName": "body", "expectedName": node.name})
+ break
+ self.parser.phase = self.parser.phases["afterBody"]
+
+ def endTagHtml(self, token):
+ # We repeat the test for the body end tag token being ignored here
+ if self.tree.elementInScope("body"):
+ self.endTagBody(impliedTagToken("body"))
+ return token
+
+ def endTagBlock(self, token):
+ # Put us back in the right whitespace handling mode
+ if token["name"] == "pre":
+ self.processSpaceCharacters = self.processSpaceCharactersNonPre
+ inScope = self.tree.elementInScope(token["name"])
+ if inScope:
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("end-tag-too-early", {"name": token["name"]})
+ if inScope:
+ node = self.tree.openElements.pop()
+ while node.name != token["name"]:
+ node = self.tree.openElements.pop()
+
+ def endTagForm(self, token):
+ node = self.tree.formPointer
+ self.tree.formPointer = None
+ if node is None or not self.tree.elementInScope(node):
+ self.parser.parseError("unexpected-end-tag",
+ {"name": "form"})
+ else:
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1] != node:
+ self.parser.parseError("end-tag-too-early-ignored",
+ {"name": "form"})
+ self.tree.openElements.remove(node)
+
+ def endTagListItem(self, token):
+ if token["name"] == "li":
+ variant = "list"
+ else:
+ variant = None
+ if not self.tree.elementInScope(token["name"], variant=variant):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+ else:
+ self.tree.generateImpliedEndTags(exclude=token["name"])
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError(
+ "end-tag-too-early",
+ {"name": token["name"]})
+ node = self.tree.openElements.pop()
+ while node.name != token["name"]:
+ node = self.tree.openElements.pop()
+
+ def endTagHeading(self, token):
+ for item in headingElements:
+ if self.tree.elementInScope(item):
+ self.tree.generateImpliedEndTags()
+ break
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("end-tag-too-early", {"name": token["name"]})
+
+ for item in headingElements:
+ if self.tree.elementInScope(item):
+ item = self.tree.openElements.pop()
+ while item.name not in headingElements:
+ item = self.tree.openElements.pop()
+ break
+
+ def endTagFormatting(self, token):
+ """The much-feared adoption agency algorithm"""
+ # http://svn.whatwg.org/webapps/complete.html#adoptionAgency revision 7867
+ # XXX Better parseError messages appreciated.
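+            # (Canonical misnesting this untangles: <b>1<i>2</b>3</i>, where
+            # the </b> arrives while <i> is still open; cloning the
+            # formatting elements leaves both properly nested in the tree.)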
+
+ # Step 1
+ outerLoopCounter = 0
+
+ # Step 2
+ while outerLoopCounter < 8:
+
+ # Step 3
+ outerLoopCounter += 1
+
+ # Step 4:
+
+ # Let the formatting element be the last element in
+ # the list of active formatting elements that:
+ # - is between the end of the list and the last scope
+ # marker in the list, if any, or the start of the list
+ # otherwise, and
+ # - has the same tag name as the token.
+ formattingElement = self.tree.elementInActiveFormattingElements(
+ token["name"])
+ if (not formattingElement or
+ (formattingElement in self.tree.openElements and
+ not self.tree.elementInScope(formattingElement.name))):
+ # If there is no such node, then abort these steps
+ # and instead act as described in the "any other
+ # end tag" entry below.
+ self.endTagOther(token)
+ return
+
+ # Otherwise, if there is such a node, but that node is
+ # not in the stack of open elements, then this is a
+ # parse error; remove the element from the list, and
+ # abort these steps.
+ elif formattingElement not in self.tree.openElements:
+ self.parser.parseError("adoption-agency-1.2", {"name": token["name"]})
+ self.tree.activeFormattingElements.remove(formattingElement)
+ return
+
+ # Otherwise, if there is such a node, and that node is
+ # also in the stack of open elements, but the element
+ # is not in scope, then this is a parse error; ignore
+ # the token, and abort these steps.
+ elif not self.tree.elementInScope(formattingElement.name):
+ self.parser.parseError("adoption-agency-4.4", {"name": token["name"]})
+ return
+
+ # Otherwise, there is a formatting element and that
+ # element is in the stack and is in scope. If the
+ # element is not the current node, this is a parse
+ # error. In any case, proceed with the algorithm as
+ # written in the following steps.
+ else:
+ if formattingElement != self.tree.openElements[-1]:
+ self.parser.parseError("adoption-agency-1.3", {"name": token["name"]})
+
+ # Step 5:
+
+ # Let the furthest block be the topmost node in the
+ # stack of open elements that is lower in the stack
+ # than the formatting element, and is an element in
+ # the special category. There might not be one.
+ afeIndex = self.tree.openElements.index(formattingElement)
+ furthestBlock = None
+ for element in self.tree.openElements[afeIndex:]:
+ if element.nameTuple in specialElements:
+ furthestBlock = element
+ break
+
+ # Step 6:
+
+ # If there is no furthest block, then the UA must
+ # first pop all the nodes from the bottom of the stack
+ # of open elements, from the current node up to and
+ # including the formatting element, then remove the
+ # formatting element from the list of active
+ # formatting elements, and finally abort these steps.
+ if furthestBlock is None:
+ element = self.tree.openElements.pop()
+ while element != formattingElement:
+ element = self.tree.openElements.pop()
+ self.tree.activeFormattingElements.remove(element)
+ return
+
+ # Step 7
+ commonAncestor = self.tree.openElements[afeIndex - 1]
+
+ # Step 8:
+ # The bookmark is supposed to help us identify where to reinsert
+ # nodes in step 15. We have to ensure that we reinsert nodes after
+ # the node before the active formatting element. Note the bookmark
+ # can move in step 9.7
+ bookmark = self.tree.activeFormattingElements.index(formattingElement)
+
+ # Step 9
+ lastNode = node = furthestBlock
+ innerLoopCounter = 0
+
+ index = self.tree.openElements.index(node)
+ while innerLoopCounter < 3:
+ innerLoopCounter += 1
+ # Node is element before node in open elements
+ index -= 1
+ node = self.tree.openElements[index]
+ if node not in self.tree.activeFormattingElements:
+ self.tree.openElements.remove(node)
+ continue
+ # Step 9.6
+ if node == formattingElement:
+ break
+ # Step 9.7
+ if lastNode == furthestBlock:
+ bookmark = self.tree.activeFormattingElements.index(node) + 1
+ # Step 9.8
+ clone = node.cloneNode()
+ # Replace node with clone
+ self.tree.activeFormattingElements[
+ self.tree.activeFormattingElements.index(node)] = clone
+ self.tree.openElements[
+ self.tree.openElements.index(node)] = clone
+ node = clone
+ # Step 9.9
+ # Remove lastNode from its parents, if any
+ if lastNode.parent:
+ lastNode.parent.removeChild(lastNode)
+ node.appendChild(lastNode)
+ # Step 9.10
+ lastNode = node
+
+ # Step 10
+                # Foster-parent lastNode: if commonAncestor is a table,
+                # tbody, tfoot, thead, or tr, lastNode must be foster
+                # parented rather than appended directly.
+ if lastNode.parent:
+ lastNode.parent.removeChild(lastNode)
+
+ if commonAncestor.name in frozenset(("table", "tbody", "tfoot", "thead", "tr")):
+ parent, insertBefore = self.tree.getTableMisnestedNodePosition()
+ parent.insertBefore(lastNode, insertBefore)
+ else:
+ commonAncestor.appendChild(lastNode)
+
+ # Step 11
+ clone = formattingElement.cloneNode()
+
+ # Step 12
+ furthestBlock.reparentChildren(clone)
+
+ # Step 13
+ furthestBlock.appendChild(clone)
+
+ # Step 14
+ self.tree.activeFormattingElements.remove(formattingElement)
+ self.tree.activeFormattingElements.insert(bookmark, clone)
+
+ # Step 15
+ self.tree.openElements.remove(formattingElement)
+ self.tree.openElements.insert(
+ self.tree.openElements.index(furthestBlock) + 1, clone)
+
+ def endTagAppletMarqueeObject(self, token):
+ if self.tree.elementInScope(token["name"]):
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("end-tag-too-early", {"name": token["name"]})
+
+ if self.tree.elementInScope(token["name"]):
+ element = self.tree.openElements.pop()
+ while element.name != token["name"]:
+ element = self.tree.openElements.pop()
+ self.tree.clearActiveFormattingElements()
+
+ def endTagBr(self, token):
+ self.parser.parseError("unexpected-end-tag-treated-as",
+ {"originalName": "br", "newName": "br element"})
+ self.tree.reconstructActiveFormattingElements()
+ self.tree.insertElement(impliedTagToken("br", "StartTag"))
+ self.tree.openElements.pop()
+
+ def endTagOther(self, token):
+ for node in self.tree.openElements[::-1]:
+ if node.name == token["name"]:
+ self.tree.generateImpliedEndTags(exclude=token["name"])
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+ while self.tree.openElements.pop() != node:
+ pass
+ break
+ else:
+ if node.nameTuple in specialElements:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+ break
+
+ class TextPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+ self.startTagHandler = _utils.MethodDispatcher([])
+ self.startTagHandler.default = self.startTagOther
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("script", self.endTagScript)])
+ self.endTagHandler.default = self.endTagOther
+
+ def processCharacters(self, token):
+ self.tree.insertText(token["data"])
+
+ def processEOF(self):
+ self.parser.parseError("expected-named-closing-tag-but-got-eof",
+ {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.originalPhase
+ return True
+
+ def startTagOther(self, token):
+ assert False, "Tried to process start tag %s in RCDATA/RAWTEXT mode" % token['name']
+
+ def endTagScript(self, token):
+ node = self.tree.openElements.pop()
+ assert node.name == "script"
+ self.parser.phase = self.parser.originalPhase
+ # The rest of this method is all stuff that only happens if
+ # document.write works
+
+ def endTagOther(self, token):
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.originalPhase
+
+ class InTablePhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-table
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("caption", self.startTagCaption),
+ ("colgroup", self.startTagColgroup),
+ ("col", self.startTagCol),
+ (("tbody", "tfoot", "thead"), self.startTagRowGroup),
+ (("td", "th", "tr"), self.startTagImplyTbody),
+ ("table", self.startTagTable),
+ (("style", "script"), self.startTagStyleScript),
+ ("input", self.startTagInput),
+ ("form", self.startTagForm)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("table", self.endTagTable),
+ (("body", "caption", "col", "colgroup", "html", "tbody", "td",
+ "tfoot", "th", "thead", "tr"), self.endTagIgnore)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ # helper methods
+ def clearStackToTableContext(self):
+ # "clear the stack back to a table context"
+ while self.tree.openElements[-1].name not in ("table", "html"):
+ # self.parser.parseError("unexpected-implied-end-tag-in-table",
+ # {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+ # When the current node is <html> it's an innerHTML case
+
+ # processing methods
+ def processEOF(self):
+ if self.tree.openElements[-1].name != "html":
+ self.parser.parseError("eof-in-table")
+ else:
+ assert self.parser.innerHTML
+ # Stop parsing
+
+ def processSpaceCharacters(self, token):
+ originalPhase = self.parser.phase
+ self.parser.phase = self.parser.phases["inTableText"]
+ self.parser.phase.originalPhase = originalPhase
+ self.parser.phase.processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ originalPhase = self.parser.phase
+ self.parser.phase = self.parser.phases["inTableText"]
+ self.parser.phase.originalPhase = originalPhase
+ self.parser.phase.processCharacters(token)
+
+ def insertText(self, token):
+ # If we get here there must be at least one non-whitespace character
+ # Do the table magic!
+ self.tree.insertFromTable = True
+ self.parser.phases["inBody"].processCharacters(token)
+ self.tree.insertFromTable = False
+
+ def startTagCaption(self, token):
+ self.clearStackToTableContext()
+ self.tree.activeFormattingElements.append(Marker)
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inCaption"]
+
+ def startTagColgroup(self, token):
+ self.clearStackToTableContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inColumnGroup"]
+
+ def startTagCol(self, token):
+ self.startTagColgroup(impliedTagToken("colgroup", "StartTag"))
+ return token
+
+ def startTagRowGroup(self, token):
+ self.clearStackToTableContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inTableBody"]
+
+ def startTagImplyTbody(self, token):
+ self.startTagRowGroup(impliedTagToken("tbody", "StartTag"))
+ return token
+
+ def startTagTable(self, token):
+ self.parser.parseError("unexpected-start-tag-implies-end-tag",
+ {"startName": "table", "endName": "table"})
+ self.parser.phase.processEndTag(impliedTagToken("table"))
+ if not self.parser.innerHTML:
+ return token
+
+ def startTagStyleScript(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagInput(self, token):
+ if ("type" in token["data"] and
+ token["data"]["type"].translate(asciiUpper2Lower) == "hidden"):
+ self.parser.parseError("unexpected-hidden-input-in-table")
+ self.tree.insertElement(token)
+ # XXX associate with form
+ self.tree.openElements.pop()
+ else:
+ self.startTagOther(token)
+
+ def startTagForm(self, token):
+ self.parser.parseError("unexpected-form-in-table")
+ if self.tree.formPointer is None:
+ self.tree.insertElement(token)
+ self.tree.formPointer = self.tree.openElements[-1]
+ self.tree.openElements.pop()
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-implies-table-voodoo", {"name": token["name"]})
+ # Do the table magic!
+ self.tree.insertFromTable = True
+ self.parser.phases["inBody"].processStartTag(token)
+ self.tree.insertFromTable = False
+
+ def endTagTable(self, token):
+ if self.tree.elementInScope("table", variant="table"):
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != "table":
+ self.parser.parseError("end-tag-too-early-named",
+ {"gotName": "table",
+ "expectedName": self.tree.openElements[-1].name})
+ while self.tree.openElements[-1].name != "table":
+ self.tree.openElements.pop()
+ self.tree.openElements.pop()
+ self.parser.resetInsertionMode()
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-implies-table-voodoo", {"name": token["name"]})
+ # Do the table magic!
+ self.tree.insertFromTable = True
+ self.parser.phases["inBody"].processEndTag(token)
+ self.tree.insertFromTable = False
+
+ class InTableTextPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+ self.originalPhase = None
+ self.characterTokens = []
+
+ def flushCharacters(self):
+ data = "".join([item["data"] for item in self.characterTokens])
+ if any([item not in spaceCharacters for item in data]):
+ token = {"type": tokenTypes["Characters"], "data": data}
+ self.parser.phases["inTable"].insertText(token)
+ elif data:
+ self.tree.insertText(data)
+ self.characterTokens = []
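+            # (Buffered text containing any non-space character is
+            # foster-parented out of the table via inTable.insertText; pure
+            # whitespace is inserted in place.)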
+
+ def processComment(self, token):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return token
+
+ def processEOF(self):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return True
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ return
+ self.characterTokens.append(token)
+
+ def processSpaceCharacters(self, token):
+ # pretty sure we should never reach here
+ self.characterTokens.append(token)
+ # assert False
+
+ def processStartTag(self, token):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return token
+
+ def processEndTag(self, token):
+ self.flushCharacters()
+ self.parser.phase = self.originalPhase
+ return token
+
+ class InCaptionPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-caption
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
+ "thead", "tr"), self.startTagTableElement)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("caption", self.endTagCaption),
+ ("table", self.endTagTable),
+ (("body", "col", "colgroup", "html", "tbody", "td", "tfoot", "th",
+ "thead", "tr"), self.endTagIgnore)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def ignoreEndTagCaption(self):
+ return not self.tree.elementInScope("caption", variant="table")
+
+ def processEOF(self):
+ self.parser.phases["inBody"].processEOF()
+
+ def processCharacters(self, token):
+ return self.parser.phases["inBody"].processCharacters(token)
+
+ def startTagTableElement(self, token):
+ self.parser.parseError()
+ # XXX Have to duplicate logic here to find out if the tag is ignored
+ ignoreEndTag = self.ignoreEndTagCaption()
+ self.parser.phase.processEndTag(impliedTagToken("caption"))
+ if not ignoreEndTag:
+ return token
+
+ def startTagOther(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def endTagCaption(self, token):
+ if not self.ignoreEndTagCaption():
+ # AT this code is quite similar to endTagTable in "InTable"
+ self.tree.generateImpliedEndTags()
+ if self.tree.openElements[-1].name != "caption":
+ self.parser.parseError("expected-one-end-tag-but-got-another",
+ {"gotName": "caption",
+ "expectedName": self.tree.openElements[-1].name})
+ while self.tree.openElements[-1].name != "caption":
+ self.tree.openElements.pop()
+ self.tree.openElements.pop()
+ self.tree.clearActiveFormattingElements()
+ self.parser.phase = self.parser.phases["inTable"]
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagTable(self, token):
+ self.parser.parseError()
+ ignoreEndTag = self.ignoreEndTagCaption()
+ self.parser.phase.processEndTag(impliedTagToken("caption"))
+ if not ignoreEndTag:
+ return token
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagOther(self, token):
+ return self.parser.phases["inBody"].processEndTag(token)
+
+ class InColumnGroupPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-column
+
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("col", self.startTagCol)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("colgroup", self.endTagColgroup),
+ ("col", self.endTagCol)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def ignoreEndTagColgroup(self):
+ return self.tree.openElements[-1].name == "html"
+
+ def processEOF(self):
+ if self.tree.openElements[-1].name == "html":
+ assert self.parser.innerHTML
+ return
+ else:
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return True
+
+ def processCharacters(self, token):
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return token
+
+ def startTagCol(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def startTagOther(self, token):
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return token
+
+ def endTagColgroup(self, token):
+ if self.ignoreEndTagColgroup():
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+ else:
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.phases["inTable"]
+
+ def endTagCol(self, token):
+ self.parser.parseError("no-end-tag", {"name": "col"})
+
+ def endTagOther(self, token):
+ ignoreEndTag = self.ignoreEndTagColgroup()
+ self.endTagColgroup(impliedTagToken("colgroup"))
+ if not ignoreEndTag:
+ return token
+
+ class InTableBodyPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-table0
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("tr", self.startTagTr),
+ (("td", "th"), self.startTagTableCell),
+ (("caption", "col", "colgroup", "tbody", "tfoot", "thead"),
+ self.startTagTableOther)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ (("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
+ ("table", self.endTagTable),
+ (("body", "caption", "col", "colgroup", "html", "td", "th",
+ "tr"), self.endTagIgnore)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ # helper methods
+ def clearStackToTableBodyContext(self):
+ while self.tree.openElements[-1].name not in ("tbody", "tfoot",
+ "thead", "html"):
+ # self.parser.parseError("unexpected-implied-end-tag-in-table",
+ # {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+ if self.tree.openElements[-1].name == "html":
+ assert self.parser.innerHTML
+
+ # the rest
+ def processEOF(self):
+ self.parser.phases["inTable"].processEOF()
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inTable"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ return self.parser.phases["inTable"].processCharacters(token)
+
+ def startTagTr(self, token):
+ self.clearStackToTableBodyContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inRow"]
+
+ def startTagTableCell(self, token):
+ self.parser.parseError("unexpected-cell-in-table-body",
+ {"name": token["name"]})
+ self.startTagTr(impliedTagToken("tr", "StartTag"))
+ return token
+
+ def startTagTableOther(self, token):
+ # XXX AT Any ideas on how to share this with endTagTable?
+ if (self.tree.elementInScope("tbody", variant="table") or
+ self.tree.elementInScope("thead", variant="table") or
+ self.tree.elementInScope("tfoot", variant="table")):
+ self.clearStackToTableBodyContext()
+ self.endTagTableRowGroup(
+ impliedTagToken(self.tree.openElements[-1].name))
+ return token
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def startTagOther(self, token):
+ return self.parser.phases["inTable"].processStartTag(token)
+
+ def endTagTableRowGroup(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.clearStackToTableBodyContext()
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.phases["inTable"]
+ else:
+ self.parser.parseError("unexpected-end-tag-in-table-body",
+ {"name": token["name"]})
+
+ def endTagTable(self, token):
+ if (self.tree.elementInScope("tbody", variant="table") or
+ self.tree.elementInScope("thead", variant="table") or
+ self.tree.elementInScope("tfoot", variant="table")):
+ self.clearStackToTableBodyContext()
+ self.endTagTableRowGroup(
+ impliedTagToken(self.tree.openElements[-1].name))
+ return token
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag-in-table-body",
+ {"name": token["name"]})
+
+ def endTagOther(self, token):
+ return self.parser.phases["inTable"].processEndTag(token)
+
+ class InRowPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-row
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ (("td", "th"), self.startTagTableCell),
+ (("caption", "col", "colgroup", "tbody", "tfoot", "thead",
+ "tr"), self.startTagTableOther)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("tr", self.endTagTr),
+ ("table", self.endTagTable),
+ (("tbody", "tfoot", "thead"), self.endTagTableRowGroup),
+ (("body", "caption", "col", "colgroup", "html", "td", "th"),
+ self.endTagIgnore)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ # helper methods (XXX unify this with other table helper methods)
+ def clearStackToTableRowContext(self):
+ while self.tree.openElements[-1].name not in ("tr", "html"):
+ self.parser.parseError("unexpected-implied-end-tag-in-table-row",
+ {"name": self.tree.openElements[-1].name})
+ self.tree.openElements.pop()
+
+ def ignoreEndTagTr(self):
+ return not self.tree.elementInScope("tr", variant="table")
+
+ # the rest
+ def processEOF(self):
+ self.parser.phases["inTable"].processEOF()
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inTable"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ return self.parser.phases["inTable"].processCharacters(token)
+
+ def startTagTableCell(self, token):
+ self.clearStackToTableRowContext()
+ self.tree.insertElement(token)
+ self.parser.phase = self.parser.phases["inCell"]
+ self.tree.activeFormattingElements.append(Marker)
+
+ def startTagTableOther(self, token):
+ ignoreEndTag = self.ignoreEndTagTr()
+ self.endTagTr(impliedTagToken("tr"))
+ # XXX how are we sure it's always ignored in the innerHTML case?
+ if not ignoreEndTag:
+ return token
+
+ def startTagOther(self, token):
+ return self.parser.phases["inTable"].processStartTag(token)
+
+ def endTagTr(self, token):
+ if not self.ignoreEndTagTr():
+ self.clearStackToTableRowContext()
+ self.tree.openElements.pop()
+ self.parser.phase = self.parser.phases["inTableBody"]
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagTable(self, token):
+ ignoreEndTag = self.ignoreEndTagTr()
+ self.endTagTr(impliedTagToken("tr"))
+ # Reprocess the current tag if the tr end tag was not ignored
+ # XXX how are we sure it's always ignored in the innerHTML case?
+ if not ignoreEndTag:
+ return token
+
+ def endTagTableRowGroup(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.endTagTr(impliedTagToken("tr"))
+ return token
+ else:
+ self.parser.parseError()
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag-in-table-row",
+ {"name": token["name"]})
+
+ def endTagOther(self, token):
+ return self.parser.phases["inTable"].processEndTag(token)
+
+ class InCellPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-cell
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ (("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
+ "thead", "tr"), self.startTagTableOther)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ (("td", "th"), self.endTagTableCell),
+ (("body", "caption", "col", "colgroup", "html"), self.endTagIgnore),
+ (("table", "tbody", "tfoot", "thead", "tr"), self.endTagImply)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ # helper
+ def closeCell(self):
+ if self.tree.elementInScope("td", variant="table"):
+ self.endTagTableCell(impliedTagToken("td"))
+ elif self.tree.elementInScope("th", variant="table"):
+ self.endTagTableCell(impliedTagToken("th"))
+
+ # the rest
+ def processEOF(self):
+ self.parser.phases["inBody"].processEOF()
+
+ def processCharacters(self, token):
+ return self.parser.phases["inBody"].processCharacters(token)
+
+ def startTagTableOther(self, token):
+ if (self.tree.elementInScope("td", variant="table") or
+ self.tree.elementInScope("th", variant="table")):
+ self.closeCell()
+ return token
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def startTagOther(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def endTagTableCell(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.tree.generateImpliedEndTags(token["name"])
+ if self.tree.openElements[-1].name != token["name"]:
+ self.parser.parseError("unexpected-cell-end-tag",
+ {"name": token["name"]})
+ while True:
+ node = self.tree.openElements.pop()
+ if node.name == token["name"]:
+ break
+ else:
+ self.tree.openElements.pop()
+ self.tree.clearActiveFormattingElements()
+ self.parser.phase = self.parser.phases["inRow"]
+ else:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagIgnore(self, token):
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ def endTagImply(self, token):
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.closeCell()
+ return token
+ else:
+ # sometimes innerHTML case
+ self.parser.parseError()
+
+ def endTagOther(self, token):
+ return self.parser.phases["inBody"].processEndTag(token)
+
+ class InSelectPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("option", self.startTagOption),
+ ("optgroup", self.startTagOptgroup),
+ ("select", self.startTagSelect),
+ (("input", "keygen", "textarea"), self.startTagInput),
+ ("script", self.startTagScript)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("option", self.endTagOption),
+ ("optgroup", self.endTagOptgroup),
+ ("select", self.endTagSelect)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-select
+ def processEOF(self):
+ if self.tree.openElements[-1].name != "html":
+ self.parser.parseError("eof-in-select")
+ else:
+ assert self.parser.innerHTML
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ return
+ self.tree.insertText(token["data"])
+
+ def startTagOption(self, token):
+ # We need to imply </option> if <option> is the current node.
+ if self.tree.openElements[-1].name == "option":
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+
+ def startTagOptgroup(self, token):
+ if self.tree.openElements[-1].name == "option":
+ self.tree.openElements.pop()
+ if self.tree.openElements[-1].name == "optgroup":
+ self.tree.openElements.pop()
+ self.tree.insertElement(token)
+
+ def startTagSelect(self, token):
+ self.parser.parseError("unexpected-select-in-select")
+ self.endTagSelect(impliedTagToken("select"))
+
+ def startTagInput(self, token):
+ self.parser.parseError("unexpected-input-in-select")
+ if self.tree.elementInScope("select", variant="select"):
+ self.endTagSelect(impliedTagToken("select"))
+ return token
+ else:
+ assert self.parser.innerHTML
+
+ def startTagScript(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-in-select",
+ {"name": token["name"]})
+
+ def endTagOption(self, token):
+ if self.tree.openElements[-1].name == "option":
+ self.tree.openElements.pop()
+ else:
+ self.parser.parseError("unexpected-end-tag-in-select",
+ {"name": "option"})
+
+ def endTagOptgroup(self, token):
+ # </optgroup> implicitly closes <option>
+ if (self.tree.openElements[-1].name == "option" and
+ self.tree.openElements[-2].name == "optgroup"):
+ self.tree.openElements.pop()
+ # It also closes </optgroup>
+ if self.tree.openElements[-1].name == "optgroup":
+ self.tree.openElements.pop()
+ # But nothing else
+ else:
+ self.parser.parseError("unexpected-end-tag-in-select",
+ {"name": "optgroup"})
+
+ def endTagSelect(self, token):
+ if self.tree.elementInScope("select", variant="select"):
+ node = self.tree.openElements.pop()
+ while node.name != "select":
+ node = self.tree.openElements.pop()
+ self.parser.resetInsertionMode()
+ else:
+ # innerHTML case
+ assert self.parser.innerHTML
+ self.parser.parseError()
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-in-select",
+ {"name": token["name"]})
+
+ class InSelectInTablePhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
+ self.startTagTable)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ (("caption", "table", "tbody", "tfoot", "thead", "tr", "td", "th"),
+ self.endTagTable)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def processEOF(self):
+ self.parser.phases["inSelect"].processEOF()
+
+ def processCharacters(self, token):
+ return self.parser.phases["inSelect"].processCharacters(token)
+
+ def startTagTable(self, token):
+ self.parser.parseError("unexpected-table-element-start-tag-in-select-in-table", {"name": token["name"]})
+ self.endTagOther(impliedTagToken("select"))
+ return token
+
+ def startTagOther(self, token):
+ return self.parser.phases["inSelect"].processStartTag(token)
+
+ def endTagTable(self, token):
+ self.parser.parseError("unexpected-table-element-end-tag-in-select-in-table", {"name": token["name"]})
+ if self.tree.elementInScope(token["name"], variant="table"):
+ self.endTagOther(impliedTagToken("select"))
+ return token
+
+ def endTagOther(self, token):
+ return self.parser.phases["inSelect"].processEndTag(token)
+
+ class InForeignContentPhase(Phase):
+ breakoutElements = frozenset(["b", "big", "blockquote", "body", "br",
+ "center", "code", "dd", "div", "dl", "dt",
+ "em", "embed", "h1", "h2", "h3",
+ "h4", "h5", "h6", "head", "hr", "i", "img",
+ "li", "listing", "menu", "meta", "nobr",
+ "ol", "p", "pre", "ruby", "s", "small",
+ "span", "strong", "strike", "sub", "sup",
+ "table", "tt", "u", "ul", "var"])
+
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ def adjustSVGTagNames(self, token):
+ replacements = {"altglyph": "altGlyph",
+ "altglyphdef": "altGlyphDef",
+ "altglyphitem": "altGlyphItem",
+ "animatecolor": "animateColor",
+ "animatemotion": "animateMotion",
+ "animatetransform": "animateTransform",
+ "clippath": "clipPath",
+ "feblend": "feBlend",
+ "fecolormatrix": "feColorMatrix",
+ "fecomponenttransfer": "feComponentTransfer",
+ "fecomposite": "feComposite",
+ "feconvolvematrix": "feConvolveMatrix",
+ "fediffuselighting": "feDiffuseLighting",
+ "fedisplacementmap": "feDisplacementMap",
+ "fedistantlight": "feDistantLight",
+ "feflood": "feFlood",
+ "fefunca": "feFuncA",
+ "fefuncb": "feFuncB",
+ "fefuncg": "feFuncG",
+ "fefuncr": "feFuncR",
+ "fegaussianblur": "feGaussianBlur",
+ "feimage": "feImage",
+ "femerge": "feMerge",
+ "femergenode": "feMergeNode",
+ "femorphology": "feMorphology",
+ "feoffset": "feOffset",
+ "fepointlight": "fePointLight",
+ "fespecularlighting": "feSpecularLighting",
+ "fespotlight": "feSpotLight",
+ "fetile": "feTile",
+ "feturbulence": "feTurbulence",
+ "foreignobject": "foreignObject",
+ "glyphref": "glyphRef",
+ "lineargradient": "linearGradient",
+ "radialgradient": "radialGradient",
+ "textpath": "textPath"}
+
+ if token["name"] in replacements:
+ token["name"] = replacements[token["name"]]
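+            # e.g. an incoming <clippath> start tag is renamed here so the
+            # tree gets the correctly-cased SVG <clipPath> element.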
+
+ def processCharacters(self, token):
+ if token["data"] == "\u0000":
+ token["data"] = "\uFFFD"
+ elif (self.parser.framesetOK and
+ any(char not in spaceCharacters for char in token["data"])):
+ self.parser.framesetOK = False
+ Phase.processCharacters(self, token)
+
+ def processStartTag(self, token):
+ currentNode = self.tree.openElements[-1]
+ if (token["name"] in self.breakoutElements or
+ (token["name"] == "font" and
+ set(token["data"].keys()) & set(["color", "face", "size"]))):
+ self.parser.parseError("unexpected-html-element-in-foreign-content",
+ {"name": token["name"]})
+ while (self.tree.openElements[-1].namespace !=
+ self.tree.defaultNamespace and
+ not self.parser.isHTMLIntegrationPoint(self.tree.openElements[-1]) and
+ not self.parser.isMathMLTextIntegrationPoint(self.tree.openElements[-1])):
+ self.tree.openElements.pop()
+ return token
+
+ else:
+ if currentNode.namespace == namespaces["mathml"]:
+ self.parser.adjustMathMLAttributes(token)
+ elif currentNode.namespace == namespaces["svg"]:
+ self.adjustSVGTagNames(token)
+ self.parser.adjustSVGAttributes(token)
+ self.parser.adjustForeignAttributes(token)
+ token["namespace"] = currentNode.namespace
+ self.tree.insertElement(token)
+ if token["selfClosing"]:
+ self.tree.openElements.pop()
+ token["selfClosingAcknowledged"] = True
+
+ def processEndTag(self, token):
+ nodeIndex = len(self.tree.openElements) - 1
+ node = self.tree.openElements[-1]
+ if node.name.translate(asciiUpper2Lower) != token["name"]:
+ self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
+
+ while True:
+ if node.name.translate(asciiUpper2Lower) == token["name"]:
+ # XXX this isn't in the spec but it seems necessary
+ if self.parser.phase == self.parser.phases["inTableText"]:
+ self.parser.phase.flushCharacters()
+ self.parser.phase = self.parser.phase.originalPhase
+ while self.tree.openElements.pop() != node:
+ assert self.tree.openElements
+ new_token = None
+ break
+ nodeIndex -= 1
+
+ node = self.tree.openElements[nodeIndex]
+ if node.namespace != self.tree.defaultNamespace:
+ continue
+ else:
+ new_token = self.parser.phase.processEndTag(token)
+ break
+ return new_token
+
+ class AfterBodyPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([("html", self.endTagHtml)])
+ self.endTagHandler.default = self.endTagOther
+
+ def processEOF(self):
+ # Stop parsing
+ pass
+
+ def processComment(self, token):
+ # This is needed because data is to be appended to the <html> element
+ # here and not to whatever is currently open.
+ self.tree.insertComment(token, self.tree.openElements[0])
+
+ def processCharacters(self, token):
+ self.parser.parseError("unexpected-char-after-body")
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-after-body",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+        def endTagHtml(self, token):
+ if self.parser.innerHTML:
+ self.parser.parseError("unexpected-end-tag-after-body-innerhtml")
+ else:
+ self.parser.phase = self.parser.phases["afterAfterBody"]
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-after-body",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ class InFramesetPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#in-frameset
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("frameset", self.startTagFrameset),
+ ("frame", self.startTagFrame),
+ ("noframes", self.startTagNoframes)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("frameset", self.endTagFrameset)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def processEOF(self):
+ if self.tree.openElements[-1].name != "html":
+ self.parser.parseError("eof-in-frameset")
+ else:
+ assert self.parser.innerHTML
+
+ def processCharacters(self, token):
+ self.parser.parseError("unexpected-char-in-frameset")
+
+ def startTagFrameset(self, token):
+ self.tree.insertElement(token)
+
+ def startTagFrame(self, token):
+ self.tree.insertElement(token)
+ self.tree.openElements.pop()
+
+ def startTagNoframes(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-in-frameset",
+ {"name": token["name"]})
+
+ def endTagFrameset(self, token):
+ if self.tree.openElements[-1].name == "html":
+ # innerHTML case
+ self.parser.parseError("unexpected-frameset-in-frameset-innerhtml")
+ else:
+ self.tree.openElements.pop()
+ if (not self.parser.innerHTML and
+ self.tree.openElements[-1].name != "frameset"):
+ # If we're not in innerHTML mode and the current node is not a
+ # "frameset" element (anymore) then switch.
+ self.parser.phase = self.parser.phases["afterFrameset"]
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-in-frameset",
+ {"name": token["name"]})
+
+ class AfterFramesetPhase(Phase):
+ # http://www.whatwg.org/specs/web-apps/current-work/#after3
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("noframes", self.startTagNoframes)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ self.endTagHandler = _utils.MethodDispatcher([
+ ("html", self.endTagHtml)
+ ])
+ self.endTagHandler.default = self.endTagOther
+
+ def processEOF(self):
+ # Stop parsing
+ pass
+
+ def processCharacters(self, token):
+ self.parser.parseError("unexpected-char-after-frameset")
+
+ def startTagNoframes(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("unexpected-start-tag-after-frameset",
+ {"name": token["name"]})
+
+ def endTagHtml(self, token):
+ self.parser.phase = self.parser.phases["afterAfterFrameset"]
+
+ def endTagOther(self, token):
+ self.parser.parseError("unexpected-end-tag-after-frameset",
+ {"name": token["name"]})
+
+ class AfterAfterBodyPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ def processEOF(self):
+ pass
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inBody"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ self.parser.parseError("expected-eof-but-got-char")
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("expected-eof-but-got-start-tag",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ def processEndTag(self, token):
+ self.parser.parseError("expected-eof-but-got-end-tag",
+ {"name": token["name"]})
+ self.parser.phase = self.parser.phases["inBody"]
+ return token
+
+ class AfterAfterFramesetPhase(Phase):
+ def __init__(self, parser, tree):
+ Phase.__init__(self, parser, tree)
+
+ self.startTagHandler = _utils.MethodDispatcher([
+ ("html", self.startTagHtml),
+ ("noframes", self.startTagNoFrames)
+ ])
+ self.startTagHandler.default = self.startTagOther
+
+ def processEOF(self):
+ pass
+
+ def processComment(self, token):
+ self.tree.insertComment(token, self.tree.document)
+
+ def processSpaceCharacters(self, token):
+ return self.parser.phases["inBody"].processSpaceCharacters(token)
+
+ def processCharacters(self, token):
+ self.parser.parseError("expected-eof-but-got-char")
+
+ def startTagHtml(self, token):
+ return self.parser.phases["inBody"].processStartTag(token)
+
+ def startTagNoFrames(self, token):
+ return self.parser.phases["inHead"].processStartTag(token)
+
+ def startTagOther(self, token):
+ self.parser.parseError("expected-eof-but-got-start-tag",
+ {"name": token["name"]})
+
+ def processEndTag(self, token):
+ self.parser.parseError("expected-eof-but-got-end-tag",
+ {"name": token["name"]})
+ # pylint:enable=unused-argument
+
+ return {
+ "initial": InitialPhase,
+ "beforeHtml": BeforeHtmlPhase,
+ "beforeHead": BeforeHeadPhase,
+ "inHead": InHeadPhase,
+ "inHeadNoscript": InHeadNoscriptPhase,
+ "afterHead": AfterHeadPhase,
+ "inBody": InBodyPhase,
+ "text": TextPhase,
+ "inTable": InTablePhase,
+ "inTableText": InTableTextPhase,
+ "inCaption": InCaptionPhase,
+ "inColumnGroup": InColumnGroupPhase,
+ "inTableBody": InTableBodyPhase,
+ "inRow": InRowPhase,
+ "inCell": InCellPhase,
+ "inSelect": InSelectPhase,
+ "inSelectInTable": InSelectInTablePhase,
+ "inForeignContent": InForeignContentPhase,
+ "afterBody": AfterBodyPhase,
+ "inFrameset": InFramesetPhase,
+ "afterFrameset": AfterFramesetPhase,
+ "afterAfterBody": AfterAfterBodyPhase,
+ "afterAfterFrameset": AfterAfterFramesetPhase,
+ }
+
+
+def adjust_attributes(token, replacements):
+ needs_adjustment = viewkeys(token['data']) & viewkeys(replacements)
+ if needs_adjustment:
+ token['data'] = OrderedDict((replacements.get(k, k), v)
+ for k, v in token['data'].items())
+
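+# For example, the parser's MathML adjustment passes replacements such as
+# {"definitionurl": "definitionURL"}; matching keys in token['data'] are
+# rewritten to their canonical names, and tokens with no matching keys pass
+# through untouched.
+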
+
+def impliedTagToken(name, type="EndTag", attributes=None,
+ selfClosing=False):
+ if attributes is None:
+ attributes = {}
+ return {"type": tokenTypes[type], "name": name, "data": attributes,
+ "selfClosing": selfClosing}
+
+
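+# Example (illustrative): the phase handlers above synthesize implied tokens
+# with this helper, e.g.
+#
+#     impliedTagToken("tr")
+#     # -> {"type": tokenTypes["EndTag"], "name": "tr", "data": {},
+#     #     "selfClosing": False}
+
+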
+class ParseError(Exception):
+ """Error in parsed document"""
+ pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/serializer.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/serializer.py
new file mode 100644
index 00000000..53f4d44c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/serializer.py
@@ -0,0 +1,409 @@
+from __future__ import absolute_import, division, unicode_literals
+from pip._vendor.six import text_type
+
+import re
+
+from codecs import register_error, xmlcharrefreplace_errors
+
+from .constants import voidElements, booleanAttributes, spaceCharacters
+from .constants import rcdataElements, entities, xmlEntities
+from . import treewalkers, _utils
+from xml.sax.saxutils import escape
+
+_quoteAttributeSpecChars = "".join(spaceCharacters) + "\"'=<>`"
+_quoteAttributeSpec = re.compile("[" + _quoteAttributeSpecChars + "]")
+_quoteAttributeLegacy = re.compile("[" + _quoteAttributeSpecChars +
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n"
+ "\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15"
+ "\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+ "\x20\x2f\x60\xa0\u1680\u180e\u180f\u2000"
+ "\u2001\u2002\u2003\u2004\u2005\u2006\u2007"
+ "\u2008\u2009\u200a\u2028\u2029\u202f\u205f"
+ "\u3000]")
+
+
+_encode_entity_map = {}
+_is_ucs4 = len("\U0010FFFF") == 1
+for k, v in list(entities.items()):
+ # skip multi-character entities
+ if ((_is_ucs4 and len(v) > 1) or
+ (not _is_ucs4 and len(v) > 2)):
+ continue
+ if v != "&":
+ if len(v) == 2:
+ v = _utils.surrogatePairToCodepoint(v)
+ else:
+ v = ord(v)
+ if v not in _encode_entity_map or k.islower():
+ # prefer &lt; over &LT; and similarly for &amp;, &gt;, etc.
+ _encode_entity_map[v] = k
+
+
+def htmlentityreplace_errors(exc):
+ if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
+ res = []
+ codepoints = []
+ skip = False
+ for i, c in enumerate(exc.object[exc.start:exc.end]):
+ if skip:
+ skip = False
+ continue
+ index = i + exc.start
+ if _utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
+ codepoint = _utils.surrogatePairToCodepoint(exc.object[index:index + 2])
+ skip = True
+ else:
+ codepoint = ord(c)
+ codepoints.append(codepoint)
+ for cp in codepoints:
+ e = _encode_entity_map.get(cp)
+ if e:
+ res.append("&")
+ res.append(e)
+ if not e.endswith(";"):
+ res.append(";")
+ else:
+ res.append("&#x%s;" % (hex(cp)[2:]))
+ return ("".join(res), exc.end)
+ else:
+ return xmlcharrefreplace_errors(exc)
+
+
+register_error("htmlentityreplace", htmlentityreplace_errors)
+
+
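+# Illustrative use (a sketch): once registered, the handler plugs into the
+# standard codec error machinery and replaces unencodable characters with
+# named character references where one exists:
+#
+#     "caf\xe9".encode("ascii", "htmlentityreplace")   # -> b'caf&eacute;'
+
+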
+def serialize(input, tree="etree", encoding=None, **serializer_opts):
+ """Serializes the input token stream using the specified treewalker
+
+ :arg input: the token stream to serialize
+
+ :arg tree: the treewalker to use
+
+ :arg encoding: the encoding to use
+
+ :arg serializer_opts: any options to pass to the
+ :py:class:`html5lib.serializer.HTMLSerializer` that gets created
+
+ :returns: the tree serialized as a string
+
+ Example:
+
+ >>> from html5lib.html5parser import parse
+ >>> from html5lib.serializer import serialize
+ >>> token_stream = parse('<html><body><p>Hi!</p></body></html>')
+ >>> serialize(token_stream, omit_optional_tags=False)
+ '<html><head></head><body><p>Hi!</p></body></html>'
+
+ """
+ # XXX: Should we cache this?
+ walker = treewalkers.getTreeWalker(tree)
+ s = HTMLSerializer(**serializer_opts)
+ return s.render(walker(input), encoding)
+
+
+class HTMLSerializer(object):
+
+ # attribute quoting options
+ quote_attr_values = "legacy" # be secure by default
+ quote_char = '"'
+ use_best_quote_char = True
+
+ # tag syntax options
+ omit_optional_tags = True
+ minimize_boolean_attributes = True
+ use_trailing_solidus = False
+ space_before_trailing_solidus = True
+
+ # escaping options
+ escape_lt_in_attrs = False
+ escape_rcdata = False
+ resolve_entities = True
+
+ # miscellaneous options
+ alphabetical_attributes = False
+ inject_meta_charset = True
+ strip_whitespace = False
+ sanitize = False
+
+ options = ("quote_attr_values", "quote_char", "use_best_quote_char",
+ "omit_optional_tags", "minimize_boolean_attributes",
+ "use_trailing_solidus", "space_before_trailing_solidus",
+ "escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
+ "alphabetical_attributes", "inject_meta_charset",
+ "strip_whitespace", "sanitize")
+
+ def __init__(self, **kwargs):
+ """Initialize HTMLSerializer
+
+ :arg inject_meta_charset: Whether or not to inject the meta charset.
+
+ Defaults to ``True``.
+
+ :arg quote_attr_values: Whether to quote attribute values that don't
+ require quoting per legacy browser behavior (``"legacy"``), when
+ required by the standard (``"spec"``), or always (``"always"``).
+
+ Defaults to ``"legacy"``.
+
+ :arg quote_char: Use given quote character for attribute quoting.
+
+ Defaults to ``"`` which will use double quotes unless attribute
+ value contains a double quote, in which case single quotes are
+ used.
+
+ :arg escape_lt_in_attrs: Whether or not to escape ``<`` in attribute
+ values.
+
+ Defaults to ``False``.
+
+        :arg escape_rcdata: Whether to escape characters that need escaping
+            in normal elements even when they appear within rcdata elements
+            such as ``style``.
+
+ Defaults to ``False``.
+
+ :arg resolve_entities: Whether to resolve named character entities that
+ appear in the source tree. The XML predefined entities &lt; &gt;
+ &amp; &quot; &apos; are unaffected by this setting.
+
+ Defaults to ``True``.
+
+ :arg strip_whitespace: Whether to remove semantically meaningless
+ whitespace. (This compresses all whitespace to a single space
+ except within ``pre``.)
+
+ Defaults to ``False``.
+
+ :arg minimize_boolean_attributes: Shortens boolean attributes to give
+ just the attribute value, for example::
+
+ <input disabled="disabled">
+
+ becomes::
+
+ <input disabled>
+
+ Defaults to ``True``.
+
+ :arg use_trailing_solidus: Includes a close-tag slash at the end of the
+ start tag of void elements (empty elements whose end tag is
+ forbidden). E.g. ``<hr/>``.
+
+ Defaults to ``False``.
+
+ :arg space_before_trailing_solidus: Places a space immediately before
+ the closing slash in a tag using a trailing solidus. E.g.
+ ``<hr />``. Requires ``use_trailing_solidus=True``.
+
+ Defaults to ``True``.
+
+ :arg sanitize: Strip all unsafe or unknown constructs from output.
+ See :py:class:`html5lib.filters.sanitizer.Filter`.
+
+ Defaults to ``False``.
+
+ :arg omit_optional_tags: Omit start/end tags that are optional.
+
+ Defaults to ``True``.
+
+ :arg alphabetical_attributes: Reorder attributes to be in alphabetical order.
+
+ Defaults to ``False``.
+
+ """
+ unexpected_args = frozenset(kwargs) - frozenset(self.options)
+ if len(unexpected_args) > 0:
+ raise TypeError("__init__() got an unexpected keyword argument '%s'" % next(iter(unexpected_args)))
+ if 'quote_char' in kwargs:
+ self.use_best_quote_char = False
+ for attr in self.options:
+ setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
+ self.errors = []
+ self.strict = False
+
+ def encode(self, string):
+ assert(isinstance(string, text_type))
+ if self.encoding:
+ return string.encode(self.encoding, "htmlentityreplace")
+ else:
+ return string
+
+ def encodeStrict(self, string):
+ assert(isinstance(string, text_type))
+ if self.encoding:
+ return string.encode(self.encoding, "strict")
+ else:
+ return string
+
+ def serialize(self, treewalker, encoding=None):
+ # pylint:disable=too-many-nested-blocks
+ self.encoding = encoding
+ in_cdata = False
+ self.errors = []
+
+ if encoding and self.inject_meta_charset:
+ from .filters.inject_meta_charset import Filter
+ treewalker = Filter(treewalker, encoding)
+        # The alphabetical-attributes filter goes here on the assumption that
+        # none of the later filters adds or reorders attributes; it needs to
+        # run before the sanitizer so escaped elements come out correctly
+ if self.alphabetical_attributes:
+ from .filters.alphabeticalattributes import Filter
+ treewalker = Filter(treewalker)
+ # WhitespaceFilter should be used before OptionalTagFilter
+        # for maximum efficiency of the latter filter
+ if self.strip_whitespace:
+ from .filters.whitespace import Filter
+ treewalker = Filter(treewalker)
+ if self.sanitize:
+ from .filters.sanitizer import Filter
+ treewalker = Filter(treewalker)
+ if self.omit_optional_tags:
+ from .filters.optionaltags import Filter
+ treewalker = Filter(treewalker)
+
+ for token in treewalker:
+ type = token["type"]
+ if type == "Doctype":
+ doctype = "<!DOCTYPE %s" % token["name"]
+
+ if token["publicId"]:
+ doctype += ' PUBLIC "%s"' % token["publicId"]
+ elif token["systemId"]:
+ doctype += " SYSTEM"
+ if token["systemId"]:
+ if token["systemId"].find('"') >= 0:
+ if token["systemId"].find("'") >= 0:
+                            self.serializeError("System identifier contains both single and double quote characters")
+ quote_char = "'"
+ else:
+ quote_char = '"'
+ doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)
+
+ doctype += ">"
+ yield self.encodeStrict(doctype)
+
+ elif type in ("Characters", "SpaceCharacters"):
+ if type == "SpaceCharacters" or in_cdata:
+ if in_cdata and token["data"].find("</") >= 0:
+ self.serializeError("Unexpected </ in CDATA")
+ yield self.encode(token["data"])
+ else:
+ yield self.encode(escape(token["data"]))
+
+ elif type in ("StartTag", "EmptyTag"):
+ name = token["name"]
+ yield self.encodeStrict("<%s" % name)
+ if name in rcdataElements and not self.escape_rcdata:
+ in_cdata = True
+ elif in_cdata:
+ self.serializeError("Unexpected child element of a CDATA element")
+ for (_, attr_name), attr_value in token["data"].items():
+ # TODO: Add namespace support here
+ k = attr_name
+ v = attr_value
+ yield self.encodeStrict(' ')
+
+ yield self.encodeStrict(k)
+ if not self.minimize_boolean_attributes or \
+ (k not in booleanAttributes.get(name, tuple()) and
+ k not in booleanAttributes.get("", tuple())):
+ yield self.encodeStrict("=")
+ if self.quote_attr_values == "always" or len(v) == 0:
+ quote_attr = True
+ elif self.quote_attr_values == "spec":
+ quote_attr = _quoteAttributeSpec.search(v) is not None
+ elif self.quote_attr_values == "legacy":
+ quote_attr = _quoteAttributeLegacy.search(v) is not None
+ else:
+ raise ValueError("quote_attr_values must be one of: "
+ "'always', 'spec', or 'legacy'")
+ v = v.replace("&", "&amp;")
+ if self.escape_lt_in_attrs:
+ v = v.replace("<", "&lt;")
+ if quote_attr:
+ quote_char = self.quote_char
+ if self.use_best_quote_char:
+ if "'" in v and '"' not in v:
+ quote_char = '"'
+ elif '"' in v and "'" not in v:
+ quote_char = "'"
+ if quote_char == "'":
+ v = v.replace("'", "&#39;")
+ else:
+ v = v.replace('"', "&quot;")
+ yield self.encodeStrict(quote_char)
+ yield self.encode(v)
+ yield self.encodeStrict(quote_char)
+ else:
+ yield self.encode(v)
+ if name in voidElements and self.use_trailing_solidus:
+ if self.space_before_trailing_solidus:
+ yield self.encodeStrict(" /")
+ else:
+ yield self.encodeStrict("/")
+ yield self.encode(">")
+
+ elif type == "EndTag":
+ name = token["name"]
+ if name in rcdataElements:
+ in_cdata = False
+ elif in_cdata:
+ self.serializeError("Unexpected child element of a CDATA element")
+ yield self.encodeStrict("</%s>" % name)
+
+ elif type == "Comment":
+ data = token["data"]
+ if data.find("--") >= 0:
+ self.serializeError("Comment contains --")
+ yield self.encodeStrict("<!--%s-->" % token["data"])
+
+ elif type == "Entity":
+ name = token["name"]
+ key = name + ";"
+ if key not in entities:
+ self.serializeError("Entity %s not recognized" % name)
+ if self.resolve_entities and key not in xmlEntities:
+ data = entities[key]
+ else:
+ data = "&%s;" % name
+ yield self.encodeStrict(data)
+
+ else:
+ self.serializeError(token["data"])
+
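+    # A sketch of the three attribute-quoting modes handled above:
+    #
+    #     HTMLSerializer(quote_attr_values="always")  # quote every value
+    #     HTMLSerializer(quote_attr_values="spec")    # quote only when the
+    #                                                 # spec requires it
+    #     HTMLSerializer(quote_attr_values="legacy")  # default: also quote
+    #                                                 # characters legacy
+    #                                                 # browsers mis-parse
+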
+ def render(self, treewalker, encoding=None):
+ """Serializes the stream from the treewalker into a string
+
+ :arg treewalker: the treewalker to serialize
+
+ :arg encoding: the string encoding to use
+
+ :returns: the serialized tree
+
+ Example:
+
+ >>> from html5lib import parse, getTreeWalker
+ >>> from html5lib.serializer import HTMLSerializer
+ >>> token_stream = parse('<html><body>Hi!</body></html>')
+ >>> walker = getTreeWalker('etree')
+ >>> serializer = HTMLSerializer(omit_optional_tags=False)
+ >>> serializer.render(walker(token_stream))
+ '<html><head></head><body>Hi!</body></html>'
+
+ """
+ if encoding:
+ return b"".join(list(self.serialize(treewalker, encoding)))
+ else:
+ return "".join(list(self.serialize(treewalker)))
+
+ def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
+ # XXX The idea is to make data mandatory.
+ self.errors.append(data)
+ if self.strict:
+ raise SerializeError
+
+
+class SerializeError(Exception):
+ """Error in serialized tree"""
+ pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py
new file mode 100644
index 00000000..7ef59590
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py
@@ -0,0 +1,30 @@
+"""Tree adapters let you convert from one tree structure to another
+
+Example:
+
+.. code-block:: python
+
+ from pip._vendor import html5lib
+ from pip._vendor.html5lib.treeadapters import genshi
+
+ doc = '<html><body>Hi!</body></html>'
+ treebuilder = html5lib.getTreeBuilder('etree')
+ parser = html5lib.HTMLParser(tree=treebuilder)
+ tree = parser.parse(doc)
+ TreeWalker = html5lib.getTreeWalker('etree')
+
+ genshi_tree = genshi.to_genshi(TreeWalker(tree))
+
+"""
+from __future__ import absolute_import, division, unicode_literals
+
+from . import sax
+
+__all__ = ["sax"]
+
+try:
+ from . import genshi # noqa
+except ImportError:
+ pass
+else:
+ __all__.append("genshi")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..6d4ae732
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc
new file mode 100644
index 00000000..30c93203
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc
new file mode 100644
index 00000000..c3567384
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py
new file mode 100644
index 00000000..61d5fb6a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py
@@ -0,0 +1,54 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from genshi.core import QName, Attrs
+from genshi.core import START, END, TEXT, COMMENT, DOCTYPE
+
+
+def to_genshi(walker):
+ """Convert a tree to a genshi tree
+
+ :arg walker: the treewalker to use to walk the tree to convert it
+
+ :returns: generator of genshi nodes
+
+ """
+ text = []
+ for token in walker:
+ type = token["type"]
+ if type in ("Characters", "SpaceCharacters"):
+ text.append(token["data"])
+ elif text:
+ yield TEXT, "".join(text), (None, -1, -1)
+ text = []
+
+ if type in ("StartTag", "EmptyTag"):
+ if token["namespace"]:
+ name = "{%s}%s" % (token["namespace"], token["name"])
+ else:
+ name = token["name"]
+ attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value)
+ for attr, value in token["data"].items()])
+ yield (START, (QName(name), attrs), (None, -1, -1))
+ if type == "EmptyTag":
+ type = "EndTag"
+
+ if type == "EndTag":
+ if token["namespace"]:
+ name = "{%s}%s" % (token["namespace"], token["name"])
+ else:
+ name = token["name"]
+
+ yield END, QName(name), (None, -1, -1)
+
+ elif type == "Comment":
+ yield COMMENT, token["data"], (None, -1, -1)
+
+ elif type == "Doctype":
+ yield DOCTYPE, (token["name"], token["publicId"],
+ token["systemId"]), (None, -1, -1)
+
+ else:
+ pass # FIXME: What to do?
+
+ if text:
+ yield TEXT, "".join(text), (None, -1, -1)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py
new file mode 100644
index 00000000..f4ccea5a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py
@@ -0,0 +1,50 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from xml.sax.xmlreader import AttributesNSImpl
+
+from ..constants import adjustForeignAttributes, unadjustForeignAttributes
+
+prefix_mapping = {}
+for prefix, localName, namespace in adjustForeignAttributes.values():
+ if prefix is not None:
+ prefix_mapping[prefix] = namespace
+
+
+def to_sax(walker, handler):
+ """Call SAX-like content handler based on treewalker walker
+
+ :arg walker: the treewalker to use to walk the tree to convert it
+
+ :arg handler: SAX handler to use
+
+ """
+ handler.startDocument()
+ for prefix, namespace in prefix_mapping.items():
+ handler.startPrefixMapping(prefix, namespace)
+
+ for token in walker:
+ type = token["type"]
+ if type == "Doctype":
+ continue
+ elif type in ("StartTag", "EmptyTag"):
+ attrs = AttributesNSImpl(token["data"],
+ unadjustForeignAttributes)
+ handler.startElementNS((token["namespace"], token["name"]),
+ token["name"],
+ attrs)
+ if type == "EmptyTag":
+ handler.endElementNS((token["namespace"], token["name"]),
+ token["name"])
+ elif type == "EndTag":
+ handler.endElementNS((token["namespace"], token["name"]),
+ token["name"])
+ elif type in ("Characters", "SpaceCharacters"):
+ handler.characters(token["data"])
+ elif type == "Comment":
+ pass
+ else:
+ assert False, "Unknown token type"
+
+ for prefix, namespace in prefix_mapping.items():
+ handler.endPrefixMapping(prefix)
+ handler.endDocument()
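+
+
+# Usage sketch (assumes a tree parsed with html5lib and a walker obtained via
+# html5lib.getTreeWalker; ContentHandler is the stdlib no-op base class):
+#
+#     from xml.sax.handler import ContentHandler
+#     to_sax(walker(tree), ContentHandler())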
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py
new file mode 100644
index 00000000..d44447ea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py
@@ -0,0 +1,88 @@
+"""A collection of modules for building different kinds of trees from HTML
+documents.
+
+To create a treebuilder for a new type of tree, you need to
+implement several things:
+
+1. A set of classes for various types of elements: Document, Doctype, Comment,
+   Element. These must implement the interface of ``treebuilders.base.Node``
+   (although comment nodes have a different signature for their constructor,
+   see ``treebuilders.etree.Comment``). Textual content may also be implemented
+   as another node type, or not, as your tree implementation requires.
+
+2. A treebuilder object (called ``TreeBuilder`` by convention) that inherits
+ from ``treebuilders.base.TreeBuilder``. This has 4 required attributes:
+
+ * ``documentClass`` - the class to use for the bottommost node of a document
+ * ``elementClass`` - the class to use for HTML Elements
+ * ``commentClass`` - the class to use for comments
+ * ``doctypeClass`` - the class to use for doctypes
+
+ It also has one required method:
+
+ * ``getDocument`` - Returns the root node of the complete document tree
+
+3. If you wish to run the unit tests, you must also create a ``testSerializer``
+   method on your treebuilder which accepts a node and returns a string
+   containing the node and its children serialized according to the format
+   used in the unittests
+
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+
+from .._utils import default_etree
+
+treeBuilderCache = {}
+
+
+def getTreeBuilder(treeType, implementation=None, **kwargs):
+ """Get a TreeBuilder class for various types of trees with built-in support
+
+ :arg treeType: the name of the tree type required (case-insensitive). Supported
+ values are:
+
+    * "dom" - A generic builder for DOM implementations, defaulting to an
+      xml.dom.minidom-based implementation.
+    * "etree" - A generic builder for tree implementations exposing an
+      ElementTree-like interface, defaulting to xml.etree.cElementTree if
+      available and xml.etree.ElementTree if not.
+    * "lxml" - An etree-based builder for lxml.etree, handling limitations
+      of lxml's implementation.
+
+ :arg implementation: (Currently applies to the "etree" and "dom" tree
+ types). A module implementing the tree type e.g. xml.etree.ElementTree
+ or xml.etree.cElementTree.
+
+ :arg kwargs: Any additional options to pass to the TreeBuilder when
+ creating it.
+
+ Example:
+
+ >>> from html5lib.treebuilders import getTreeBuilder
+ >>> builder = getTreeBuilder('etree')
+
+ """
+
+ treeType = treeType.lower()
+ if treeType not in treeBuilderCache:
+ if treeType == "dom":
+ from . import dom
+ # Come up with a sane default (pref. from the stdlib)
+ if implementation is None:
+ from xml.dom import minidom
+ implementation = minidom
+ # NEVER cache here, caching is done in the dom submodule
+ return dom.getDomModule(implementation, **kwargs).TreeBuilder
+ elif treeType == "lxml":
+ from . import etree_lxml
+ treeBuilderCache[treeType] = etree_lxml.TreeBuilder
+ elif treeType == "etree":
+ from . import etree
+ if implementation is None:
+ implementation = default_etree
+ # NEVER cache here, caching is done in the etree submodule
+ return etree.getETreeModule(implementation, **kwargs).TreeBuilder
+ else:
+ raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
+ return treeBuilderCache.get(treeType)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..4160076a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc
new file mode 100644
index 00000000..f3290210
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc
new file mode 100644
index 00000000..9e461bea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc
new file mode 100644
index 00000000..bd7bceb3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc
new file mode 100644
index 00000000..8aa7773e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py
new file mode 100644
index 00000000..73973db5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/base.py
@@ -0,0 +1,417 @@
+from __future__ import absolute_import, division, unicode_literals
+from pip._vendor.six import text_type
+
+from ..constants import scopingElements, tableInsertModeElements, namespaces
+
+# The scope markers are inserted when entering object elements,
+# marquees, table cells, and table captions, and are used to prevent formatting
+# from "leaking" into tables, object elements, and marquees.
+Marker = None
+
+listElementsMap = {
+ None: (frozenset(scopingElements), False),
+ "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False),
+ "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"),
+ (namespaces["html"], "ul")])), False),
+ "table": (frozenset([(namespaces["html"], "html"),
+ (namespaces["html"], "table")]), False),
+ "select": (frozenset([(namespaces["html"], "optgroup"),
+ (namespaces["html"], "option")]), True)
+}
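+# Each variant maps to (boundary elements, invert). elementInScope() below
+# walks the open-element stack from the top and stops (returning False) at a
+# boundary element; for "select", invert=True flips the test, so the walk
+# continues only through optgroup/option and stops at anything else.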
+
+
+class Node(object):
+ """Represents an item in the tree"""
+ def __init__(self, name):
+ """Creates a Node
+
+ :arg name: The tag name associated with the node
+
+ """
+        # The tag name associated with the node
+ self.name = name
+ # The parent of the current node (or None for the document node)
+ self.parent = None
+ # The value of the current node (applies to text nodes and comments)
+ self.value = None
+ # A dict holding name -> value pairs for attributes of the node
+ self.attributes = {}
+ # A list of child nodes of the current node. This must include all
+ # elements but not necessarily other node types.
+ self.childNodes = []
+ # A list of miscellaneous flags that can be set on the node.
+ self._flags = []
+
+ def __str__(self):
+ attributesStr = " ".join(["%s=\"%s\"" % (name, value)
+ for name, value in
+ self.attributes.items()])
+ if attributesStr:
+ return "<%s %s>" % (self.name, attributesStr)
+ else:
+ return "<%s>" % (self.name)
+
+ def __repr__(self):
+ return "<%s>" % (self.name)
+
+ def appendChild(self, node):
+ """Insert node as a child of the current node
+
+ :arg node: the node to insert
+
+ """
+ raise NotImplementedError
+
+ def insertText(self, data, insertBefore=None):
+ """Insert data as text in the current node, positioned before the
+ start of node insertBefore or to the end of the node's text.
+
+ :arg data: the data to insert
+
+ :arg insertBefore: True if you want to insert the text before the node
+ and False if you want to insert it after the node
+
+ """
+ raise NotImplementedError
+
+ def insertBefore(self, node, refNode):
+ """Insert node as a child of the current node, before refNode in the
+ list of child nodes. Raises ValueError if refNode is not a child of
+ the current node
+
+ :arg node: the node to insert
+
+ :arg refNode: the child node to insert the node before
+
+ """
+ raise NotImplementedError
+
+ def removeChild(self, node):
+ """Remove node from the children of the current node
+
+ :arg node: the child node to remove
+
+ """
+ raise NotImplementedError
+
+ def reparentChildren(self, newParent):
+ """Move all the children of the current node to newParent.
+ This is needed so that trees that don't store text as nodes move the
+ text in the correct way
+
+ :arg newParent: the node to move all this node's children to
+
+ """
+ # XXX - should this method be made more general?
+ for child in self.childNodes:
+ newParent.appendChild(child)
+ self.childNodes = []
+
+ def cloneNode(self):
+ """Return a shallow copy of the current node i.e. a node with the same
+ name and attributes but with no parent or child nodes
+ """
+ raise NotImplementedError
+
+ def hasContent(self):
+ """Return true if the node has children or text, false otherwise
+ """
+ raise NotImplementedError
+
+
+class ActiveFormattingElements(list):
+ def append(self, node):
+ equalCount = 0
+ if node != Marker:
+ for element in self[::-1]:
+ if element == Marker:
+ break
+ if self.nodesEqual(element, node):
+ equalCount += 1
+ if equalCount == 3:
+ self.remove(element)
+ break
+ list.append(self, node)
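+        # This implements the spec's "Noah's Ark clause": at most three
+        # entries with the same name, namespace and attributes may appear
+        # between markers, so appending a fourth drops the earliest duplicate.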
+
+ def nodesEqual(self, node1, node2):
+ if not node1.nameTuple == node2.nameTuple:
+ return False
+
+ if not node1.attributes == node2.attributes:
+ return False
+
+ return True
+
+
+class TreeBuilder(object):
+ """Base treebuilder implementation
+
+ * documentClass - the class to use for the bottommost node of a document
+ * elementClass - the class to use for HTML Elements
+ * commentClass - the class to use for comments
+ * doctypeClass - the class to use for doctypes
+
+ """
+ # pylint:disable=not-callable
+
+ # Document class
+ documentClass = None
+
+ # The class to use for creating a node
+ elementClass = None
+
+ # The class to use for creating comments
+ commentClass = None
+
+ # The class to use for creating doctypes
+ doctypeClass = None
+
+ # Fragment class
+ fragmentClass = None
+
+ def __init__(self, namespaceHTMLElements):
+ """Create a TreeBuilder
+
+ :arg namespaceHTMLElements: whether or not to namespace HTML elements
+
+ """
+ if namespaceHTMLElements:
+ self.defaultNamespace = "http://www.w3.org/1999/xhtml"
+ else:
+ self.defaultNamespace = None
+ self.reset()
+
+ def reset(self):
+ self.openElements = []
+ self.activeFormattingElements = ActiveFormattingElements()
+
+ # XXX - rename these to headElement, formElement
+ self.headPointer = None
+ self.formPointer = None
+
+ self.insertFromTable = False
+
+ self.document = self.documentClass()
+
+ def elementInScope(self, target, variant=None):
+
+        # If we pass a node in, we match that node. If we pass a string, we
+        # match any node with that name.
+ exactNode = hasattr(target, "nameTuple")
+ if not exactNode:
+ if isinstance(target, text_type):
+ target = (namespaces["html"], target)
+ assert isinstance(target, tuple)
+
+ listElements, invert = listElementsMap[variant]
+
+ for node in reversed(self.openElements):
+ if exactNode and node == target:
+ return True
+ elif not exactNode and node.nameTuple == target:
+ return True
+ elif (invert ^ (node.nameTuple in listElements)):
+ return False
+
+ assert False # We should never reach this point
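+        # For example (a sketch): with variant="table" the walk stops at the
+        # nearest table (or the html root), so
+        #     self.elementInScope("td", variant="table")
+        # is True only while a <td> is open inside the innermost table.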
+
+ def reconstructActiveFormattingElements(self):
+ # Within this algorithm the order of steps described in the
+ # specification is not quite the same as the order of steps in the
+ # code. It should still do the same though.
+
+ # Step 1: stop the algorithm when there's nothing to do.
+ if not self.activeFormattingElements:
+ return
+
+ # Step 2 and step 3: we start with the last element. So i is -1.
+ i = len(self.activeFormattingElements) - 1
+ entry = self.activeFormattingElements[i]
+ if entry == Marker or entry in self.openElements:
+ return
+
+ # Step 6
+ while entry != Marker and entry not in self.openElements:
+ if i == 0:
+ # This will be reset to 0 below
+ i = -1
+ break
+ i -= 1
+ # Step 5: let entry be one earlier in the list.
+ entry = self.activeFormattingElements[i]
+
+ while True:
+ # Step 7
+ i += 1
+
+ # Step 8
+ entry = self.activeFormattingElements[i]
+ clone = entry.cloneNode() # Mainly to get a new copy of the attributes
+
+ # Step 9
+ element = self.insertElement({"type": "StartTag",
+ "name": clone.name,
+ "namespace": clone.namespace,
+ "data": clone.attributes})
+
+ # Step 10
+ self.activeFormattingElements[i] = element
+
+ # Step 11
+ if element == self.activeFormattingElements[-1]:
+ break
+
+ def clearActiveFormattingElements(self):
+ entry = self.activeFormattingElements.pop()
+ while self.activeFormattingElements and entry != Marker:
+ entry = self.activeFormattingElements.pop()
+
+ def elementInActiveFormattingElements(self, name):
+ """Check if an element exists between the end of the active
+ formatting elements and the last marker. If it does, return it, else
+ return false"""
+
+ for item in self.activeFormattingElements[::-1]:
+ # Check for Marker first because if it's a Marker it doesn't have a
+ # name attribute.
+ if item == Marker:
+ break
+ elif item.name == name:
+ return item
+ return False
+
+ def insertRoot(self, token):
+ element = self.createElement(token)
+ self.openElements.append(element)
+ self.document.appendChild(element)
+
+ def insertDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+
+ doctype = self.doctypeClass(name, publicId, systemId)
+ self.document.appendChild(doctype)
+
+ def insertComment(self, token, parent=None):
+ if parent is None:
+ parent = self.openElements[-1]
+ parent.appendChild(self.commentClass(token["data"]))
+
+ def createElement(self, token):
+ """Create an element but don't insert it anywhere"""
+ name = token["name"]
+ namespace = token.get("namespace", self.defaultNamespace)
+ element = self.elementClass(name, namespace)
+ element.attributes = token["data"]
+ return element
+
+ def _getInsertFromTable(self):
+ return self._insertFromTable
+
+ def _setInsertFromTable(self, value):
+ """Switch the function used to insert an element from the
+ normal one to the misnested table one and back again"""
+ self._insertFromTable = value
+ if value:
+ self.insertElement = self.insertElementTable
+ else:
+ self.insertElement = self.insertElementNormal
+
+ insertFromTable = property(_getInsertFromTable, _setInsertFromTable)
+
+ def insertElementNormal(self, token):
+ name = token["name"]
+ assert isinstance(name, text_type), "Element %s not unicode" % name
+ namespace = token.get("namespace", self.defaultNamespace)
+ element = self.elementClass(name, namespace)
+ element.attributes = token["data"]
+ self.openElements[-1].appendChild(element)
+ self.openElements.append(element)
+ return element
+
+ def insertElementTable(self, token):
+ """Create an element and insert it into the tree"""
+ element = self.createElement(token)
+ if self.openElements[-1].name not in tableInsertModeElements:
+ return self.insertElementNormal(token)
+ else:
+ # We should be in the InTable mode. This means we want to do
+ # special magic element rearranging
+ parent, insertBefore = self.getTableMisnestedNodePosition()
+ if insertBefore is None:
+ parent.appendChild(element)
+ else:
+ parent.insertBefore(element, insertBefore)
+ self.openElements.append(element)
+ return element
+
+ def insertText(self, data, parent=None):
+ """Insert text data."""
+ if parent is None:
+ parent = self.openElements[-1]
+
+ if (not self.insertFromTable or (self.insertFromTable and
+ self.openElements[-1].name
+ not in tableInsertModeElements)):
+ parent.insertText(data)
+ else:
+ # We should be in the InTable mode. This means we want to do
+ # special magic element rearranging
+ parent, insertBefore = self.getTableMisnestedNodePosition()
+ parent.insertText(data, insertBefore)
+
+ def getTableMisnestedNodePosition(self):
+ """Get the foster parent element, and sibling to insert before
+ (or None) when inserting a misnested table node"""
+ # The foster parent element is the one which comes before the most
+ # recently opened table element
+ # XXX - this is really inelegant
+ lastTable = None
+ fosterParent = None
+ insertBefore = None
+ for elm in self.openElements[::-1]:
+ if elm.name == "table":
+ lastTable = elm
+ break
+ if lastTable:
+ # XXX - we should really check that this parent is actually a
+ # node here
+ if lastTable.parent:
+ fosterParent = lastTable.parent
+ insertBefore = lastTable
+ else:
+ fosterParent = self.openElements[
+ self.openElements.index(lastTable) - 1]
+ else:
+ fosterParent = self.openElements[0]
+ return fosterParent, insertBefore
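+
+    # Worked example (annotation, not upstream code): while parsing
+    # "<table><b>x", the <b> start tag arrives while a <table> is open, so
+    # the element is misnested; this method then returns the table's parent
+    # as the foster parent and the table itself as insertBefore, and the
+    # <b> ends up immediately before the table in the tree.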
+
+ def generateImpliedEndTags(self, exclude=None):
+ name = self.openElements[-1].name
+ # XXX td, th and tr are not actually needed
+ if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and
+ name != exclude):
+ self.openElements.pop()
+ # XXX This is not entirely what the specification says. We should
+ # investigate it more closely.
+ self.generateImpliedEndTags(exclude)
+
+ def getDocument(self):
+ """Return the final tree"""
+ return self.document
+
+ def getFragment(self):
+ """Return the final fragment"""
+ # assert self.innerHTML
+ fragment = self.fragmentClass()
+ self.openElements[0].reparentChildren(fragment)
+ return fragment
+
+ def testSerializer(self, node):
+ """Serialize the subtree of node in the format required by unit tests
+
+ :arg node: the node from which to start serializing
+
+ """
+ raise NotImplementedError
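+
+
+# Usage sketch (annotation, not upstream code): concrete tree builders
+# subclass TreeBuilder and fill in documentClass, elementClass,
+# commentClass, doctypeClass and fragmentClass. Callers normally obtain
+# one through the treebuilders package rather than instantiating this
+# base class directly:
+#
+#     from html5lib import treebuilders
+#     TreeBuilderClass = treebuilders.getTreeBuilder("etree")
+#     builder = TreeBuilderClass(namespaceHTMLElements=True)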
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py
new file mode 100644
index 00000000..d8b53004
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py
@@ -0,0 +1,239 @@
+from __future__ import absolute_import, division, unicode_literals
+
+
+try:
+ from collections.abc import MutableMapping
+except ImportError: # Python 2.7
+ from collections import MutableMapping
+from xml.dom import minidom, Node
+import weakref
+
+from . import base
+from .. import constants
+from ..constants import namespaces
+from .._utils import moduleFactoryFactory
+
+
+def getDomBuilder(DomImplementation):
+ Dom = DomImplementation
+
+ class AttrList(MutableMapping):
+ def __init__(self, element):
+ self.element = element
+
+ def __iter__(self):
+ return iter(self.element.attributes.keys())
+
+ def __setitem__(self, name, value):
+ if isinstance(name, tuple):
+ raise NotImplementedError
+ else:
+ attr = self.element.ownerDocument.createAttribute(name)
+ attr.value = value
+ self.element.attributes[name] = attr
+
+ def __len__(self):
+ return len(self.element.attributes)
+
+ def items(self):
+ return list(self.element.attributes.items())
+
+ def values(self):
+ return list(self.element.attributes.values())
+
+ def __getitem__(self, name):
+ if isinstance(name, tuple):
+ raise NotImplementedError
+ else:
+ return self.element.attributes[name].value
+
+ def __delitem__(self, name):
+ if isinstance(name, tuple):
+ raise NotImplementedError
+ else:
+ del self.element.attributes[name]
+
+ class NodeBuilder(base.Node):
+ def __init__(self, element):
+ base.Node.__init__(self, element.nodeName)
+ self.element = element
+
+ namespace = property(lambda self: hasattr(self.element, "namespaceURI") and
+ self.element.namespaceURI or None)
+
+ def appendChild(self, node):
+ node.parent = self
+ self.element.appendChild(node.element)
+
+ def insertText(self, data, insertBefore=None):
+ text = self.element.ownerDocument.createTextNode(data)
+ if insertBefore:
+ self.element.insertBefore(text, insertBefore.element)
+ else:
+ self.element.appendChild(text)
+
+ def insertBefore(self, node, refNode):
+ self.element.insertBefore(node.element, refNode.element)
+ node.parent = self
+
+ def removeChild(self, node):
+ if node.element.parentNode == self.element:
+ self.element.removeChild(node.element)
+ node.parent = None
+
+ def reparentChildren(self, newParent):
+ while self.element.hasChildNodes():
+ child = self.element.firstChild
+ self.element.removeChild(child)
+ newParent.element.appendChild(child)
+ self.childNodes = []
+
+ def getAttributes(self):
+ return AttrList(self.element)
+
+ def setAttributes(self, attributes):
+ if attributes:
+ for name, value in list(attributes.items()):
+ if isinstance(name, tuple):
+ if name[0] is not None:
+ qualifiedName = (name[0] + ":" + name[1])
+ else:
+ qualifiedName = name[1]
+ self.element.setAttributeNS(name[2], qualifiedName,
+ value)
+ else:
+ self.element.setAttribute(
+ name, value)
+ attributes = property(getAttributes, setAttributes)
+
+ def cloneNode(self):
+ return NodeBuilder(self.element.cloneNode(False))
+
+ def hasContent(self):
+ return self.element.hasChildNodes()
+
+ def getNameTuple(self):
+ if self.namespace is None:
+ return namespaces["html"], self.name
+ else:
+ return self.namespace, self.name
+
+ nameTuple = property(getNameTuple)
+
+ class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable
+ def documentClass(self):
+ self.dom = Dom.getDOMImplementation().createDocument(None, None, None)
+ return weakref.proxy(self)
+
+ def insertDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+
+ domimpl = Dom.getDOMImplementation()
+ doctype = domimpl.createDocumentType(name, publicId, systemId)
+ self.document.appendChild(NodeBuilder(doctype))
+ if Dom == minidom:
+ doctype.ownerDocument = self.dom
+
+ def elementClass(self, name, namespace=None):
+ if namespace is None and self.defaultNamespace is None:
+ node = self.dom.createElement(name)
+ else:
+ node = self.dom.createElementNS(namespace, name)
+
+ return NodeBuilder(node)
+
+ def commentClass(self, data):
+ return NodeBuilder(self.dom.createComment(data))
+
+ def fragmentClass(self):
+ return NodeBuilder(self.dom.createDocumentFragment())
+
+ def appendChild(self, node):
+ self.dom.appendChild(node.element)
+
+ def testSerializer(self, element):
+ return testSerializer(element)
+
+ def getDocument(self):
+ return self.dom
+
+ def getFragment(self):
+ return base.TreeBuilder.getFragment(self).element
+
+ def insertText(self, data, parent=None):
+ if parent != self:
+ base.TreeBuilder.insertText(self, data, parent)
+ else:
+ # HACK: allow text nodes as children of the document node
+ if hasattr(self.dom, '_child_node_types'):
+ # pylint:disable=protected-access
+ if Node.TEXT_NODE not in self.dom._child_node_types:
+ self.dom._child_node_types = list(self.dom._child_node_types)
+ self.dom._child_node_types.append(Node.TEXT_NODE)
+ self.dom.appendChild(self.dom.createTextNode(data))
+
+ implementation = DomImplementation
+ name = None
+
+ def testSerializer(element):
+ element.normalize()
+ rv = []
+
+ def serializeElement(element, indent=0):
+ if element.nodeType == Node.DOCUMENT_TYPE_NODE:
+ if element.name:
+ if element.publicId or element.systemId:
+ publicId = element.publicId or ""
+ systemId = element.systemId or ""
+ rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
+ (' ' * indent, element.name, publicId, systemId))
+ else:
+ rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))
+ else:
+ rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
+ elif element.nodeType == Node.DOCUMENT_NODE:
+ rv.append("#document")
+ elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
+ rv.append("#document-fragment")
+ elif element.nodeType == Node.COMMENT_NODE:
+ rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))
+ elif element.nodeType == Node.TEXT_NODE:
+ rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))
+ else:
+ if (hasattr(element, "namespaceURI") and
+ element.namespaceURI is not None):
+ name = "%s %s" % (constants.prefixes[element.namespaceURI],
+ element.nodeName)
+ else:
+ name = element.nodeName
+ rv.append("|%s<%s>" % (' ' * indent, name))
+ if element.hasAttributes():
+ attributes = []
+ for i in range(len(element.attributes)):
+ attr = element.attributes.item(i)
+ name = attr.nodeName
+ value = attr.value
+ ns = attr.namespaceURI
+ if ns:
+ name = "%s %s" % (constants.prefixes[ns], attr.localName)
+ else:
+ name = attr.nodeName
+ attributes.append((name, value))
+
+ for name, value in sorted(attributes):
+ rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
+ indent += 2
+ for child in element.childNodes:
+ serializeElement(child, indent)
+ serializeElement(element, 0)
+
+ return "\n".join(rv)
+
+ return locals()
+
+
+# The actual means to get a module!
+getDomModule = moduleFactoryFactory(getDomBuilder)
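+
+
+# Minimal usage sketch (annotation, not upstream code). It assumes the
+# standalone ``html5lib`` distribution is importable; within pip the
+# package actually lives at ``pip._vendor.html5lib``.
+if __name__ == "__main__":
+    import html5lib
+
+    # Parse into an xml.dom.minidom tree via this module's TreeBuilder,
+    # then serialize it with the standard minidom API.
+    document = html5lib.parse("<p>Hello <b>world</b></p>", treebuilder="dom")
+    print(document.toxml())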
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py
new file mode 100644
index 00000000..0dedf441
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree.py
@@ -0,0 +1,340 @@
+from __future__ import absolute_import, division, unicode_literals
+# pylint:disable=protected-access
+
+from pip._vendor.six import text_type
+
+import re
+
+from . import base
+from .. import _ihatexml
+from .. import constants
+from ..constants import namespaces
+from .._utils import moduleFactoryFactory
+
+tag_regexp = re.compile("{([^}]*)}(.*)")
+
+
+def getETreeBuilder(ElementTreeImplementation, fullTree=False):
+ ElementTree = ElementTreeImplementation
+ ElementTreeCommentType = ElementTree.Comment("asd").tag
+
+ class Element(base.Node):
+ def __init__(self, name, namespace=None):
+ self._name = name
+ self._namespace = namespace
+ self._element = ElementTree.Element(self._getETreeTag(name,
+ namespace))
+ if namespace is None:
+ self.nameTuple = namespaces["html"], self._name
+ else:
+ self.nameTuple = self._namespace, self._name
+ self.parent = None
+ self._childNodes = []
+ self._flags = []
+
+ def _getETreeTag(self, name, namespace):
+ if namespace is None:
+ etree_tag = name
+ else:
+ etree_tag = "{%s}%s" % (namespace, name)
+ return etree_tag
+
+ def _setName(self, name):
+ self._name = name
+ self._element.tag = self._getETreeTag(self._name, self._namespace)
+
+ def _getName(self):
+ return self._name
+
+ name = property(_getName, _setName)
+
+ def _setNamespace(self, namespace):
+ self._namespace = namespace
+ self._element.tag = self._getETreeTag(self._name, self._namespace)
+
+ def _getNamespace(self):
+ return self._namespace
+
+ namespace = property(_getNamespace, _setNamespace)
+
+ def _getAttributes(self):
+ return self._element.attrib
+
+ def _setAttributes(self, attributes):
+ # Delete existing attributes first
+ # XXX - there may be a better way to do this...
+ for key in list(self._element.attrib.keys()):
+ del self._element.attrib[key]
+ for key, value in attributes.items():
+ if isinstance(key, tuple):
+ name = "{%s}%s" % (key[2], key[1])
+ else:
+ name = key
+ self._element.set(name, value)
+
+ attributes = property(_getAttributes, _setAttributes)
+
+ def _getChildNodes(self):
+ return self._childNodes
+
+ def _setChildNodes(self, value):
+ del self._element[:]
+ self._childNodes = []
+ for element in value:
+ self.insertChild(element)
+
+ childNodes = property(_getChildNodes, _setChildNodes)
+
+ def hasContent(self):
+ """Return true if the node has children or text"""
+ return bool(self._element.text or len(self._element))
+
+ def appendChild(self, node):
+ self._childNodes.append(node)
+ self._element.append(node._element)
+ node.parent = self
+
+ def insertBefore(self, node, refNode):
+ index = list(self._element).index(refNode._element)
+ self._element.insert(index, node._element)
+ node.parent = self
+
+ def removeChild(self, node):
+ self._childNodes.remove(node)
+ self._element.remove(node._element)
+ node.parent = None
+
+ def insertText(self, data, insertBefore=None):
+            if not len(self._element):
+ if not self._element.text:
+ self._element.text = ""
+ self._element.text += data
+ elif insertBefore is None:
+ # Insert the text as the tail of the last child element
+ if not self._element[-1].tail:
+ self._element[-1].tail = ""
+ self._element[-1].tail += data
+ else:
+ # Insert the text before the specified node
+ children = list(self._element)
+ index = children.index(insertBefore._element)
+ if index > 0:
+ if not self._element[index - 1].tail:
+ self._element[index - 1].tail = ""
+ self._element[index - 1].tail += data
+ else:
+ if not self._element.text:
+ self._element.text = ""
+ self._element.text += data
+
+ def cloneNode(self):
+ element = type(self)(self.name, self.namespace)
+ for name, value in self.attributes.items():
+ element.attributes[name] = value
+ return element
+
+ def reparentChildren(self, newParent):
+ if newParent.childNodes:
+ newParent.childNodes[-1]._element.tail += self._element.text
+ else:
+ if not newParent._element.text:
+ newParent._element.text = ""
+ if self._element.text is not None:
+ newParent._element.text += self._element.text
+ self._element.text = ""
+ base.Node.reparentChildren(self, newParent)
+
+ class Comment(Element):
+ def __init__(self, data):
+ # Use the superclass constructor to set all properties on the
+ # wrapper element
+ self._element = ElementTree.Comment(data)
+ self.parent = None
+ self._childNodes = []
+ self._flags = []
+
+ def _getData(self):
+ return self._element.text
+
+ def _setData(self, value):
+ self._element.text = value
+
+ data = property(_getData, _setData)
+
+ class DocumentType(Element):
+ def __init__(self, name, publicId, systemId):
+ Element.__init__(self, "<!DOCTYPE>")
+ self._element.text = name
+ self.publicId = publicId
+ self.systemId = systemId
+
+ def _getPublicId(self):
+ return self._element.get("publicId", "")
+
+ def _setPublicId(self, value):
+ if value is not None:
+ self._element.set("publicId", value)
+
+ publicId = property(_getPublicId, _setPublicId)
+
+ def _getSystemId(self):
+ return self._element.get("systemId", "")
+
+ def _setSystemId(self, value):
+ if value is not None:
+ self._element.set("systemId", value)
+
+ systemId = property(_getSystemId, _setSystemId)
+
+ class Document(Element):
+ def __init__(self):
+ Element.__init__(self, "DOCUMENT_ROOT")
+
+ class DocumentFragment(Element):
+ def __init__(self):
+ Element.__init__(self, "DOCUMENT_FRAGMENT")
+
+ def testSerializer(element):
+ rv = []
+
+ def serializeElement(element, indent=0):
+            if not hasattr(element, "tag"):
+ element = element.getroot()
+ if element.tag == "<!DOCTYPE>":
+ if element.get("publicId") or element.get("systemId"):
+ publicId = element.get("publicId") or ""
+ systemId = element.get("systemId") or ""
+ rv.append("""<!DOCTYPE %s "%s" "%s">""" %
+ (element.text, publicId, systemId))
+ else:
+ rv.append("<!DOCTYPE %s>" % (element.text,))
+ elif element.tag == "DOCUMENT_ROOT":
+ rv.append("#document")
+ if element.text is not None:
+ rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
+ if element.tail is not None:
+ raise TypeError("Document node cannot have tail")
+ if hasattr(element, "attrib") and len(element.attrib):
+ raise TypeError("Document node cannot have attributes")
+ elif element.tag == ElementTreeCommentType:
+ rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
+ else:
+ assert isinstance(element.tag, text_type), \
+ "Expected unicode, got %s, %s" % (type(element.tag), element.tag)
+ nsmatch = tag_regexp.match(element.tag)
+
+ if nsmatch is None:
+ name = element.tag
+ else:
+ ns, name = nsmatch.groups()
+ prefix = constants.prefixes[ns]
+ name = "%s %s" % (prefix, name)
+ rv.append("|%s<%s>" % (' ' * indent, name))
+
+ if hasattr(element, "attrib"):
+ attributes = []
+ for name, value in element.attrib.items():
+ nsmatch = tag_regexp.match(name)
+ if nsmatch is not None:
+ ns, name = nsmatch.groups()
+ prefix = constants.prefixes[ns]
+ attr_string = "%s %s" % (prefix, name)
+ else:
+ attr_string = name
+ attributes.append((attr_string, value))
+
+ for name, value in sorted(attributes):
+ rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
+ if element.text:
+ rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
+ indent += 2
+ for child in element:
+ serializeElement(child, indent)
+ if element.tail:
+ rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
+ serializeElement(element, 0)
+
+ return "\n".join(rv)
+
+ def tostring(element): # pylint:disable=unused-variable
+ """Serialize an element and its child nodes to a string"""
+ rv = []
+ filter = _ihatexml.InfosetFilter()
+
+ def serializeElement(element):
+ if isinstance(element, ElementTree.ElementTree):
+ element = element.getroot()
+
+ if element.tag == "<!DOCTYPE>":
+ if element.get("publicId") or element.get("systemId"):
+ publicId = element.get("publicId") or ""
+ systemId = element.get("systemId") or ""
+ rv.append("""<!DOCTYPE %s PUBLIC "%s" "%s">""" %
+ (element.text, publicId, systemId))
+ else:
+ rv.append("<!DOCTYPE %s>" % (element.text,))
+ elif element.tag == "DOCUMENT_ROOT":
+ if element.text is not None:
+ rv.append(element.text)
+ if element.tail is not None:
+ raise TypeError("Document node cannot have tail")
+ if hasattr(element, "attrib") and len(element.attrib):
+ raise TypeError("Document node cannot have attributes")
+
+ for child in element:
+ serializeElement(child)
+
+ elif element.tag == ElementTreeCommentType:
+ rv.append("<!--%s-->" % (element.text,))
+ else:
+ # This is assumed to be an ordinary element
+ if not element.attrib:
+ rv.append("<%s>" % (filter.fromXmlName(element.tag),))
+ else:
+ attr = " ".join(["%s=\"%s\"" % (
+ filter.fromXmlName(name), value)
+ for name, value in element.attrib.items()])
+ rv.append("<%s %s>" % (element.tag, attr))
+ if element.text:
+ rv.append(element.text)
+
+ for child in element:
+ serializeElement(child)
+
+ rv.append("</%s>" % (element.tag,))
+
+ if element.tail:
+ rv.append(element.tail)
+
+ serializeElement(element)
+
+ return "".join(rv)
+
+ class TreeBuilder(base.TreeBuilder): # pylint:disable=unused-variable
+ documentClass = Document
+ doctypeClass = DocumentType
+ elementClass = Element
+ commentClass = Comment
+ fragmentClass = DocumentFragment
+ implementation = ElementTreeImplementation
+
+ def testSerializer(self, element):
+ return testSerializer(element)
+
+ def getDocument(self):
+ if fullTree:
+ return self.document._element
+ else:
+ if self.defaultNamespace is not None:
+ return self.document._element.find(
+ "{%s}html" % self.defaultNamespace)
+ else:
+ return self.document._element.find("html")
+
+ def getFragment(self):
+ return base.TreeBuilder.getFragment(self)._element
+
+ return locals()
+
+
+getETreeModule = moduleFactoryFactory(getETreeBuilder)
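+
+
+# Minimal usage sketch (annotation, not upstream code; assumes the
+# standalone ``html5lib`` distribution -- under pip it is
+# ``pip._vendor.html5lib``).
+if __name__ == "__main__":
+    import xml.etree.ElementTree as ElementTree
+
+    import html5lib
+
+    # "etree" is the default tree builder; the parse result is the <html>
+    # element, since getTreeBuilder leaves fullTree at its default of False.
+    root = html5lib.parse("<p>x</p>", namespaceHTMLElements=False)
+    print(ElementTree.tostring(root, encoding="unicode"))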
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py
new file mode 100644
index 00000000..ca12a99c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py
@@ -0,0 +1,366 @@
+"""Module for supporting the lxml.etree library. The idea here is to use as much
+of the native library as possible, without using fragile hacks like custom element
+names that break between releases. The downside of this is that we cannot represent
+all possible trees; specifically the following are known to cause problems:
+
+Text or comments as siblings of the root element
+Doctypes with no name
+
+When any of these things occur, we emit a DataLossWarning
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+# pylint:disable=protected-access
+
+import warnings
+import re
+import sys
+
+from . import base
+from ..constants import DataLossWarning
+from .. import constants
+from . import etree as etree_builders
+from .. import _ihatexml
+
+import lxml.etree as etree
+
+
+fullTree = True
+tag_regexp = re.compile("{([^}]*)}(.*)")
+
+comment_type = etree.Comment("asd").tag
+
+
+class DocumentType(object):
+ def __init__(self, name, publicId, systemId):
+ self.name = name
+ self.publicId = publicId
+ self.systemId = systemId
+
+
+class Document(object):
+ def __init__(self):
+ self._elementTree = None
+ self._childNodes = []
+
+ def appendChild(self, element):
+ self._elementTree.getroot().addnext(element._element)
+
+ def _getChildNodes(self):
+ return self._childNodes
+
+ childNodes = property(_getChildNodes)
+
+
+def testSerializer(element):
+ rv = []
+ infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)
+
+ def serializeElement(element, indent=0):
+ if not hasattr(element, "tag"):
+ if hasattr(element, "getroot"):
+ # Full tree case
+ rv.append("#document")
+ if element.docinfo.internalDTD:
+ if not (element.docinfo.public_id or
+ element.docinfo.system_url):
+ dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
+ else:
+ dtd_str = """<!DOCTYPE %s "%s" "%s">""" % (
+ element.docinfo.root_name,
+ element.docinfo.public_id,
+ element.docinfo.system_url)
+ rv.append("|%s%s" % (' ' * (indent + 2), dtd_str))
+ next_element = element.getroot()
+ while next_element.getprevious() is not None:
+ next_element = next_element.getprevious()
+ while next_element is not None:
+ serializeElement(next_element, indent + 2)
+ next_element = next_element.getnext()
+ elif isinstance(element, str) or isinstance(element, bytes):
+ # Text in a fragment
+ assert isinstance(element, str) or sys.version_info[0] == 2
+ rv.append("|%s\"%s\"" % (' ' * indent, element))
+ else:
+ # Fragment case
+ rv.append("#document-fragment")
+ for next_element in element:
+ serializeElement(next_element, indent + 2)
+ elif element.tag == comment_type:
+ rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
+ if hasattr(element, "tail") and element.tail:
+ rv.append("|%s\"%s\"" % (' ' * indent, element.tail))
+ else:
+ assert isinstance(element, etree._Element)
+ nsmatch = etree_builders.tag_regexp.match(element.tag)
+ if nsmatch is not None:
+ ns = nsmatch.group(1)
+ tag = nsmatch.group(2)
+ prefix = constants.prefixes[ns]
+ rv.append("|%s<%s %s>" % (' ' * indent, prefix,
+ infosetFilter.fromXmlName(tag)))
+ else:
+ rv.append("|%s<%s>" % (' ' * indent,
+ infosetFilter.fromXmlName(element.tag)))
+
+ if hasattr(element, "attrib"):
+ attributes = []
+ for name, value in element.attrib.items():
+ nsmatch = tag_regexp.match(name)
+ if nsmatch is not None:
+ ns, name = nsmatch.groups()
+ name = infosetFilter.fromXmlName(name)
+ prefix = constants.prefixes[ns]
+ attr_string = "%s %s" % (prefix, name)
+ else:
+ attr_string = infosetFilter.fromXmlName(name)
+ attributes.append((attr_string, value))
+
+ for name, value in sorted(attributes):
+ rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
+
+ if element.text:
+ rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
+ indent += 2
+ for child in element:
+ serializeElement(child, indent)
+ if hasattr(element, "tail") and element.tail:
+ rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
+ serializeElement(element, 0)
+
+ return "\n".join(rv)
+
+
+def tostring(element):
+ """Serialize an element and its child nodes to a string"""
+ rv = []
+
+ def serializeElement(element):
+ if not hasattr(element, "tag"):
+ if element.docinfo.internalDTD:
+ if element.docinfo.doctype:
+ dtd_str = element.docinfo.doctype
+ else:
+ dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
+ rv.append(dtd_str)
+ serializeElement(element.getroot())
+
+ elif element.tag == comment_type:
+ rv.append("<!--%s-->" % (element.text,))
+
+ else:
+ # This is assumed to be an ordinary element
+ if not element.attrib:
+ rv.append("<%s>" % (element.tag,))
+ else:
+ attr = " ".join(["%s=\"%s\"" % (name, value)
+ for name, value in element.attrib.items()])
+ rv.append("<%s %s>" % (element.tag, attr))
+ if element.text:
+ rv.append(element.text)
+
+ for child in element:
+ serializeElement(child)
+
+ rv.append("</%s>" % (element.tag,))
+
+ if hasattr(element, "tail") and element.tail:
+ rv.append(element.tail)
+
+ serializeElement(element)
+
+ return "".join(rv)
+
+
+class TreeBuilder(base.TreeBuilder):
+ documentClass = Document
+ doctypeClass = DocumentType
+ elementClass = None
+ commentClass = None
+ fragmentClass = Document
+ implementation = etree
+
+ def __init__(self, namespaceHTMLElements, fullTree=False):
+ builder = etree_builders.getETreeModule(etree, fullTree=fullTree)
+ infosetFilter = self.infosetFilter = _ihatexml.InfosetFilter(preventDoubleDashComments=True)
+ self.namespaceHTMLElements = namespaceHTMLElements
+
+ class Attributes(dict):
+ def __init__(self, element, value=None):
+ if value is None:
+ value = {}
+ self._element = element
+ dict.__init__(self, value) # pylint:disable=non-parent-init-called
+ for key, value in self.items():
+ if isinstance(key, tuple):
+ name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
+ else:
+ name = infosetFilter.coerceAttribute(key)
+ self._element._element.attrib[name] = value
+
+ def __setitem__(self, key, value):
+ dict.__setitem__(self, key, value)
+ if isinstance(key, tuple):
+ name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
+ else:
+ name = infosetFilter.coerceAttribute(key)
+ self._element._element.attrib[name] = value
+
+ class Element(builder.Element):
+ def __init__(self, name, namespace):
+ name = infosetFilter.coerceElement(name)
+ builder.Element.__init__(self, name, namespace=namespace)
+ self._attributes = Attributes(self)
+
+ def _setName(self, name):
+ self._name = infosetFilter.coerceElement(name)
+ self._element.tag = self._getETreeTag(
+ self._name, self._namespace)
+
+ def _getName(self):
+ return infosetFilter.fromXmlName(self._name)
+
+ name = property(_getName, _setName)
+
+ def _getAttributes(self):
+ return self._attributes
+
+ def _setAttributes(self, attributes):
+ self._attributes = Attributes(self, attributes)
+
+ attributes = property(_getAttributes, _setAttributes)
+
+ def insertText(self, data, insertBefore=None):
+ data = infosetFilter.coerceCharacters(data)
+ builder.Element.insertText(self, data, insertBefore)
+
+ def appendChild(self, child):
+ builder.Element.appendChild(self, child)
+
+ class Comment(builder.Comment):
+ def __init__(self, data):
+ data = infosetFilter.coerceComment(data)
+ builder.Comment.__init__(self, data)
+
+ def _setData(self, data):
+ data = infosetFilter.coerceComment(data)
+ self._element.text = data
+
+ def _getData(self):
+ return self._element.text
+
+ data = property(_getData, _setData)
+
+ self.elementClass = Element
+ self.commentClass = Comment
+ # self.fragmentClass = builder.DocumentFragment
+ base.TreeBuilder.__init__(self, namespaceHTMLElements)
+
+ def reset(self):
+ base.TreeBuilder.reset(self)
+ self.insertComment = self.insertCommentInitial
+ self.initial_comments = []
+ self.doctype = None
+
+ def testSerializer(self, element):
+ return testSerializer(element)
+
+ def getDocument(self):
+ if fullTree:
+ return self.document._elementTree
+ else:
+ return self.document._elementTree.getroot()
+
+ def getFragment(self):
+ fragment = []
+ element = self.openElements[0]._element
+ if element.text:
+ fragment.append(element.text)
+ fragment.extend(list(element))
+ if element.tail:
+ fragment.append(element.tail)
+ return fragment
+
+ def insertDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+
+ if not name:
+ warnings.warn("lxml cannot represent empty doctype", DataLossWarning)
+ self.doctype = None
+ else:
+ coercedName = self.infosetFilter.coerceElement(name)
+ if coercedName != name:
+ warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning)
+
+ doctype = self.doctypeClass(coercedName, publicId, systemId)
+ self.doctype = doctype
+
+ def insertCommentInitial(self, data, parent=None):
+ assert parent is None or parent is self.document
+ assert self.document._elementTree is None
+ self.initial_comments.append(data)
+
+ def insertCommentMain(self, data, parent=None):
+ if (parent == self.document and
+ self.document._elementTree.getroot()[-1].tag == comment_type):
+ warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning)
+ super(TreeBuilder, self).insertComment(data, parent)
+
+ def insertRoot(self, token):
+ # Because of the way libxml2 works, it doesn't seem to be possible to
+ # alter information like the doctype after the tree has been parsed.
+ # Therefore we need to use the built-in parser to create our initial
+ # tree, after which we can add elements like normal
+ docStr = ""
+ if self.doctype:
+ assert self.doctype.name
+ docStr += "<!DOCTYPE %s" % self.doctype.name
+ if (self.doctype.publicId is not None or
+ self.doctype.systemId is not None):
+ docStr += (' PUBLIC "%s" ' %
+ (self.infosetFilter.coercePubid(self.doctype.publicId or "")))
+ if self.doctype.systemId:
+ sysid = self.doctype.systemId
+ if sysid.find("'") >= 0 and sysid.find('"') >= 0:
+ warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning)
+ sysid = sysid.replace("'", 'U00027')
+ if sysid.find("'") >= 0:
+ docStr += '"%s"' % sysid
+ else:
+ docStr += "'%s'" % sysid
+ else:
+ docStr += "''"
+ docStr += ">"
+ if self.doctype.name != token["name"]:
+ warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning)
+ docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>"
+ root = etree.fromstring(docStr)
+
+ # Append the initial comments:
+ for comment_token in self.initial_comments:
+ comment = self.commentClass(comment_token["data"])
+ root.addprevious(comment._element)
+
+ # Create the root document and add the ElementTree to it
+ self.document = self.documentClass()
+ self.document._elementTree = root.getroottree()
+
+ # Give the root element the right name
+ name = token["name"]
+ namespace = token.get("namespace", self.defaultNamespace)
+ if namespace is None:
+ etree_tag = name
+ else:
+ etree_tag = "{%s}%s" % (namespace, name)
+ root.tag = etree_tag
+
+ # Add the root element to the internal child/open data structures
+ root_element = self.elementClass(name, namespace)
+ root_element._element = root
+ self.document._childNodes.append(root_element)
+ self.openElements.append(root_element)
+
+ # Reset to the default insert comment function
+ self.insertComment = self.insertCommentMain
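+
+
+# Minimal usage sketch (annotation, not upstream code; assumes the
+# standalone ``html5lib`` distribution plus lxml are installed).
+if __name__ == "__main__":
+    import html5lib
+    import lxml.etree
+
+    # treebuilder="lxml" selects this module; because the module-level
+    # fullTree flag is True, parse() returns the full lxml ElementTree,
+    # doctype included.
+    tree = html5lib.parse("<!DOCTYPE html><p>x</p>", treebuilder="lxml")
+    print(lxml.etree.tostring(tree))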
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py
new file mode 100644
index 00000000..9bec2076
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py
@@ -0,0 +1,154 @@
+"""A collection of modules for iterating through different kinds of
+tree, generating tokens identical to those produced by the tokenizer
+module.
+
+To create a tree walker for a new type of tree, you need to
+implement a tree walker object (called TreeWalker by convention) that
+implements a 'serialize' method taking a tree as sole argument and
+returning an iterator generating tokens.
+"""
+
+from __future__ import absolute_import, division, unicode_literals
+
+from .. import constants
+from .._utils import default_etree
+
+__all__ = ["getTreeWalker", "pprint"]
+
+treeWalkerCache = {}
+
+
+def getTreeWalker(treeType, implementation=None, **kwargs):
+ """Get a TreeWalker class for various types of tree with built-in support
+
+ :arg str treeType: the name of the tree type required (case-insensitive).
+ Supported values are:
+
+ * "dom": The xml.dom.minidom DOM implementation
+ * "etree": A generic walker for tree implementations exposing an
+ elementtree-like interface (known to work with ElementTree,
+ cElementTree and lxml.etree).
+ * "lxml": Optimized walker for lxml.etree
+ * "genshi": a Genshi stream
+
+ :arg implementation: A module implementing the tree type e.g.
+ xml.etree.ElementTree or cElementTree (Currently applies to the "etree"
+ tree type only).
+
+ :arg kwargs: keyword arguments passed to the etree walker--for other
+ walkers, this has no effect
+
+ :returns: a TreeWalker class
+
+ """
+
+ treeType = treeType.lower()
+ if treeType not in treeWalkerCache:
+ if treeType == "dom":
+ from . import dom
+ treeWalkerCache[treeType] = dom.TreeWalker
+ elif treeType == "genshi":
+ from . import genshi
+ treeWalkerCache[treeType] = genshi.TreeWalker
+ elif treeType == "lxml":
+ from . import etree_lxml
+ treeWalkerCache[treeType] = etree_lxml.TreeWalker
+ elif treeType == "etree":
+ from . import etree
+ if implementation is None:
+ implementation = default_etree
+ # XXX: NEVER cache here, caching is done in the etree submodule
+ return etree.getETreeModule(implementation, **kwargs).TreeWalker
+ return treeWalkerCache.get(treeType)
+
+
+def concatenateCharacterTokens(tokens):
+ pendingCharacters = []
+ for token in tokens:
+ type = token["type"]
+ if type in ("Characters", "SpaceCharacters"):
+ pendingCharacters.append(token["data"])
+ else:
+ if pendingCharacters:
+ yield {"type": "Characters", "data": "".join(pendingCharacters)}
+ pendingCharacters = []
+ yield token
+ if pendingCharacters:
+ yield {"type": "Characters", "data": "".join(pendingCharacters)}
+
+
+def pprint(walker):
+ """Pretty printer for tree walkers
+
+ Takes a TreeWalker instance and pretty prints the output of walking the tree.
+
+ :arg walker: a TreeWalker instance
+
+ """
+ output = []
+ indent = 0
+ for token in concatenateCharacterTokens(walker):
+ type = token["type"]
+ if type in ("StartTag", "EmptyTag"):
+ # tag name
+ if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
+ if token["namespace"] in constants.prefixes:
+ ns = constants.prefixes[token["namespace"]]
+ else:
+ ns = token["namespace"]
+ name = "%s %s" % (ns, token["name"])
+ else:
+ name = token["name"]
+ output.append("%s<%s>" % (" " * indent, name))
+ indent += 2
+ # attributes (sorted for consistent ordering)
+ attrs = token["data"]
+ for (namespace, localname), value in sorted(attrs.items()):
+ if namespace:
+ if namespace in constants.prefixes:
+ ns = constants.prefixes[namespace]
+ else:
+ ns = namespace
+ name = "%s %s" % (ns, localname)
+ else:
+ name = localname
+ output.append("%s%s=\"%s\"" % (" " * indent, name, value))
+ # self-closing
+ if type == "EmptyTag":
+ indent -= 2
+
+ elif type == "EndTag":
+ indent -= 2
+
+ elif type == "Comment":
+ output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
+
+ elif type == "Doctype":
+ if token["name"]:
+ if token["publicId"]:
+ output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
+ (" " * indent,
+ token["name"],
+ token["publicId"],
+ token["systemId"] if token["systemId"] else ""))
+ elif token["systemId"]:
+ output.append("""%s<!DOCTYPE %s "" "%s">""" %
+ (" " * indent,
+ token["name"],
+ token["systemId"]))
+ else:
+ output.append("%s<!DOCTYPE %s>" % (" " * indent,
+ token["name"]))
+ else:
+ output.append("%s<!DOCTYPE >" % (" " * indent,))
+
+ elif type == "Characters":
+ output.append("%s\"%s\"" % (" " * indent, token["data"]))
+
+ elif type == "SpaceCharacters":
+ assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
+
+ else:
+ raise ValueError("Unknown token type, %s" % type)
+
+ return "\n".join(output)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..e33f4ae2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc
new file mode 100644
index 00000000..a3e9c3c9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc
new file mode 100644
index 00000000..3e3407de
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc
new file mode 100644
index 00000000..23bc58c5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc
new file mode 100644
index 00000000..730e09a6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc
new file mode 100644
index 00000000..918c8cdd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py
new file mode 100644
index 00000000..80c474c4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py
@@ -0,0 +1,252 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from xml.dom import Node
+from ..constants import namespaces, voidElements, spaceCharacters
+
+__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN",
+ "TreeWalker", "NonRecursiveTreeWalker"]
+
+DOCUMENT = Node.DOCUMENT_NODE
+DOCTYPE = Node.DOCUMENT_TYPE_NODE
+TEXT = Node.TEXT_NODE
+ELEMENT = Node.ELEMENT_NODE
+COMMENT = Node.COMMENT_NODE
+ENTITY = Node.ENTITY_NODE
+UNKNOWN = "<#UNKNOWN#>"
+
+spaceCharacters = "".join(spaceCharacters)
+
+
+class TreeWalker(object):
+ """Walks a tree yielding tokens
+
+ Tokens are dicts that all have a ``type`` field specifying the type of the
+ token.
+
+ """
+ def __init__(self, tree):
+ """Creates a TreeWalker
+
+ :arg tree: the tree to walk
+
+ """
+ self.tree = tree
+
+ def __iter__(self):
+ raise NotImplementedError
+
+ def error(self, msg):
+ """Generates an error token with the given message
+
+ :arg msg: the error message
+
+ :returns: SerializeError token
+
+ """
+ return {"type": "SerializeError", "data": msg}
+
+ def emptyTag(self, namespace, name, attrs, hasChildren=False):
+ """Generates an EmptyTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :arg attrs: the attributes of the element as a dict
+
+        :arg hasChildren: whether or not the element actually has children;
+            if it does, a SerializeError token is also yielded, since void
+            elements shouldn't have children
+
+ :returns: EmptyTag token
+
+ """
+ yield {"type": "EmptyTag", "name": name,
+ "namespace": namespace,
+ "data": attrs}
+ if hasChildren:
+ yield self.error("Void element has children")
+
+ def startTag(self, namespace, name, attrs):
+ """Generates a StartTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :arg attrs: the attributes of the element as a dict
+
+ :returns: StartTag token
+
+ """
+ return {"type": "StartTag",
+ "name": name,
+ "namespace": namespace,
+ "data": attrs}
+
+ def endTag(self, namespace, name):
+ """Generates an EndTag token
+
+ :arg namespace: the namespace of the token--can be ``None``
+
+ :arg name: the name of the element
+
+ :returns: EndTag token
+
+ """
+ return {"type": "EndTag",
+ "name": name,
+ "namespace": namespace}
+
+ def text(self, data):
+ """Generates SpaceCharacters and Characters tokens
+
+ Depending on what's in the data, this generates one or more
+ ``SpaceCharacters`` and ``Characters`` tokens.
+
+ For example:
+
+ >>> from html5lib.treewalkers.base import TreeWalker
+ >>> # Give it an empty tree just so it instantiates
+ >>> walker = TreeWalker([])
+ >>> list(walker.text(''))
+ []
+ >>> list(walker.text(' '))
+ [{u'data': ' ', u'type': u'SpaceCharacters'}]
+ >>> list(walker.text(' abc ')) # doctest: +NORMALIZE_WHITESPACE
+ [{u'data': ' ', u'type': u'SpaceCharacters'},
+ {u'data': u'abc', u'type': u'Characters'},
+ {u'data': u' ', u'type': u'SpaceCharacters'}]
+
+ :arg data: the text data
+
+ :returns: one or more ``SpaceCharacters`` and ``Characters`` tokens
+
+ """
+ middle = data.lstrip(spaceCharacters)
+ left = data[:len(data) - len(middle)]
+ if left:
+ yield {"type": "SpaceCharacters", "data": left}
+ data = middle
+ middle = data.rstrip(spaceCharacters)
+ right = data[len(middle):]
+ if middle:
+ yield {"type": "Characters", "data": middle}
+ if right:
+ yield {"type": "SpaceCharacters", "data": right}
+
+ def comment(self, data):
+ """Generates a Comment token
+
+ :arg data: the comment
+
+ :returns: Comment token
+
+ """
+ return {"type": "Comment", "data": data}
+
+ def doctype(self, name, publicId=None, systemId=None):
+ """Generates a Doctype token
+
+ :arg name:
+
+ :arg publicId:
+
+ :arg systemId:
+
+ :returns: the Doctype token
+
+ """
+ return {"type": "Doctype",
+ "name": name,
+ "publicId": publicId,
+ "systemId": systemId}
+
+ def entity(self, name):
+ """Generates an Entity token
+
+ :arg name: the entity name
+
+ :returns: an Entity token
+
+ """
+ return {"type": "Entity", "name": name}
+
+ def unknown(self, nodeType):
+ """Handles unknown node types"""
+ return self.error("Unknown node type: " + nodeType)
+
+
+class NonRecursiveTreeWalker(TreeWalker):
+ def getNodeDetails(self, node):
+ raise NotImplementedError
+
+ def getFirstChild(self, node):
+ raise NotImplementedError
+
+ def getNextSibling(self, node):
+ raise NotImplementedError
+
+ def getParentNode(self, node):
+ raise NotImplementedError
+
+ def __iter__(self):
+ currentNode = self.tree
+ while currentNode is not None:
+ details = self.getNodeDetails(currentNode)
+ type, details = details[0], details[1:]
+ hasChildren = False
+
+ if type == DOCTYPE:
+ yield self.doctype(*details)
+
+ elif type == TEXT:
+ for token in self.text(*details):
+ yield token
+
+ elif type == ELEMENT:
+ namespace, name, attributes, hasChildren = details
+ if (not namespace or namespace == namespaces["html"]) and name in voidElements:
+ for token in self.emptyTag(namespace, name, attributes,
+ hasChildren):
+ yield token
+ hasChildren = False
+ else:
+ yield self.startTag(namespace, name, attributes)
+
+ elif type == COMMENT:
+ yield self.comment(details[0])
+
+ elif type == ENTITY:
+ yield self.entity(details[0])
+
+ elif type == DOCUMENT:
+ hasChildren = True
+
+ else:
+ yield self.unknown(details[0])
+
+ if hasChildren:
+ firstChild = self.getFirstChild(currentNode)
+ else:
+ firstChild = None
+
+ if firstChild is not None:
+ currentNode = firstChild
+ else:
+ while currentNode is not None:
+ details = self.getNodeDetails(currentNode)
+ type, details = details[0], details[1:]
+ if type == ELEMENT:
+ namespace, name, attributes, hasChildren = details
+ if (namespace and namespace != namespaces["html"]) or name not in voidElements:
+ yield self.endTag(namespace, name)
+ if self.tree is currentNode:
+ currentNode = None
+ break
+ nextSibling = self.getNextSibling(currentNode)
+ if nextSibling is not None:
+ currentNode = nextSibling
+ break
+ else:
+ currentNode = self.getParentNode(currentNode)
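+
+
+# Subclassing sketch (annotation, not upstream code): a concrete walker
+# only has to answer four structural questions about its tree type, and
+# __iter__ above turns the answers into the token stream. The DOM walker,
+# for instance, maps them roughly as:
+#
+#     getNodeDetails(node)  -> (ELEMENT, node.namespaceURI, node.nodeName,
+#                               attrs, node.hasChildNodes())
+#     getFirstChild(node)   -> node.firstChild
+#     getNextSibling(node)  -> node.nextSibling
+#     getParentNode(node)   -> node.parentNode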
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.py
new file mode 100644
index 00000000..b0c89b00
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/dom.py
@@ -0,0 +1,43 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from xml.dom import Node
+
+from . import base
+
+
+class TreeWalker(base.NonRecursiveTreeWalker):
+ def getNodeDetails(self, node):
+ if node.nodeType == Node.DOCUMENT_TYPE_NODE:
+ return base.DOCTYPE, node.name, node.publicId, node.systemId
+
+ elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
+ return base.TEXT, node.nodeValue
+
+ elif node.nodeType == Node.ELEMENT_NODE:
+ attrs = {}
+ for attr in list(node.attributes.keys()):
+ attr = node.getAttributeNode(attr)
+ if attr.namespaceURI:
+ attrs[(attr.namespaceURI, attr.localName)] = attr.value
+ else:
+ attrs[(None, attr.name)] = attr.value
+ return (base.ELEMENT, node.namespaceURI, node.nodeName,
+ attrs, node.hasChildNodes())
+
+ elif node.nodeType == Node.COMMENT_NODE:
+ return base.COMMENT, node.nodeValue
+
+ elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
+ return (base.DOCUMENT,)
+
+ else:
+ return base.UNKNOWN, node.nodeType
+
+ def getFirstChild(self, node):
+ return node.firstChild
+
+ def getNextSibling(self, node):
+ return node.nextSibling
+
+ def getParentNode(self, node):
+ return node.parentNode
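+
+
+# Minimal usage sketch (annotation, not upstream code): any minidom tree
+# can be walked, whether it came from html5lib or from minidom itself.
+if __name__ == "__main__":
+    from xml.dom.minidom import parseString
+
+    dom_tree = parseString("<p>Hello</p>")
+    for token in TreeWalker(dom_tree):
+        print(token)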
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py
new file mode 100644
index 00000000..95fc0c17
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree.py
@@ -0,0 +1,130 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from collections import OrderedDict
+import re
+
+from pip._vendor.six import string_types
+
+from . import base
+from .._utils import moduleFactoryFactory
+
+tag_regexp = re.compile("{([^}]*)}(.*)")
+
+
+def getETreeBuilder(ElementTreeImplementation):
+ ElementTree = ElementTreeImplementation
+ ElementTreeCommentType = ElementTree.Comment("asd").tag
+
+ class TreeWalker(base.NonRecursiveTreeWalker): # pylint:disable=unused-variable
+ """Given the particular ElementTree representation, this implementation,
+ to avoid using recursion, returns "nodes" as tuples with the following
+ content:
+
+ 1. The current element
+
+ 2. The index of the element relative to its parent
+
+ 3. A stack of ancestor elements
+
+ 4. A flag "text", "tail" or None to indicate if the current node is a
+ text node; either the text or tail of the current element (1)
+ """
+ def getNodeDetails(self, node):
+ if isinstance(node, tuple): # It might be the root Element
+ elt, _, _, flag = node
+ if flag in ("text", "tail"):
+ return base.TEXT, getattr(elt, flag)
+ else:
+ node = elt
+
+            if not hasattr(node, "tag"):
+ node = node.getroot()
+
+ if node.tag in ("DOCUMENT_ROOT", "DOCUMENT_FRAGMENT"):
+ return (base.DOCUMENT,)
+
+ elif node.tag == "<!DOCTYPE>":
+ return (base.DOCTYPE, node.text,
+ node.get("publicId"), node.get("systemId"))
+
+ elif node.tag == ElementTreeCommentType:
+ return base.COMMENT, node.text
+
+ else:
+ assert isinstance(node.tag, string_types), type(node.tag)
+ # This is assumed to be an ordinary element
+ match = tag_regexp.match(node.tag)
+ if match:
+ namespace, tag = match.groups()
+ else:
+ namespace = None
+ tag = node.tag
+ attrs = OrderedDict()
+ for name, value in list(node.attrib.items()):
+ match = tag_regexp.match(name)
+ if match:
+ attrs[(match.group(1), match.group(2))] = value
+ else:
+ attrs[(None, name)] = value
+ return (base.ELEMENT, namespace, tag,
+ attrs, len(node) or node.text)
+
+ def getFirstChild(self, node):
+ if isinstance(node, tuple):
+ element, key, parents, flag = node
+ else:
+ element, key, parents, flag = node, None, [], None
+
+ if flag in ("text", "tail"):
+ return None
+ else:
+ if element.text:
+ return element, key, parents, "text"
+ elif len(element):
+ parents.append(element)
+ return element[0], 0, parents, None
+ else:
+ return None
+
+ def getNextSibling(self, node):
+ if isinstance(node, tuple):
+ element, key, parents, flag = node
+ else:
+ return None
+
+ if flag == "text":
+ if len(element):
+ parents.append(element)
+ return element[0], 0, parents, None
+ else:
+ return None
+ else:
+ if element.tail and flag != "tail":
+ return element, key, parents, "tail"
+ elif key < len(parents[-1]) - 1:
+ return parents[-1][key + 1], key + 1, parents, None
+ else:
+ return None
+
+ def getParentNode(self, node):
+ if isinstance(node, tuple):
+ element, key, parents, flag = node
+ else:
+ return None
+
+ if flag == "text":
+ if not parents:
+ return element
+ else:
+ return element, key, parents, None
+ else:
+ parent = parents.pop()
+ if not parents:
+ return parent
+ else:
+ assert list(parents[-1]).count(parent) == 1
+ return parent, list(parents[-1]).index(parent), parents, None
+
+ return locals()
+
+getETreeModule = moduleFactoryFactory(getETreeBuilder)
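+
+
+# Minimal usage sketch (annotation, not upstream code):
+if __name__ == "__main__":
+    import xml.etree.ElementTree as ElementTree
+
+    # getETreeModule specializes this walker for one concrete ElementTree
+    # implementation; cElementTree and lxml.etree work the same way.
+    walker_cls = getETreeModule(ElementTree).TreeWalker
+    root = ElementTree.fromstring("<p>Hello <b>world</b></p>")
+    for token in walker_cls(root):
+        print(token)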
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py
new file mode 100644
index 00000000..e81ddf33
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/etree_lxml.py
@@ -0,0 +1,213 @@
+from __future__ import absolute_import, division, unicode_literals
+from pip._vendor.six import text_type
+
+from lxml import etree
+from ..treebuilders.etree import tag_regexp
+
+from . import base
+
+from .. import _ihatexml
+
+
+def ensure_str(s):
+ if s is None:
+ return None
+ elif isinstance(s, text_type):
+ return s
+ else:
+ return s.decode("ascii", "strict")
+
+
+class Root(object):
+ def __init__(self, et):
+ self.elementtree = et
+ self.children = []
+
+ try:
+ if et.docinfo.internalDTD:
+ self.children.append(Doctype(self,
+ ensure_str(et.docinfo.root_name),
+ ensure_str(et.docinfo.public_id),
+ ensure_str(et.docinfo.system_url)))
+ except AttributeError:
+ pass
+
+ try:
+ node = et.getroot()
+ except AttributeError:
+ node = et
+
+ while node.getprevious() is not None:
+ node = node.getprevious()
+ while node is not None:
+ self.children.append(node)
+ node = node.getnext()
+
+ self.text = None
+ self.tail = None
+
+ def __getitem__(self, key):
+ return self.children[key]
+
+ def getnext(self):
+ return None
+
+ def __len__(self):
+ return 1
+
+
+class Doctype(object):
+ def __init__(self, root_node, name, public_id, system_id):
+ self.root_node = root_node
+ self.name = name
+ self.public_id = public_id
+ self.system_id = system_id
+
+ self.text = None
+ self.tail = None
+
+ def getnext(self):
+ return self.root_node.children[1]
+
+
+class FragmentRoot(Root):
+ def __init__(self, children):
+ self.children = [FragmentWrapper(self, child) for child in children]
+ self.text = self.tail = None
+
+ def getnext(self):
+ return None
+
+
+class FragmentWrapper(object):
+ def __init__(self, fragment_root, obj):
+ self.root_node = fragment_root
+ self.obj = obj
+ if hasattr(self.obj, 'text'):
+ self.text = ensure_str(self.obj.text)
+ else:
+ self.text = None
+ if hasattr(self.obj, 'tail'):
+ self.tail = ensure_str(self.obj.tail)
+ else:
+ self.tail = None
+
+ def __getattr__(self, name):
+ return getattr(self.obj, name)
+
+ def getnext(self):
+ siblings = self.root_node.children
+ idx = siblings.index(self)
+ if idx < len(siblings) - 1:
+ return siblings[idx + 1]
+ else:
+ return None
+
+ def __getitem__(self, key):
+ return self.obj[key]
+
+ def __bool__(self):
+ return bool(self.obj)
+
+ def getparent(self):
+ return None
+
+ def __str__(self):
+ return str(self.obj)
+
+ def __unicode__(self):
+ return str(self.obj)
+
+ def __len__(self):
+ return len(self.obj)
+
+
+class TreeWalker(base.NonRecursiveTreeWalker):
+ def __init__(self, tree):
+ # pylint:disable=redefined-variable-type
+ if isinstance(tree, list):
+ self.fragmentChildren = set(tree)
+ tree = FragmentRoot(tree)
+ else:
+ self.fragmentChildren = set()
+ tree = Root(tree)
+ base.NonRecursiveTreeWalker.__init__(self, tree)
+ self.filter = _ihatexml.InfosetFilter()
+
+ def getNodeDetails(self, node):
+ if isinstance(node, tuple): # Text node
+ node, key = node
+ assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
+ return base.TEXT, ensure_str(getattr(node, key))
+
+ elif isinstance(node, Root):
+ return (base.DOCUMENT,)
+
+ elif isinstance(node, Doctype):
+ return base.DOCTYPE, node.name, node.public_id, node.system_id
+
+ elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"):
+ return base.TEXT, ensure_str(node.obj)
+
+ elif node.tag == etree.Comment:
+ return base.COMMENT, ensure_str(node.text)
+
+ elif node.tag == etree.Entity:
+ return base.ENTITY, ensure_str(node.text)[1:-1] # strip &;
+
+ else:
+ # This is assumed to be an ordinary element
+ match = tag_regexp.match(ensure_str(node.tag))
+ if match:
+ namespace, tag = match.groups()
+ else:
+ namespace = None
+ tag = ensure_str(node.tag)
+ attrs = {}
+ for name, value in list(node.attrib.items()):
+ name = ensure_str(name)
+ value = ensure_str(value)
+ match = tag_regexp.match(name)
+ if match:
+ attrs[(match.group(1), match.group(2))] = value
+ else:
+ attrs[(None, name)] = value
+ return (base.ELEMENT, namespace, self.filter.fromXmlName(tag),
+ attrs, len(node) > 0 or node.text)
+
+ def getFirstChild(self, node):
+ assert not isinstance(node, tuple), "Text nodes have no children"
+
+ assert len(node) or node.text, "Node has no children"
+ if node.text:
+ return (node, "text")
+ else:
+ return node[0]
+
+ def getNextSibling(self, node):
+ if isinstance(node, tuple): # Text node
+ node, key = node
+ assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
+ if key == "text":
+ # XXX: we cannot use a "bool(node) and node[0] or None" construct here
+ # because node[0] might evaluate to False if it has no child element
+ if len(node):
+ return node[0]
+ else:
+ return None
+ else: # tail
+ return node.getnext()
+
+ return (node, "tail") if node.tail else node.getnext()
+
+ def getParentNode(self, node):
+ if isinstance(node, tuple): # Text node
+ node, key = node
+ assert key in ("text", "tail"), "Text nodes are text or tail, found %s" % key
+ if key == "text":
+ return node
+ # else: fallback to "normal" processing
+ elif node in self.fragmentChildren:
+ return None
+
+ return node.getparent()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py
new file mode 100644
index 00000000..7483be27
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py
@@ -0,0 +1,69 @@
+from __future__ import absolute_import, division, unicode_literals
+
+from genshi.core import QName
+from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
+from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT
+
+from . import base
+
+from ..constants import voidElements, namespaces
+
+
+class TreeWalker(base.TreeWalker):
+ def __iter__(self):
+ # Buffer the events so we can pass in the following one
+ previous = None
+ for event in self.tree:
+ if previous is not None:
+ for token in self.tokens(previous, event):
+ yield token
+ previous = event
+
+ # Don't forget the final event!
+ if previous is not None:
+ for token in self.tokens(previous, None):
+ yield token
+
+ def tokens(self, event, next):
+ kind, data, _ = event
+ if kind == START:
+ tag, attribs = data
+ name = tag.localname
+ namespace = tag.namespace
+ converted_attribs = {}
+ for k, v in attribs:
+ if isinstance(k, QName):
+ converted_attribs[(k.namespace, k.localname)] = v
+ else:
+ converted_attribs[(None, k)] = v
+
+ if namespace == namespaces["html"] and name in voidElements:
+ for token in self.emptyTag(namespace, name, converted_attribs,
+ not next or next[0] != END or
+ next[1] != tag):
+ yield token
+ else:
+ yield self.startTag(namespace, name, converted_attribs)
+
+ elif kind == END:
+ name = data.localname
+ namespace = data.namespace
+ if namespace != namespaces["html"] or name not in voidElements:
+ yield self.endTag(namespace, name)
+
+ elif kind == COMMENT:
+ yield self.comment(data)
+
+ elif kind == TEXT:
+ for token in self.text(data):
+ yield token
+
+ elif kind == DOCTYPE:
+ yield self.doctype(*data)
+
+ elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
+ START_CDATA, END_CDATA, PI):
+ pass
+
+ else:
+ yield self.unknown(kind)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__init__.py
new file mode 100644
index 00000000..847bf935
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__init__.py
@@ -0,0 +1,2 @@
+from .package_data import __version__
+from .core import *
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..f57cf52a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-37.pyc
new file mode 100644
index 00000000..5252bf7d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/codec.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..1cd10ab5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/core.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/core.cpython-37.pyc
new file mode 100644
index 00000000..3fc07800
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/core.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc
new file mode 100644
index 00000000..d0dbb1a7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc
new file mode 100644
index 00000000..335806ce
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc
new file mode 100644
index 00000000..7e5f9b95
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc
new file mode 100644
index 00000000..831dfa7a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/codec.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/codec.py
new file mode 100644
index 00000000..98c65ead
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/codec.py
@@ -0,0 +1,118 @@
+from .core import encode, decode, alabel, ulabel, IDNAError
+import codecs
+import re
+
+_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
+
+class Codec(codecs.Codec):
+
+ def encode(self, data, errors='strict'):
+
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+ return "", 0
+
+ return encode(data), len(data)
+
+ def decode(self, data, errors='strict'):
+
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+ return u"", 0
+
+ return decode(data), len(data)
+
+class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
+ def _buffer_encode(self, data, errors, final):
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+ return ("", 0)
+
+ labels = _unicode_dots_re.split(data)
+ trailing_dot = u''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = '.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = '.'
+
+ result = []
+ size = 0
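+        # size counts how many input characters were consumed: the labels
+        # themselves plus one separator dot between consecutive labels.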
+ for label in labels:
+ result.append(alabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ # Join with U+002E
+ result = ".".join(result) + trailing_dot
+ size += len(trailing_dot)
+ return (result, size)
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ def _buffer_decode(self, data, errors, final):
+ if errors != 'strict':
+ raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
+
+ if not data:
+ return (u"", 0)
+
+ # IDNA allows decoding to operate on Unicode strings, too.
+        if isinstance(data, str):
+            labels = _unicode_dots_re.split(data)
+        else:
+            # Must be ASCII-only bytes
+            # (.decode() raises UnicodeDecodeError otherwise)
+            data = data.decode("ascii")
+            labels = data.split(".")
+
+ trailing_dot = u''
+ if labels:
+ if not labels[-1]:
+ trailing_dot = u'.'
+ del labels[-1]
+ elif not final:
+ # Keep potentially unfinished label until the next call
+ del labels[-1]
+ if labels:
+ trailing_dot = u'.'
+
+ result = []
+ size = 0
+ for label in labels:
+ result.append(ulabel(label))
+ if size:
+ size += 1
+ size += len(label)
+
+ result = u".".join(result) + trailing_dot
+ size += len(trailing_dot)
+ return (result, size)
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+ pass
+
+class StreamReader(Codec, codecs.StreamReader):
+ pass
+
+def getregentry():
+ return codecs.CodecInfo(
+ name='idna',
+ encode=Codec().encode,
+ decode=Codec().decode,
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamwriter=StreamWriter,
+ streamreader=StreamReader,
+ )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/compat.py
new file mode 100644
index 00000000..4d47f336
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/compat.py
@@ -0,0 +1,12 @@
+from .core import *
+from .codec import *
+
+def ToASCII(label):
+ return encode(label)
+
+def ToUnicode(label):
+ return decode(label)
+
+def nameprep(s):
+ raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/core.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/core.py
new file mode 100644
index 00000000..104624ad
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/core.py
@@ -0,0 +1,396 @@
+from . import idnadata
+import bisect
+import unicodedata
+import re
+import sys
+from .intranges import intranges_contain
+
+_virama_combining_class = 9
+_alabel_prefix = b'xn--'
+_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
+
+if sys.version_info[0] == 3:
+ unicode = str
+ unichr = chr
+
+class IDNAError(UnicodeError):
+ """ Base exception for all IDNA-encoding related problems """
+ pass
+
+
+class IDNABidiError(IDNAError):
+ """ Exception when bidirectional requirements are not satisfied """
+ pass
+
+
+class InvalidCodepoint(IDNAError):
+ """ Exception when a disallowed or unallocated codepoint is used """
+ pass
+
+
+class InvalidCodepointContext(IDNAError):
+ """ Exception when the codepoint is not valid in the context it is used """
+ pass
+
+
+def _combining_class(cp):
+ v = unicodedata.combining(unichr(cp))
+ if v == 0:
+ if not unicodedata.name(unichr(cp)):
+ raise ValueError("Unknown character in unicodedata")
+ return v
+
+def _is_script(cp, script):
+ return intranges_contain(ord(cp), idnadata.scripts[script])
+
+def _punycode(s):
+ return s.encode('punycode')
+
+def _unot(s):
+ return 'U+{0:04X}'.format(s)
+
+
+def valid_label_length(label):
+
+ if len(label) > 63:
+ return False
+ return True
+
+
+def valid_string_length(label, trailing_dot):
+
+ if len(label) > (254 if trailing_dot else 253):
+ return False
+ return True
+
+
+def check_bidi(label, check_ltr=False):
+
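+    # Implements the "Bidi Rule" of RFC 5893, section 2.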
+ # Bidi rules should only be applied if string contains RTL characters
+ bidi_label = False
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+ if direction == '':
+ # String likely comes from a newer version of Unicode
+ raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
+ if direction in ['R', 'AL', 'AN']:
+ bidi_label = True
+ if not bidi_label and not check_ltr:
+ return True
+
+ # Bidi rule 1
+ direction = unicodedata.bidirectional(label[0])
+ if direction in ['R', 'AL']:
+ rtl = True
+ elif direction == 'L':
+ rtl = False
+ else:
+ raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label)))
+
+ valid_ending = False
+ number_type = False
+ for (idx, cp) in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+
+ if rtl:
+ # Bidi rule 2
+ if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx))
+ # Bidi rule 3
+ if direction in ['R', 'AL', 'EN', 'AN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+ # Bidi rule 4
+ if direction in ['AN', 'EN']:
+ if not number_type:
+ number_type = direction
+ else:
+ if number_type != direction:
+ raise IDNABidiError('Can not mix numeral types in a right-to-left label')
+ else:
+ # Bidi rule 5
+ if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
+ raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx))
+ # Bidi rule 6
+ if direction in ['L', 'EN']:
+ valid_ending = True
+ elif direction != 'NSM':
+ valid_ending = False
+
+ if not valid_ending:
+ raise IDNABidiError('Label ends with illegal codepoint directionality')
+
+ return True
+
+
+def check_initial_combiner(label):
+
+ if unicodedata.category(label[0])[0] == 'M':
+ raise IDNAError('Label begins with an illegal combining character')
+ return True
+
+
+def check_hyphen_ok(label):
+
+ if label[2:4] == '--':
+ raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
+ if label[0] == '-' or label[-1] == '-':
+ raise IDNAError('Label must not start or end with a hyphen')
+ return True
+
+
+def check_nfc(label):
+
+ if unicodedata.normalize('NFC', label) != label:
+ raise IDNAError('Label must be in Normalization Form C')
+
+
+def valid_contextj(label, pos):
+
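+    # CONTEXTJ rules of RFC 5892, Appendix A.1 (ZWNJ) and A.2 (ZWJ).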
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x200c:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+
+ ok = False
+ for i in range(pos-1, -1, -1):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('L'), ord('D')]:
+ ok = True
+ break
+
+ if not ok:
+ return False
+
+ ok = False
+ for i in range(pos+1, len(label)):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord('T'):
+ continue
+ if joining_type in [ord('R'), ord('D')]:
+ ok = True
+ break
+ return ok
+
+ if cp_value == 0x200d:
+
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+ return False
+
+ else:
+
+ return False
+
+
+def valid_contexto(label, pos, exception=False):
+
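+    # CONTEXTO rules of RFC 5892, Appendix A.3 through A.9.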
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x00b7:
+ if 0 < pos < len(label)-1:
+ if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
+ return True
+ return False
+
+ elif cp_value == 0x0375:
+ if pos < len(label)-1 and len(label) > 1:
+ return _is_script(label[pos + 1], 'Greek')
+ return False
+
+ elif cp_value == 0x05f3 or cp_value == 0x05f4:
+ if pos > 0:
+ return _is_script(label[pos - 1], 'Hebrew')
+ return False
+
+ elif cp_value == 0x30fb:
+ for cp in label:
+ if cp == u'\u30fb':
+ continue
+ if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
+ return True
+ return False
+
+ elif 0x660 <= cp_value <= 0x669:
+ for cp in label:
+ if 0x6f0 <= ord(cp) <= 0x06f9:
+ return False
+ return True
+
+ elif 0x6f0 <= cp_value <= 0x6f9:
+ for cp in label:
+ if 0x660 <= ord(cp) <= 0x0669:
+ return False
+ return True
+
+
+def check_label(label):
+
+ if isinstance(label, (bytes, bytearray)):
+ label = label.decode('utf-8')
+ if len(label) == 0:
+ raise IDNAError('Empty Label')
+
+ check_nfc(label)
+ check_hyphen_ok(label)
+ check_initial_combiner(label)
+
+ for (pos, cp) in enumerate(label):
+ cp_value = ord(cp)
+ if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
+ continue
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
+ try:
+ if not valid_contextj(label, pos):
+ raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ except ValueError:
+ raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format(
+ _unot(cp_value), pos+1, repr(label)))
+ elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
+ if not valid_contexto(label, pos):
+ raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
+ else:
+ raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
+
+ check_bidi(label)
+
+
+def alabel(label):
+
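+    # Fast path: a label that is already ASCII may be a pre-encoded A-label;
+    # round-tripping it through ulabel() validates it.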
+ try:
+ label = label.encode('ascii')
+ ulabel(label)
+ if not valid_label_length(label):
+ raise IDNAError('Label too long')
+ return label
+ except UnicodeEncodeError:
+ pass
+
+ if not label:
+ raise IDNAError('No Input')
+
+ label = unicode(label)
+ check_label(label)
+ label = _punycode(label)
+ label = _alabel_prefix + label
+
+ if not valid_label_length(label):
+ raise IDNAError('Label too long')
+
+ return label
+
+
+def ulabel(label):
+
+ if not isinstance(label, (bytes, bytearray)):
+ try:
+ label = label.encode('ascii')
+ except UnicodeEncodeError:
+ check_label(label)
+ return label
+
+ label = label.lower()
+ if label.startswith(_alabel_prefix):
+ label = label[len(_alabel_prefix):]
+ else:
+ check_label(label)
+ return label.decode('ascii')
+
+ label = label.decode('punycode')
+ check_label(label)
+ return label
+
+
+def uts46_remap(domain, std3_rules=True, transitional=False):
+ """Re-map the characters in the string according to UTS46 processing."""
+ from .uts46data import uts46data
+ output = u""
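+    # uts46data stores one row per codepoint below 0x100; above that a row
+    # marks the start of a run, hence the bisect lookup.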
+ try:
+ for pos, char in enumerate(domain):
+ code_point = ord(char)
+ uts46row = uts46data[code_point if code_point < 256 else
+ bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
+ status = uts46row[1]
+ replacement = uts46row[2] if len(uts46row) == 3 else None
+ if (status == "V" or
+ (status == "D" and not transitional) or
+ (status == "3" and not std3_rules and replacement is None)):
+ output += char
+ elif replacement is not None and (status == "M" or
+ (status == "3" and not std3_rules) or
+ (status == "D" and transitional)):
+ output += replacement
+ elif status != "I":
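+                # any other status is disallowed; reuse IndexError to reach
+                # the InvalidCodepoint handler below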
+ raise IndexError()
+ return unicodedata.normalize("NFC", output)
+ except IndexError:
+ raise InvalidCodepoint(
+ "Codepoint {0} not allowed at position {1} in {2}".format(
+ _unot(code_point), pos + 1, repr(domain)))
+
+
+def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):
+
+ if isinstance(s, (bytes, bytearray)):
+ s = s.decode("ascii")
+ if uts46:
+ s = uts46_remap(s, std3_rules, transitional)
+ trailing_dot = False
+ result = []
+ if strict:
+ labels = s.split('.')
+ else:
+ labels = _unicode_dots_re.split(s)
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if labels[-1] == '':
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = alabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append(b'')
+ s = b'.'.join(result)
+ if not valid_string_length(s, trailing_dot):
+ raise IDNAError('Domain too long')
+ return s
+
+
+def decode(s, strict=False, uts46=False, std3_rules=False):
+
+ if isinstance(s, (bytes, bytearray)):
+ s = s.decode("ascii")
+ if uts46:
+ s = uts46_remap(s, std3_rules, False)
+ trailing_dot = False
+ result = []
+ if not strict:
+ labels = _unicode_dots_re.split(s)
+ else:
+ labels = s.split(u'.')
+ if not labels or labels == ['']:
+ raise IDNAError('Empty domain')
+ if not labels[-1]:
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = ulabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError('Empty label')
+ if trailing_dot:
+ result.append(u'')
+ return u'.'.join(result)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/idnadata.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/idnadata.py
new file mode 100644
index 00000000..a80c959d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/idnadata.py
@@ -0,0 +1,1979 @@
+# This file is automatically generated by tools/idna-data
+
+__version__ = "11.0.0"
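+# Entries in the scripts and codepoint_classes tables pack a half-open
+# codepoint range [start, end) into a single integer (start << 32) | end;
+# see intranges.py for the decoder.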
+scripts = {
+ 'Greek': (
+ 0x37000000374,
+ 0x37500000378,
+ 0x37a0000037e,
+ 0x37f00000380,
+ 0x38400000385,
+ 0x38600000387,
+ 0x3880000038b,
+ 0x38c0000038d,
+ 0x38e000003a2,
+ 0x3a3000003e2,
+ 0x3f000000400,
+ 0x1d2600001d2b,
+ 0x1d5d00001d62,
+ 0x1d6600001d6b,
+ 0x1dbf00001dc0,
+ 0x1f0000001f16,
+ 0x1f1800001f1e,
+ 0x1f2000001f46,
+ 0x1f4800001f4e,
+ 0x1f5000001f58,
+ 0x1f5900001f5a,
+ 0x1f5b00001f5c,
+ 0x1f5d00001f5e,
+ 0x1f5f00001f7e,
+ 0x1f8000001fb5,
+ 0x1fb600001fc5,
+ 0x1fc600001fd4,
+ 0x1fd600001fdc,
+ 0x1fdd00001ff0,
+ 0x1ff200001ff5,
+ 0x1ff600001fff,
+ 0x212600002127,
+ 0xab650000ab66,
+ 0x101400001018f,
+ 0x101a0000101a1,
+ 0x1d2000001d246,
+ ),
+ 'Han': (
+ 0x2e8000002e9a,
+ 0x2e9b00002ef4,
+ 0x2f0000002fd6,
+ 0x300500003006,
+ 0x300700003008,
+ 0x30210000302a,
+ 0x30380000303c,
+ 0x340000004db6,
+ 0x4e0000009ff0,
+ 0xf9000000fa6e,
+ 0xfa700000fada,
+ 0x200000002a6d7,
+ 0x2a7000002b735,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ 0x2f8000002fa1e,
+ ),
+ 'Hebrew': (
+ 0x591000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f5,
+ 0xfb1d0000fb37,
+ 0xfb380000fb3d,
+ 0xfb3e0000fb3f,
+ 0xfb400000fb42,
+ 0xfb430000fb45,
+ 0xfb460000fb50,
+ ),
+ 'Hiragana': (
+ 0x304100003097,
+ 0x309d000030a0,
+ 0x1b0010001b11f,
+ 0x1f2000001f201,
+ ),
+ 'Katakana': (
+ 0x30a1000030fb,
+ 0x30fd00003100,
+ 0x31f000003200,
+ 0x32d0000032ff,
+ 0x330000003358,
+ 0xff660000ff70,
+ 0xff710000ff9e,
+ 0x1b0000001b001,
+ ),
+}
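+# joining_types values are ord() of the Unicode joining type:
+# 67 'C', 68 'D', 76 'L', 82 'R', 84 'T', 85 'U'.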
+joining_types = {
+ 0x600: 85,
+ 0x601: 85,
+ 0x602: 85,
+ 0x603: 85,
+ 0x604: 85,
+ 0x605: 85,
+ 0x608: 85,
+ 0x60b: 85,
+ 0x620: 68,
+ 0x621: 85,
+ 0x622: 82,
+ 0x623: 82,
+ 0x624: 82,
+ 0x625: 82,
+ 0x626: 68,
+ 0x627: 82,
+ 0x628: 68,
+ 0x629: 82,
+ 0x62a: 68,
+ 0x62b: 68,
+ 0x62c: 68,
+ 0x62d: 68,
+ 0x62e: 68,
+ 0x62f: 82,
+ 0x630: 82,
+ 0x631: 82,
+ 0x632: 82,
+ 0x633: 68,
+ 0x634: 68,
+ 0x635: 68,
+ 0x636: 68,
+ 0x637: 68,
+ 0x638: 68,
+ 0x639: 68,
+ 0x63a: 68,
+ 0x63b: 68,
+ 0x63c: 68,
+ 0x63d: 68,
+ 0x63e: 68,
+ 0x63f: 68,
+ 0x640: 67,
+ 0x641: 68,
+ 0x642: 68,
+ 0x643: 68,
+ 0x644: 68,
+ 0x645: 68,
+ 0x646: 68,
+ 0x647: 68,
+ 0x648: 82,
+ 0x649: 68,
+ 0x64a: 68,
+ 0x66e: 68,
+ 0x66f: 68,
+ 0x671: 82,
+ 0x672: 82,
+ 0x673: 82,
+ 0x674: 85,
+ 0x675: 82,
+ 0x676: 82,
+ 0x677: 82,
+ 0x678: 68,
+ 0x679: 68,
+ 0x67a: 68,
+ 0x67b: 68,
+ 0x67c: 68,
+ 0x67d: 68,
+ 0x67e: 68,
+ 0x67f: 68,
+ 0x680: 68,
+ 0x681: 68,
+ 0x682: 68,
+ 0x683: 68,
+ 0x684: 68,
+ 0x685: 68,
+ 0x686: 68,
+ 0x687: 68,
+ 0x688: 82,
+ 0x689: 82,
+ 0x68a: 82,
+ 0x68b: 82,
+ 0x68c: 82,
+ 0x68d: 82,
+ 0x68e: 82,
+ 0x68f: 82,
+ 0x690: 82,
+ 0x691: 82,
+ 0x692: 82,
+ 0x693: 82,
+ 0x694: 82,
+ 0x695: 82,
+ 0x696: 82,
+ 0x697: 82,
+ 0x698: 82,
+ 0x699: 82,
+ 0x69a: 68,
+ 0x69b: 68,
+ 0x69c: 68,
+ 0x69d: 68,
+ 0x69e: 68,
+ 0x69f: 68,
+ 0x6a0: 68,
+ 0x6a1: 68,
+ 0x6a2: 68,
+ 0x6a3: 68,
+ 0x6a4: 68,
+ 0x6a5: 68,
+ 0x6a6: 68,
+ 0x6a7: 68,
+ 0x6a8: 68,
+ 0x6a9: 68,
+ 0x6aa: 68,
+ 0x6ab: 68,
+ 0x6ac: 68,
+ 0x6ad: 68,
+ 0x6ae: 68,
+ 0x6af: 68,
+ 0x6b0: 68,
+ 0x6b1: 68,
+ 0x6b2: 68,
+ 0x6b3: 68,
+ 0x6b4: 68,
+ 0x6b5: 68,
+ 0x6b6: 68,
+ 0x6b7: 68,
+ 0x6b8: 68,
+ 0x6b9: 68,
+ 0x6ba: 68,
+ 0x6bb: 68,
+ 0x6bc: 68,
+ 0x6bd: 68,
+ 0x6be: 68,
+ 0x6bf: 68,
+ 0x6c0: 82,
+ 0x6c1: 68,
+ 0x6c2: 68,
+ 0x6c3: 82,
+ 0x6c4: 82,
+ 0x6c5: 82,
+ 0x6c6: 82,
+ 0x6c7: 82,
+ 0x6c8: 82,
+ 0x6c9: 82,
+ 0x6ca: 82,
+ 0x6cb: 82,
+ 0x6cc: 68,
+ 0x6cd: 82,
+ 0x6ce: 68,
+ 0x6cf: 82,
+ 0x6d0: 68,
+ 0x6d1: 68,
+ 0x6d2: 82,
+ 0x6d3: 82,
+ 0x6d5: 82,
+ 0x6dd: 85,
+ 0x6ee: 82,
+ 0x6ef: 82,
+ 0x6fa: 68,
+ 0x6fb: 68,
+ 0x6fc: 68,
+ 0x6ff: 68,
+ 0x70f: 84,
+ 0x710: 82,
+ 0x712: 68,
+ 0x713: 68,
+ 0x714: 68,
+ 0x715: 82,
+ 0x716: 82,
+ 0x717: 82,
+ 0x718: 82,
+ 0x719: 82,
+ 0x71a: 68,
+ 0x71b: 68,
+ 0x71c: 68,
+ 0x71d: 68,
+ 0x71e: 82,
+ 0x71f: 68,
+ 0x720: 68,
+ 0x721: 68,
+ 0x722: 68,
+ 0x723: 68,
+ 0x724: 68,
+ 0x725: 68,
+ 0x726: 68,
+ 0x727: 68,
+ 0x728: 82,
+ 0x729: 68,
+ 0x72a: 82,
+ 0x72b: 68,
+ 0x72c: 82,
+ 0x72d: 68,
+ 0x72e: 68,
+ 0x72f: 82,
+ 0x74d: 82,
+ 0x74e: 68,
+ 0x74f: 68,
+ 0x750: 68,
+ 0x751: 68,
+ 0x752: 68,
+ 0x753: 68,
+ 0x754: 68,
+ 0x755: 68,
+ 0x756: 68,
+ 0x757: 68,
+ 0x758: 68,
+ 0x759: 82,
+ 0x75a: 82,
+ 0x75b: 82,
+ 0x75c: 68,
+ 0x75d: 68,
+ 0x75e: 68,
+ 0x75f: 68,
+ 0x760: 68,
+ 0x761: 68,
+ 0x762: 68,
+ 0x763: 68,
+ 0x764: 68,
+ 0x765: 68,
+ 0x766: 68,
+ 0x767: 68,
+ 0x768: 68,
+ 0x769: 68,
+ 0x76a: 68,
+ 0x76b: 82,
+ 0x76c: 82,
+ 0x76d: 68,
+ 0x76e: 68,
+ 0x76f: 68,
+ 0x770: 68,
+ 0x771: 82,
+ 0x772: 68,
+ 0x773: 82,
+ 0x774: 82,
+ 0x775: 68,
+ 0x776: 68,
+ 0x777: 68,
+ 0x778: 82,
+ 0x779: 82,
+ 0x77a: 68,
+ 0x77b: 68,
+ 0x77c: 68,
+ 0x77d: 68,
+ 0x77e: 68,
+ 0x77f: 68,
+ 0x7ca: 68,
+ 0x7cb: 68,
+ 0x7cc: 68,
+ 0x7cd: 68,
+ 0x7ce: 68,
+ 0x7cf: 68,
+ 0x7d0: 68,
+ 0x7d1: 68,
+ 0x7d2: 68,
+ 0x7d3: 68,
+ 0x7d4: 68,
+ 0x7d5: 68,
+ 0x7d6: 68,
+ 0x7d7: 68,
+ 0x7d8: 68,
+ 0x7d9: 68,
+ 0x7da: 68,
+ 0x7db: 68,
+ 0x7dc: 68,
+ 0x7dd: 68,
+ 0x7de: 68,
+ 0x7df: 68,
+ 0x7e0: 68,
+ 0x7e1: 68,
+ 0x7e2: 68,
+ 0x7e3: 68,
+ 0x7e4: 68,
+ 0x7e5: 68,
+ 0x7e6: 68,
+ 0x7e7: 68,
+ 0x7e8: 68,
+ 0x7e9: 68,
+ 0x7ea: 68,
+ 0x7fa: 67,
+ 0x840: 82,
+ 0x841: 68,
+ 0x842: 68,
+ 0x843: 68,
+ 0x844: 68,
+ 0x845: 68,
+ 0x846: 82,
+ 0x847: 82,
+ 0x848: 68,
+ 0x849: 82,
+ 0x84a: 68,
+ 0x84b: 68,
+ 0x84c: 68,
+ 0x84d: 68,
+ 0x84e: 68,
+ 0x84f: 68,
+ 0x850: 68,
+ 0x851: 68,
+ 0x852: 68,
+ 0x853: 68,
+ 0x854: 82,
+ 0x855: 68,
+ 0x856: 85,
+ 0x857: 85,
+ 0x858: 85,
+ 0x860: 68,
+ 0x861: 85,
+ 0x862: 68,
+ 0x863: 68,
+ 0x864: 68,
+ 0x865: 68,
+ 0x866: 85,
+ 0x867: 82,
+ 0x868: 68,
+ 0x869: 82,
+ 0x86a: 82,
+ 0x8a0: 68,
+ 0x8a1: 68,
+ 0x8a2: 68,
+ 0x8a3: 68,
+ 0x8a4: 68,
+ 0x8a5: 68,
+ 0x8a6: 68,
+ 0x8a7: 68,
+ 0x8a8: 68,
+ 0x8a9: 68,
+ 0x8aa: 82,
+ 0x8ab: 82,
+ 0x8ac: 82,
+ 0x8ad: 85,
+ 0x8ae: 82,
+ 0x8af: 68,
+ 0x8b0: 68,
+ 0x8b1: 82,
+ 0x8b2: 82,
+ 0x8b3: 68,
+ 0x8b4: 68,
+ 0x8b6: 68,
+ 0x8b7: 68,
+ 0x8b8: 68,
+ 0x8b9: 82,
+ 0x8ba: 68,
+ 0x8bb: 68,
+ 0x8bc: 68,
+ 0x8bd: 68,
+ 0x8e2: 85,
+ 0x1806: 85,
+ 0x1807: 68,
+ 0x180a: 67,
+ 0x180e: 85,
+ 0x1820: 68,
+ 0x1821: 68,
+ 0x1822: 68,
+ 0x1823: 68,
+ 0x1824: 68,
+ 0x1825: 68,
+ 0x1826: 68,
+ 0x1827: 68,
+ 0x1828: 68,
+ 0x1829: 68,
+ 0x182a: 68,
+ 0x182b: 68,
+ 0x182c: 68,
+ 0x182d: 68,
+ 0x182e: 68,
+ 0x182f: 68,
+ 0x1830: 68,
+ 0x1831: 68,
+ 0x1832: 68,
+ 0x1833: 68,
+ 0x1834: 68,
+ 0x1835: 68,
+ 0x1836: 68,
+ 0x1837: 68,
+ 0x1838: 68,
+ 0x1839: 68,
+ 0x183a: 68,
+ 0x183b: 68,
+ 0x183c: 68,
+ 0x183d: 68,
+ 0x183e: 68,
+ 0x183f: 68,
+ 0x1840: 68,
+ 0x1841: 68,
+ 0x1842: 68,
+ 0x1843: 68,
+ 0x1844: 68,
+ 0x1845: 68,
+ 0x1846: 68,
+ 0x1847: 68,
+ 0x1848: 68,
+ 0x1849: 68,
+ 0x184a: 68,
+ 0x184b: 68,
+ 0x184c: 68,
+ 0x184d: 68,
+ 0x184e: 68,
+ 0x184f: 68,
+ 0x1850: 68,
+ 0x1851: 68,
+ 0x1852: 68,
+ 0x1853: 68,
+ 0x1854: 68,
+ 0x1855: 68,
+ 0x1856: 68,
+ 0x1857: 68,
+ 0x1858: 68,
+ 0x1859: 68,
+ 0x185a: 68,
+ 0x185b: 68,
+ 0x185c: 68,
+ 0x185d: 68,
+ 0x185e: 68,
+ 0x185f: 68,
+ 0x1860: 68,
+ 0x1861: 68,
+ 0x1862: 68,
+ 0x1863: 68,
+ 0x1864: 68,
+ 0x1865: 68,
+ 0x1866: 68,
+ 0x1867: 68,
+ 0x1868: 68,
+ 0x1869: 68,
+ 0x186a: 68,
+ 0x186b: 68,
+ 0x186c: 68,
+ 0x186d: 68,
+ 0x186e: 68,
+ 0x186f: 68,
+ 0x1870: 68,
+ 0x1871: 68,
+ 0x1872: 68,
+ 0x1873: 68,
+ 0x1874: 68,
+ 0x1875: 68,
+ 0x1876: 68,
+ 0x1877: 68,
+ 0x1878: 68,
+ 0x1880: 85,
+ 0x1881: 85,
+ 0x1882: 85,
+ 0x1883: 85,
+ 0x1884: 85,
+ 0x1885: 84,
+ 0x1886: 84,
+ 0x1887: 68,
+ 0x1888: 68,
+ 0x1889: 68,
+ 0x188a: 68,
+ 0x188b: 68,
+ 0x188c: 68,
+ 0x188d: 68,
+ 0x188e: 68,
+ 0x188f: 68,
+ 0x1890: 68,
+ 0x1891: 68,
+ 0x1892: 68,
+ 0x1893: 68,
+ 0x1894: 68,
+ 0x1895: 68,
+ 0x1896: 68,
+ 0x1897: 68,
+ 0x1898: 68,
+ 0x1899: 68,
+ 0x189a: 68,
+ 0x189b: 68,
+ 0x189c: 68,
+ 0x189d: 68,
+ 0x189e: 68,
+ 0x189f: 68,
+ 0x18a0: 68,
+ 0x18a1: 68,
+ 0x18a2: 68,
+ 0x18a3: 68,
+ 0x18a4: 68,
+ 0x18a5: 68,
+ 0x18a6: 68,
+ 0x18a7: 68,
+ 0x18a8: 68,
+ 0x18aa: 68,
+ 0x200c: 85,
+ 0x200d: 67,
+ 0x202f: 85,
+ 0x2066: 85,
+ 0x2067: 85,
+ 0x2068: 85,
+ 0x2069: 85,
+ 0xa840: 68,
+ 0xa841: 68,
+ 0xa842: 68,
+ 0xa843: 68,
+ 0xa844: 68,
+ 0xa845: 68,
+ 0xa846: 68,
+ 0xa847: 68,
+ 0xa848: 68,
+ 0xa849: 68,
+ 0xa84a: 68,
+ 0xa84b: 68,
+ 0xa84c: 68,
+ 0xa84d: 68,
+ 0xa84e: 68,
+ 0xa84f: 68,
+ 0xa850: 68,
+ 0xa851: 68,
+ 0xa852: 68,
+ 0xa853: 68,
+ 0xa854: 68,
+ 0xa855: 68,
+ 0xa856: 68,
+ 0xa857: 68,
+ 0xa858: 68,
+ 0xa859: 68,
+ 0xa85a: 68,
+ 0xa85b: 68,
+ 0xa85c: 68,
+ 0xa85d: 68,
+ 0xa85e: 68,
+ 0xa85f: 68,
+ 0xa860: 68,
+ 0xa861: 68,
+ 0xa862: 68,
+ 0xa863: 68,
+ 0xa864: 68,
+ 0xa865: 68,
+ 0xa866: 68,
+ 0xa867: 68,
+ 0xa868: 68,
+ 0xa869: 68,
+ 0xa86a: 68,
+ 0xa86b: 68,
+ 0xa86c: 68,
+ 0xa86d: 68,
+ 0xa86e: 68,
+ 0xa86f: 68,
+ 0xa870: 68,
+ 0xa871: 68,
+ 0xa872: 76,
+ 0xa873: 85,
+ 0x10ac0: 68,
+ 0x10ac1: 68,
+ 0x10ac2: 68,
+ 0x10ac3: 68,
+ 0x10ac4: 68,
+ 0x10ac5: 82,
+ 0x10ac6: 85,
+ 0x10ac7: 82,
+ 0x10ac8: 85,
+ 0x10ac9: 82,
+ 0x10aca: 82,
+ 0x10acb: 85,
+ 0x10acc: 85,
+ 0x10acd: 76,
+ 0x10ace: 82,
+ 0x10acf: 82,
+ 0x10ad0: 82,
+ 0x10ad1: 82,
+ 0x10ad2: 82,
+ 0x10ad3: 68,
+ 0x10ad4: 68,
+ 0x10ad5: 68,
+ 0x10ad6: 68,
+ 0x10ad7: 76,
+ 0x10ad8: 68,
+ 0x10ad9: 68,
+ 0x10ada: 68,
+ 0x10adb: 68,
+ 0x10adc: 68,
+ 0x10add: 82,
+ 0x10ade: 68,
+ 0x10adf: 68,
+ 0x10ae0: 68,
+ 0x10ae1: 82,
+ 0x10ae2: 85,
+ 0x10ae3: 85,
+ 0x10ae4: 82,
+ 0x10aeb: 68,
+ 0x10aec: 68,
+ 0x10aed: 68,
+ 0x10aee: 68,
+ 0x10aef: 82,
+ 0x10b80: 68,
+ 0x10b81: 82,
+ 0x10b82: 68,
+ 0x10b83: 82,
+ 0x10b84: 82,
+ 0x10b85: 82,
+ 0x10b86: 68,
+ 0x10b87: 68,
+ 0x10b88: 68,
+ 0x10b89: 82,
+ 0x10b8a: 68,
+ 0x10b8b: 68,
+ 0x10b8c: 82,
+ 0x10b8d: 68,
+ 0x10b8e: 82,
+ 0x10b8f: 82,
+ 0x10b90: 68,
+ 0x10b91: 82,
+ 0x10ba9: 82,
+ 0x10baa: 82,
+ 0x10bab: 82,
+ 0x10bac: 82,
+ 0x10bad: 68,
+ 0x10bae: 68,
+ 0x10baf: 85,
+ 0x10d00: 76,
+ 0x10d01: 68,
+ 0x10d02: 68,
+ 0x10d03: 68,
+ 0x10d04: 68,
+ 0x10d05: 68,
+ 0x10d06: 68,
+ 0x10d07: 68,
+ 0x10d08: 68,
+ 0x10d09: 68,
+ 0x10d0a: 68,
+ 0x10d0b: 68,
+ 0x10d0c: 68,
+ 0x10d0d: 68,
+ 0x10d0e: 68,
+ 0x10d0f: 68,
+ 0x10d10: 68,
+ 0x10d11: 68,
+ 0x10d12: 68,
+ 0x10d13: 68,
+ 0x10d14: 68,
+ 0x10d15: 68,
+ 0x10d16: 68,
+ 0x10d17: 68,
+ 0x10d18: 68,
+ 0x10d19: 68,
+ 0x10d1a: 68,
+ 0x10d1b: 68,
+ 0x10d1c: 68,
+ 0x10d1d: 68,
+ 0x10d1e: 68,
+ 0x10d1f: 68,
+ 0x10d20: 68,
+ 0x10d21: 68,
+ 0x10d22: 82,
+ 0x10d23: 68,
+ 0x10f30: 68,
+ 0x10f31: 68,
+ 0x10f32: 68,
+ 0x10f33: 82,
+ 0x10f34: 68,
+ 0x10f35: 68,
+ 0x10f36: 68,
+ 0x10f37: 68,
+ 0x10f38: 68,
+ 0x10f39: 68,
+ 0x10f3a: 68,
+ 0x10f3b: 68,
+ 0x10f3c: 68,
+ 0x10f3d: 68,
+ 0x10f3e: 68,
+ 0x10f3f: 68,
+ 0x10f40: 68,
+ 0x10f41: 68,
+ 0x10f42: 68,
+ 0x10f43: 68,
+ 0x10f44: 68,
+ 0x10f45: 85,
+ 0x10f51: 68,
+ 0x10f52: 68,
+ 0x10f53: 68,
+ 0x10f54: 82,
+ 0x110bd: 85,
+ 0x110cd: 85,
+ 0x1e900: 68,
+ 0x1e901: 68,
+ 0x1e902: 68,
+ 0x1e903: 68,
+ 0x1e904: 68,
+ 0x1e905: 68,
+ 0x1e906: 68,
+ 0x1e907: 68,
+ 0x1e908: 68,
+ 0x1e909: 68,
+ 0x1e90a: 68,
+ 0x1e90b: 68,
+ 0x1e90c: 68,
+ 0x1e90d: 68,
+ 0x1e90e: 68,
+ 0x1e90f: 68,
+ 0x1e910: 68,
+ 0x1e911: 68,
+ 0x1e912: 68,
+ 0x1e913: 68,
+ 0x1e914: 68,
+ 0x1e915: 68,
+ 0x1e916: 68,
+ 0x1e917: 68,
+ 0x1e918: 68,
+ 0x1e919: 68,
+ 0x1e91a: 68,
+ 0x1e91b: 68,
+ 0x1e91c: 68,
+ 0x1e91d: 68,
+ 0x1e91e: 68,
+ 0x1e91f: 68,
+ 0x1e920: 68,
+ 0x1e921: 68,
+ 0x1e922: 68,
+ 0x1e923: 68,
+ 0x1e924: 68,
+ 0x1e925: 68,
+ 0x1e926: 68,
+ 0x1e927: 68,
+ 0x1e928: 68,
+ 0x1e929: 68,
+ 0x1e92a: 68,
+ 0x1e92b: 68,
+ 0x1e92c: 68,
+ 0x1e92d: 68,
+ 0x1e92e: 68,
+ 0x1e92f: 68,
+ 0x1e930: 68,
+ 0x1e931: 68,
+ 0x1e932: 68,
+ 0x1e933: 68,
+ 0x1e934: 68,
+ 0x1e935: 68,
+ 0x1e936: 68,
+ 0x1e937: 68,
+ 0x1e938: 68,
+ 0x1e939: 68,
+ 0x1e93a: 68,
+ 0x1e93b: 68,
+ 0x1e93c: 68,
+ 0x1e93d: 68,
+ 0x1e93e: 68,
+ 0x1e93f: 68,
+ 0x1e940: 68,
+ 0x1e941: 68,
+ 0x1e942: 68,
+ 0x1e943: 68,
+}
+codepoint_classes = {
+ 'PVALID': (
+ 0x2d0000002e,
+ 0x300000003a,
+ 0x610000007b,
+ 0xdf000000f7,
+ 0xf800000100,
+ 0x10100000102,
+ 0x10300000104,
+ 0x10500000106,
+ 0x10700000108,
+ 0x1090000010a,
+ 0x10b0000010c,
+ 0x10d0000010e,
+ 0x10f00000110,
+ 0x11100000112,
+ 0x11300000114,
+ 0x11500000116,
+ 0x11700000118,
+ 0x1190000011a,
+ 0x11b0000011c,
+ 0x11d0000011e,
+ 0x11f00000120,
+ 0x12100000122,
+ 0x12300000124,
+ 0x12500000126,
+ 0x12700000128,
+ 0x1290000012a,
+ 0x12b0000012c,
+ 0x12d0000012e,
+ 0x12f00000130,
+ 0x13100000132,
+ 0x13500000136,
+ 0x13700000139,
+ 0x13a0000013b,
+ 0x13c0000013d,
+ 0x13e0000013f,
+ 0x14200000143,
+ 0x14400000145,
+ 0x14600000147,
+ 0x14800000149,
+ 0x14b0000014c,
+ 0x14d0000014e,
+ 0x14f00000150,
+ 0x15100000152,
+ 0x15300000154,
+ 0x15500000156,
+ 0x15700000158,
+ 0x1590000015a,
+ 0x15b0000015c,
+ 0x15d0000015e,
+ 0x15f00000160,
+ 0x16100000162,
+ 0x16300000164,
+ 0x16500000166,
+ 0x16700000168,
+ 0x1690000016a,
+ 0x16b0000016c,
+ 0x16d0000016e,
+ 0x16f00000170,
+ 0x17100000172,
+ 0x17300000174,
+ 0x17500000176,
+ 0x17700000178,
+ 0x17a0000017b,
+ 0x17c0000017d,
+ 0x17e0000017f,
+ 0x18000000181,
+ 0x18300000184,
+ 0x18500000186,
+ 0x18800000189,
+ 0x18c0000018e,
+ 0x19200000193,
+ 0x19500000196,
+ 0x1990000019c,
+ 0x19e0000019f,
+ 0x1a1000001a2,
+ 0x1a3000001a4,
+ 0x1a5000001a6,
+ 0x1a8000001a9,
+ 0x1aa000001ac,
+ 0x1ad000001ae,
+ 0x1b0000001b1,
+ 0x1b4000001b5,
+ 0x1b6000001b7,
+ 0x1b9000001bc,
+ 0x1bd000001c4,
+ 0x1ce000001cf,
+ 0x1d0000001d1,
+ 0x1d2000001d3,
+ 0x1d4000001d5,
+ 0x1d6000001d7,
+ 0x1d8000001d9,
+ 0x1da000001db,
+ 0x1dc000001de,
+ 0x1df000001e0,
+ 0x1e1000001e2,
+ 0x1e3000001e4,
+ 0x1e5000001e6,
+ 0x1e7000001e8,
+ 0x1e9000001ea,
+ 0x1eb000001ec,
+ 0x1ed000001ee,
+ 0x1ef000001f1,
+ 0x1f5000001f6,
+ 0x1f9000001fa,
+ 0x1fb000001fc,
+ 0x1fd000001fe,
+ 0x1ff00000200,
+ 0x20100000202,
+ 0x20300000204,
+ 0x20500000206,
+ 0x20700000208,
+ 0x2090000020a,
+ 0x20b0000020c,
+ 0x20d0000020e,
+ 0x20f00000210,
+ 0x21100000212,
+ 0x21300000214,
+ 0x21500000216,
+ 0x21700000218,
+ 0x2190000021a,
+ 0x21b0000021c,
+ 0x21d0000021e,
+ 0x21f00000220,
+ 0x22100000222,
+ 0x22300000224,
+ 0x22500000226,
+ 0x22700000228,
+ 0x2290000022a,
+ 0x22b0000022c,
+ 0x22d0000022e,
+ 0x22f00000230,
+ 0x23100000232,
+ 0x2330000023a,
+ 0x23c0000023d,
+ 0x23f00000241,
+ 0x24200000243,
+ 0x24700000248,
+ 0x2490000024a,
+ 0x24b0000024c,
+ 0x24d0000024e,
+ 0x24f000002b0,
+ 0x2b9000002c2,
+ 0x2c6000002d2,
+ 0x2ec000002ed,
+ 0x2ee000002ef,
+ 0x30000000340,
+ 0x34200000343,
+ 0x3460000034f,
+ 0x35000000370,
+ 0x37100000372,
+ 0x37300000374,
+ 0x37700000378,
+ 0x37b0000037e,
+ 0x39000000391,
+ 0x3ac000003cf,
+ 0x3d7000003d8,
+ 0x3d9000003da,
+ 0x3db000003dc,
+ 0x3dd000003de,
+ 0x3df000003e0,
+ 0x3e1000003e2,
+ 0x3e3000003e4,
+ 0x3e5000003e6,
+ 0x3e7000003e8,
+ 0x3e9000003ea,
+ 0x3eb000003ec,
+ 0x3ed000003ee,
+ 0x3ef000003f0,
+ 0x3f3000003f4,
+ 0x3f8000003f9,
+ 0x3fb000003fd,
+ 0x43000000460,
+ 0x46100000462,
+ 0x46300000464,
+ 0x46500000466,
+ 0x46700000468,
+ 0x4690000046a,
+ 0x46b0000046c,
+ 0x46d0000046e,
+ 0x46f00000470,
+ 0x47100000472,
+ 0x47300000474,
+ 0x47500000476,
+ 0x47700000478,
+ 0x4790000047a,
+ 0x47b0000047c,
+ 0x47d0000047e,
+ 0x47f00000480,
+ 0x48100000482,
+ 0x48300000488,
+ 0x48b0000048c,
+ 0x48d0000048e,
+ 0x48f00000490,
+ 0x49100000492,
+ 0x49300000494,
+ 0x49500000496,
+ 0x49700000498,
+ 0x4990000049a,
+ 0x49b0000049c,
+ 0x49d0000049e,
+ 0x49f000004a0,
+ 0x4a1000004a2,
+ 0x4a3000004a4,
+ 0x4a5000004a6,
+ 0x4a7000004a8,
+ 0x4a9000004aa,
+ 0x4ab000004ac,
+ 0x4ad000004ae,
+ 0x4af000004b0,
+ 0x4b1000004b2,
+ 0x4b3000004b4,
+ 0x4b5000004b6,
+ 0x4b7000004b8,
+ 0x4b9000004ba,
+ 0x4bb000004bc,
+ 0x4bd000004be,
+ 0x4bf000004c0,
+ 0x4c2000004c3,
+ 0x4c4000004c5,
+ 0x4c6000004c7,
+ 0x4c8000004c9,
+ 0x4ca000004cb,
+ 0x4cc000004cd,
+ 0x4ce000004d0,
+ 0x4d1000004d2,
+ 0x4d3000004d4,
+ 0x4d5000004d6,
+ 0x4d7000004d8,
+ 0x4d9000004da,
+ 0x4db000004dc,
+ 0x4dd000004de,
+ 0x4df000004e0,
+ 0x4e1000004e2,
+ 0x4e3000004e4,
+ 0x4e5000004e6,
+ 0x4e7000004e8,
+ 0x4e9000004ea,
+ 0x4eb000004ec,
+ 0x4ed000004ee,
+ 0x4ef000004f0,
+ 0x4f1000004f2,
+ 0x4f3000004f4,
+ 0x4f5000004f6,
+ 0x4f7000004f8,
+ 0x4f9000004fa,
+ 0x4fb000004fc,
+ 0x4fd000004fe,
+ 0x4ff00000500,
+ 0x50100000502,
+ 0x50300000504,
+ 0x50500000506,
+ 0x50700000508,
+ 0x5090000050a,
+ 0x50b0000050c,
+ 0x50d0000050e,
+ 0x50f00000510,
+ 0x51100000512,
+ 0x51300000514,
+ 0x51500000516,
+ 0x51700000518,
+ 0x5190000051a,
+ 0x51b0000051c,
+ 0x51d0000051e,
+ 0x51f00000520,
+ 0x52100000522,
+ 0x52300000524,
+ 0x52500000526,
+ 0x52700000528,
+ 0x5290000052a,
+ 0x52b0000052c,
+ 0x52d0000052e,
+ 0x52f00000530,
+ 0x5590000055a,
+ 0x56000000587,
+ 0x58800000589,
+ 0x591000005be,
+ 0x5bf000005c0,
+ 0x5c1000005c3,
+ 0x5c4000005c6,
+ 0x5c7000005c8,
+ 0x5d0000005eb,
+ 0x5ef000005f3,
+ 0x6100000061b,
+ 0x62000000640,
+ 0x64100000660,
+ 0x66e00000675,
+ 0x679000006d4,
+ 0x6d5000006dd,
+ 0x6df000006e9,
+ 0x6ea000006f0,
+ 0x6fa00000700,
+ 0x7100000074b,
+ 0x74d000007b2,
+ 0x7c0000007f6,
+ 0x7fd000007fe,
+ 0x8000000082e,
+ 0x8400000085c,
+ 0x8600000086b,
+ 0x8a0000008b5,
+ 0x8b6000008be,
+ 0x8d3000008e2,
+ 0x8e300000958,
+ 0x96000000964,
+ 0x96600000970,
+ 0x97100000984,
+ 0x9850000098d,
+ 0x98f00000991,
+ 0x993000009a9,
+ 0x9aa000009b1,
+ 0x9b2000009b3,
+ 0x9b6000009ba,
+ 0x9bc000009c5,
+ 0x9c7000009c9,
+ 0x9cb000009cf,
+ 0x9d7000009d8,
+ 0x9e0000009e4,
+ 0x9e6000009f2,
+ 0x9fc000009fd,
+ 0x9fe000009ff,
+ 0xa0100000a04,
+ 0xa0500000a0b,
+ 0xa0f00000a11,
+ 0xa1300000a29,
+ 0xa2a00000a31,
+ 0xa3200000a33,
+ 0xa3500000a36,
+ 0xa3800000a3a,
+ 0xa3c00000a3d,
+ 0xa3e00000a43,
+ 0xa4700000a49,
+ 0xa4b00000a4e,
+ 0xa5100000a52,
+ 0xa5c00000a5d,
+ 0xa6600000a76,
+ 0xa8100000a84,
+ 0xa8500000a8e,
+ 0xa8f00000a92,
+ 0xa9300000aa9,
+ 0xaaa00000ab1,
+ 0xab200000ab4,
+ 0xab500000aba,
+ 0xabc00000ac6,
+ 0xac700000aca,
+ 0xacb00000ace,
+ 0xad000000ad1,
+ 0xae000000ae4,
+ 0xae600000af0,
+ 0xaf900000b00,
+ 0xb0100000b04,
+ 0xb0500000b0d,
+ 0xb0f00000b11,
+ 0xb1300000b29,
+ 0xb2a00000b31,
+ 0xb3200000b34,
+ 0xb3500000b3a,
+ 0xb3c00000b45,
+ 0xb4700000b49,
+ 0xb4b00000b4e,
+ 0xb5600000b58,
+ 0xb5f00000b64,
+ 0xb6600000b70,
+ 0xb7100000b72,
+ 0xb8200000b84,
+ 0xb8500000b8b,
+ 0xb8e00000b91,
+ 0xb9200000b96,
+ 0xb9900000b9b,
+ 0xb9c00000b9d,
+ 0xb9e00000ba0,
+ 0xba300000ba5,
+ 0xba800000bab,
+ 0xbae00000bba,
+ 0xbbe00000bc3,
+ 0xbc600000bc9,
+ 0xbca00000bce,
+ 0xbd000000bd1,
+ 0xbd700000bd8,
+ 0xbe600000bf0,
+ 0xc0000000c0d,
+ 0xc0e00000c11,
+ 0xc1200000c29,
+ 0xc2a00000c3a,
+ 0xc3d00000c45,
+ 0xc4600000c49,
+ 0xc4a00000c4e,
+ 0xc5500000c57,
+ 0xc5800000c5b,
+ 0xc6000000c64,
+ 0xc6600000c70,
+ 0xc8000000c84,
+ 0xc8500000c8d,
+ 0xc8e00000c91,
+ 0xc9200000ca9,
+ 0xcaa00000cb4,
+ 0xcb500000cba,
+ 0xcbc00000cc5,
+ 0xcc600000cc9,
+ 0xcca00000cce,
+ 0xcd500000cd7,
+ 0xcde00000cdf,
+ 0xce000000ce4,
+ 0xce600000cf0,
+ 0xcf100000cf3,
+ 0xd0000000d04,
+ 0xd0500000d0d,
+ 0xd0e00000d11,
+ 0xd1200000d45,
+ 0xd4600000d49,
+ 0xd4a00000d4f,
+ 0xd5400000d58,
+ 0xd5f00000d64,
+ 0xd6600000d70,
+ 0xd7a00000d80,
+ 0xd8200000d84,
+ 0xd8500000d97,
+ 0xd9a00000db2,
+ 0xdb300000dbc,
+ 0xdbd00000dbe,
+ 0xdc000000dc7,
+ 0xdca00000dcb,
+ 0xdcf00000dd5,
+ 0xdd600000dd7,
+ 0xdd800000de0,
+ 0xde600000df0,
+ 0xdf200000df4,
+ 0xe0100000e33,
+ 0xe3400000e3b,
+ 0xe4000000e4f,
+ 0xe5000000e5a,
+ 0xe8100000e83,
+ 0xe8400000e85,
+ 0xe8700000e89,
+ 0xe8a00000e8b,
+ 0xe8d00000e8e,
+ 0xe9400000e98,
+ 0xe9900000ea0,
+ 0xea100000ea4,
+ 0xea500000ea6,
+ 0xea700000ea8,
+ 0xeaa00000eac,
+ 0xead00000eb3,
+ 0xeb400000eba,
+ 0xebb00000ebe,
+ 0xec000000ec5,
+ 0xec600000ec7,
+ 0xec800000ece,
+ 0xed000000eda,
+ 0xede00000ee0,
+ 0xf0000000f01,
+ 0xf0b00000f0c,
+ 0xf1800000f1a,
+ 0xf2000000f2a,
+ 0xf3500000f36,
+ 0xf3700000f38,
+ 0xf3900000f3a,
+ 0xf3e00000f43,
+ 0xf4400000f48,
+ 0xf4900000f4d,
+ 0xf4e00000f52,
+ 0xf5300000f57,
+ 0xf5800000f5c,
+ 0xf5d00000f69,
+ 0xf6a00000f6d,
+ 0xf7100000f73,
+ 0xf7400000f75,
+ 0xf7a00000f81,
+ 0xf8200000f85,
+ 0xf8600000f93,
+ 0xf9400000f98,
+ 0xf9900000f9d,
+ 0xf9e00000fa2,
+ 0xfa300000fa7,
+ 0xfa800000fac,
+ 0xfad00000fb9,
+ 0xfba00000fbd,
+ 0xfc600000fc7,
+ 0x10000000104a,
+ 0x10500000109e,
+ 0x10d0000010fb,
+ 0x10fd00001100,
+ 0x120000001249,
+ 0x124a0000124e,
+ 0x125000001257,
+ 0x125800001259,
+ 0x125a0000125e,
+ 0x126000001289,
+ 0x128a0000128e,
+ 0x1290000012b1,
+ 0x12b2000012b6,
+ 0x12b8000012bf,
+ 0x12c0000012c1,
+ 0x12c2000012c6,
+ 0x12c8000012d7,
+ 0x12d800001311,
+ 0x131200001316,
+ 0x13180000135b,
+ 0x135d00001360,
+ 0x138000001390,
+ 0x13a0000013f6,
+ 0x14010000166d,
+ 0x166f00001680,
+ 0x16810000169b,
+ 0x16a0000016eb,
+ 0x16f1000016f9,
+ 0x17000000170d,
+ 0x170e00001715,
+ 0x172000001735,
+ 0x174000001754,
+ 0x17600000176d,
+ 0x176e00001771,
+ 0x177200001774,
+ 0x1780000017b4,
+ 0x17b6000017d4,
+ 0x17d7000017d8,
+ 0x17dc000017de,
+ 0x17e0000017ea,
+ 0x18100000181a,
+ 0x182000001879,
+ 0x1880000018ab,
+ 0x18b0000018f6,
+ 0x19000000191f,
+ 0x19200000192c,
+ 0x19300000193c,
+ 0x19460000196e,
+ 0x197000001975,
+ 0x1980000019ac,
+ 0x19b0000019ca,
+ 0x19d0000019da,
+ 0x1a0000001a1c,
+ 0x1a2000001a5f,
+ 0x1a6000001a7d,
+ 0x1a7f00001a8a,
+ 0x1a9000001a9a,
+ 0x1aa700001aa8,
+ 0x1ab000001abe,
+ 0x1b0000001b4c,
+ 0x1b5000001b5a,
+ 0x1b6b00001b74,
+ 0x1b8000001bf4,
+ 0x1c0000001c38,
+ 0x1c4000001c4a,
+ 0x1c4d00001c7e,
+ 0x1cd000001cd3,
+ 0x1cd400001cfa,
+ 0x1d0000001d2c,
+ 0x1d2f00001d30,
+ 0x1d3b00001d3c,
+ 0x1d4e00001d4f,
+ 0x1d6b00001d78,
+ 0x1d7900001d9b,
+ 0x1dc000001dfa,
+ 0x1dfb00001e00,
+ 0x1e0100001e02,
+ 0x1e0300001e04,
+ 0x1e0500001e06,
+ 0x1e0700001e08,
+ 0x1e0900001e0a,
+ 0x1e0b00001e0c,
+ 0x1e0d00001e0e,
+ 0x1e0f00001e10,
+ 0x1e1100001e12,
+ 0x1e1300001e14,
+ 0x1e1500001e16,
+ 0x1e1700001e18,
+ 0x1e1900001e1a,
+ 0x1e1b00001e1c,
+ 0x1e1d00001e1e,
+ 0x1e1f00001e20,
+ 0x1e2100001e22,
+ 0x1e2300001e24,
+ 0x1e2500001e26,
+ 0x1e2700001e28,
+ 0x1e2900001e2a,
+ 0x1e2b00001e2c,
+ 0x1e2d00001e2e,
+ 0x1e2f00001e30,
+ 0x1e3100001e32,
+ 0x1e3300001e34,
+ 0x1e3500001e36,
+ 0x1e3700001e38,
+ 0x1e3900001e3a,
+ 0x1e3b00001e3c,
+ 0x1e3d00001e3e,
+ 0x1e3f00001e40,
+ 0x1e4100001e42,
+ 0x1e4300001e44,
+ 0x1e4500001e46,
+ 0x1e4700001e48,
+ 0x1e4900001e4a,
+ 0x1e4b00001e4c,
+ 0x1e4d00001e4e,
+ 0x1e4f00001e50,
+ 0x1e5100001e52,
+ 0x1e5300001e54,
+ 0x1e5500001e56,
+ 0x1e5700001e58,
+ 0x1e5900001e5a,
+ 0x1e5b00001e5c,
+ 0x1e5d00001e5e,
+ 0x1e5f00001e60,
+ 0x1e6100001e62,
+ 0x1e6300001e64,
+ 0x1e6500001e66,
+ 0x1e6700001e68,
+ 0x1e6900001e6a,
+ 0x1e6b00001e6c,
+ 0x1e6d00001e6e,
+ 0x1e6f00001e70,
+ 0x1e7100001e72,
+ 0x1e7300001e74,
+ 0x1e7500001e76,
+ 0x1e7700001e78,
+ 0x1e7900001e7a,
+ 0x1e7b00001e7c,
+ 0x1e7d00001e7e,
+ 0x1e7f00001e80,
+ 0x1e8100001e82,
+ 0x1e8300001e84,
+ 0x1e8500001e86,
+ 0x1e8700001e88,
+ 0x1e8900001e8a,
+ 0x1e8b00001e8c,
+ 0x1e8d00001e8e,
+ 0x1e8f00001e90,
+ 0x1e9100001e92,
+ 0x1e9300001e94,
+ 0x1e9500001e9a,
+ 0x1e9c00001e9e,
+ 0x1e9f00001ea0,
+ 0x1ea100001ea2,
+ 0x1ea300001ea4,
+ 0x1ea500001ea6,
+ 0x1ea700001ea8,
+ 0x1ea900001eaa,
+ 0x1eab00001eac,
+ 0x1ead00001eae,
+ 0x1eaf00001eb0,
+ 0x1eb100001eb2,
+ 0x1eb300001eb4,
+ 0x1eb500001eb6,
+ 0x1eb700001eb8,
+ 0x1eb900001eba,
+ 0x1ebb00001ebc,
+ 0x1ebd00001ebe,
+ 0x1ebf00001ec0,
+ 0x1ec100001ec2,
+ 0x1ec300001ec4,
+ 0x1ec500001ec6,
+ 0x1ec700001ec8,
+ 0x1ec900001eca,
+ 0x1ecb00001ecc,
+ 0x1ecd00001ece,
+ 0x1ecf00001ed0,
+ 0x1ed100001ed2,
+ 0x1ed300001ed4,
+ 0x1ed500001ed6,
+ 0x1ed700001ed8,
+ 0x1ed900001eda,
+ 0x1edb00001edc,
+ 0x1edd00001ede,
+ 0x1edf00001ee0,
+ 0x1ee100001ee2,
+ 0x1ee300001ee4,
+ 0x1ee500001ee6,
+ 0x1ee700001ee8,
+ 0x1ee900001eea,
+ 0x1eeb00001eec,
+ 0x1eed00001eee,
+ 0x1eef00001ef0,
+ 0x1ef100001ef2,
+ 0x1ef300001ef4,
+ 0x1ef500001ef6,
+ 0x1ef700001ef8,
+ 0x1ef900001efa,
+ 0x1efb00001efc,
+ 0x1efd00001efe,
+ 0x1eff00001f08,
+ 0x1f1000001f16,
+ 0x1f2000001f28,
+ 0x1f3000001f38,
+ 0x1f4000001f46,
+ 0x1f5000001f58,
+ 0x1f6000001f68,
+ 0x1f7000001f71,
+ 0x1f7200001f73,
+ 0x1f7400001f75,
+ 0x1f7600001f77,
+ 0x1f7800001f79,
+ 0x1f7a00001f7b,
+ 0x1f7c00001f7d,
+ 0x1fb000001fb2,
+ 0x1fb600001fb7,
+ 0x1fc600001fc7,
+ 0x1fd000001fd3,
+ 0x1fd600001fd8,
+ 0x1fe000001fe3,
+ 0x1fe400001fe8,
+ 0x1ff600001ff7,
+ 0x214e0000214f,
+ 0x218400002185,
+ 0x2c3000002c5f,
+ 0x2c6100002c62,
+ 0x2c6500002c67,
+ 0x2c6800002c69,
+ 0x2c6a00002c6b,
+ 0x2c6c00002c6d,
+ 0x2c7100002c72,
+ 0x2c7300002c75,
+ 0x2c7600002c7c,
+ 0x2c8100002c82,
+ 0x2c8300002c84,
+ 0x2c8500002c86,
+ 0x2c8700002c88,
+ 0x2c8900002c8a,
+ 0x2c8b00002c8c,
+ 0x2c8d00002c8e,
+ 0x2c8f00002c90,
+ 0x2c9100002c92,
+ 0x2c9300002c94,
+ 0x2c9500002c96,
+ 0x2c9700002c98,
+ 0x2c9900002c9a,
+ 0x2c9b00002c9c,
+ 0x2c9d00002c9e,
+ 0x2c9f00002ca0,
+ 0x2ca100002ca2,
+ 0x2ca300002ca4,
+ 0x2ca500002ca6,
+ 0x2ca700002ca8,
+ 0x2ca900002caa,
+ 0x2cab00002cac,
+ 0x2cad00002cae,
+ 0x2caf00002cb0,
+ 0x2cb100002cb2,
+ 0x2cb300002cb4,
+ 0x2cb500002cb6,
+ 0x2cb700002cb8,
+ 0x2cb900002cba,
+ 0x2cbb00002cbc,
+ 0x2cbd00002cbe,
+ 0x2cbf00002cc0,
+ 0x2cc100002cc2,
+ 0x2cc300002cc4,
+ 0x2cc500002cc6,
+ 0x2cc700002cc8,
+ 0x2cc900002cca,
+ 0x2ccb00002ccc,
+ 0x2ccd00002cce,
+ 0x2ccf00002cd0,
+ 0x2cd100002cd2,
+ 0x2cd300002cd4,
+ 0x2cd500002cd6,
+ 0x2cd700002cd8,
+ 0x2cd900002cda,
+ 0x2cdb00002cdc,
+ 0x2cdd00002cde,
+ 0x2cdf00002ce0,
+ 0x2ce100002ce2,
+ 0x2ce300002ce5,
+ 0x2cec00002ced,
+ 0x2cee00002cf2,
+ 0x2cf300002cf4,
+ 0x2d0000002d26,
+ 0x2d2700002d28,
+ 0x2d2d00002d2e,
+ 0x2d3000002d68,
+ 0x2d7f00002d97,
+ 0x2da000002da7,
+ 0x2da800002daf,
+ 0x2db000002db7,
+ 0x2db800002dbf,
+ 0x2dc000002dc7,
+ 0x2dc800002dcf,
+ 0x2dd000002dd7,
+ 0x2dd800002ddf,
+ 0x2de000002e00,
+ 0x2e2f00002e30,
+ 0x300500003008,
+ 0x302a0000302e,
+ 0x303c0000303d,
+ 0x304100003097,
+ 0x30990000309b,
+ 0x309d0000309f,
+ 0x30a1000030fb,
+ 0x30fc000030ff,
+ 0x310500003130,
+ 0x31a0000031bb,
+ 0x31f000003200,
+ 0x340000004db6,
+ 0x4e0000009ff0,
+ 0xa0000000a48d,
+ 0xa4d00000a4fe,
+ 0xa5000000a60d,
+ 0xa6100000a62c,
+ 0xa6410000a642,
+ 0xa6430000a644,
+ 0xa6450000a646,
+ 0xa6470000a648,
+ 0xa6490000a64a,
+ 0xa64b0000a64c,
+ 0xa64d0000a64e,
+ 0xa64f0000a650,
+ 0xa6510000a652,
+ 0xa6530000a654,
+ 0xa6550000a656,
+ 0xa6570000a658,
+ 0xa6590000a65a,
+ 0xa65b0000a65c,
+ 0xa65d0000a65e,
+ 0xa65f0000a660,
+ 0xa6610000a662,
+ 0xa6630000a664,
+ 0xa6650000a666,
+ 0xa6670000a668,
+ 0xa6690000a66a,
+ 0xa66b0000a66c,
+ 0xa66d0000a670,
+ 0xa6740000a67e,
+ 0xa67f0000a680,
+ 0xa6810000a682,
+ 0xa6830000a684,
+ 0xa6850000a686,
+ 0xa6870000a688,
+ 0xa6890000a68a,
+ 0xa68b0000a68c,
+ 0xa68d0000a68e,
+ 0xa68f0000a690,
+ 0xa6910000a692,
+ 0xa6930000a694,
+ 0xa6950000a696,
+ 0xa6970000a698,
+ 0xa6990000a69a,
+ 0xa69b0000a69c,
+ 0xa69e0000a6e6,
+ 0xa6f00000a6f2,
+ 0xa7170000a720,
+ 0xa7230000a724,
+ 0xa7250000a726,
+ 0xa7270000a728,
+ 0xa7290000a72a,
+ 0xa72b0000a72c,
+ 0xa72d0000a72e,
+ 0xa72f0000a732,
+ 0xa7330000a734,
+ 0xa7350000a736,
+ 0xa7370000a738,
+ 0xa7390000a73a,
+ 0xa73b0000a73c,
+ 0xa73d0000a73e,
+ 0xa73f0000a740,
+ 0xa7410000a742,
+ 0xa7430000a744,
+ 0xa7450000a746,
+ 0xa7470000a748,
+ 0xa7490000a74a,
+ 0xa74b0000a74c,
+ 0xa74d0000a74e,
+ 0xa74f0000a750,
+ 0xa7510000a752,
+ 0xa7530000a754,
+ 0xa7550000a756,
+ 0xa7570000a758,
+ 0xa7590000a75a,
+ 0xa75b0000a75c,
+ 0xa75d0000a75e,
+ 0xa75f0000a760,
+ 0xa7610000a762,
+ 0xa7630000a764,
+ 0xa7650000a766,
+ 0xa7670000a768,
+ 0xa7690000a76a,
+ 0xa76b0000a76c,
+ 0xa76d0000a76e,
+ 0xa76f0000a770,
+ 0xa7710000a779,
+ 0xa77a0000a77b,
+ 0xa77c0000a77d,
+ 0xa77f0000a780,
+ 0xa7810000a782,
+ 0xa7830000a784,
+ 0xa7850000a786,
+ 0xa7870000a789,
+ 0xa78c0000a78d,
+ 0xa78e0000a790,
+ 0xa7910000a792,
+ 0xa7930000a796,
+ 0xa7970000a798,
+ 0xa7990000a79a,
+ 0xa79b0000a79c,
+ 0xa79d0000a79e,
+ 0xa79f0000a7a0,
+ 0xa7a10000a7a2,
+ 0xa7a30000a7a4,
+ 0xa7a50000a7a6,
+ 0xa7a70000a7a8,
+ 0xa7a90000a7aa,
+ 0xa7af0000a7b0,
+ 0xa7b50000a7b6,
+ 0xa7b70000a7b8,
+ 0xa7b90000a7ba,
+ 0xa7f70000a7f8,
+ 0xa7fa0000a828,
+ 0xa8400000a874,
+ 0xa8800000a8c6,
+ 0xa8d00000a8da,
+ 0xa8e00000a8f8,
+ 0xa8fb0000a8fc,
+ 0xa8fd0000a92e,
+ 0xa9300000a954,
+ 0xa9800000a9c1,
+ 0xa9cf0000a9da,
+ 0xa9e00000a9ff,
+ 0xaa000000aa37,
+ 0xaa400000aa4e,
+ 0xaa500000aa5a,
+ 0xaa600000aa77,
+ 0xaa7a0000aac3,
+ 0xaadb0000aade,
+ 0xaae00000aaf0,
+ 0xaaf20000aaf7,
+ 0xab010000ab07,
+ 0xab090000ab0f,
+ 0xab110000ab17,
+ 0xab200000ab27,
+ 0xab280000ab2f,
+ 0xab300000ab5b,
+ 0xab600000ab66,
+ 0xabc00000abeb,
+ 0xabec0000abee,
+ 0xabf00000abfa,
+ 0xac000000d7a4,
+ 0xfa0e0000fa10,
+ 0xfa110000fa12,
+ 0xfa130000fa15,
+ 0xfa1f0000fa20,
+ 0xfa210000fa22,
+ 0xfa230000fa25,
+ 0xfa270000fa2a,
+ 0xfb1e0000fb1f,
+ 0xfe200000fe30,
+ 0xfe730000fe74,
+ 0x100000001000c,
+ 0x1000d00010027,
+ 0x100280001003b,
+ 0x1003c0001003e,
+ 0x1003f0001004e,
+ 0x100500001005e,
+ 0x10080000100fb,
+ 0x101fd000101fe,
+ 0x102800001029d,
+ 0x102a0000102d1,
+ 0x102e0000102e1,
+ 0x1030000010320,
+ 0x1032d00010341,
+ 0x103420001034a,
+ 0x103500001037b,
+ 0x103800001039e,
+ 0x103a0000103c4,
+ 0x103c8000103d0,
+ 0x104280001049e,
+ 0x104a0000104aa,
+ 0x104d8000104fc,
+ 0x1050000010528,
+ 0x1053000010564,
+ 0x1060000010737,
+ 0x1074000010756,
+ 0x1076000010768,
+ 0x1080000010806,
+ 0x1080800010809,
+ 0x1080a00010836,
+ 0x1083700010839,
+ 0x1083c0001083d,
+ 0x1083f00010856,
+ 0x1086000010877,
+ 0x108800001089f,
+ 0x108e0000108f3,
+ 0x108f4000108f6,
+ 0x1090000010916,
+ 0x109200001093a,
+ 0x10980000109b8,
+ 0x109be000109c0,
+ 0x10a0000010a04,
+ 0x10a0500010a07,
+ 0x10a0c00010a14,
+ 0x10a1500010a18,
+ 0x10a1900010a36,
+ 0x10a3800010a3b,
+ 0x10a3f00010a40,
+ 0x10a6000010a7d,
+ 0x10a8000010a9d,
+ 0x10ac000010ac8,
+ 0x10ac900010ae7,
+ 0x10b0000010b36,
+ 0x10b4000010b56,
+ 0x10b6000010b73,
+ 0x10b8000010b92,
+ 0x10c0000010c49,
+ 0x10cc000010cf3,
+ 0x10d0000010d28,
+ 0x10d3000010d3a,
+ 0x10f0000010f1d,
+ 0x10f2700010f28,
+ 0x10f3000010f51,
+ 0x1100000011047,
+ 0x1106600011070,
+ 0x1107f000110bb,
+ 0x110d0000110e9,
+ 0x110f0000110fa,
+ 0x1110000011135,
+ 0x1113600011140,
+ 0x1114400011147,
+ 0x1115000011174,
+ 0x1117600011177,
+ 0x11180000111c5,
+ 0x111c9000111cd,
+ 0x111d0000111db,
+ 0x111dc000111dd,
+ 0x1120000011212,
+ 0x1121300011238,
+ 0x1123e0001123f,
+ 0x1128000011287,
+ 0x1128800011289,
+ 0x1128a0001128e,
+ 0x1128f0001129e,
+ 0x1129f000112a9,
+ 0x112b0000112eb,
+ 0x112f0000112fa,
+ 0x1130000011304,
+ 0x113050001130d,
+ 0x1130f00011311,
+ 0x1131300011329,
+ 0x1132a00011331,
+ 0x1133200011334,
+ 0x113350001133a,
+ 0x1133b00011345,
+ 0x1134700011349,
+ 0x1134b0001134e,
+ 0x1135000011351,
+ 0x1135700011358,
+ 0x1135d00011364,
+ 0x113660001136d,
+ 0x1137000011375,
+ 0x114000001144b,
+ 0x114500001145a,
+ 0x1145e0001145f,
+ 0x11480000114c6,
+ 0x114c7000114c8,
+ 0x114d0000114da,
+ 0x11580000115b6,
+ 0x115b8000115c1,
+ 0x115d8000115de,
+ 0x1160000011641,
+ 0x1164400011645,
+ 0x116500001165a,
+ 0x11680000116b8,
+ 0x116c0000116ca,
+ 0x117000001171b,
+ 0x1171d0001172c,
+ 0x117300001173a,
+ 0x118000001183b,
+ 0x118c0000118ea,
+ 0x118ff00011900,
+ 0x11a0000011a3f,
+ 0x11a4700011a48,
+ 0x11a5000011a84,
+ 0x11a8600011a9a,
+ 0x11a9d00011a9e,
+ 0x11ac000011af9,
+ 0x11c0000011c09,
+ 0x11c0a00011c37,
+ 0x11c3800011c41,
+ 0x11c5000011c5a,
+ 0x11c7200011c90,
+ 0x11c9200011ca8,
+ 0x11ca900011cb7,
+ 0x11d0000011d07,
+ 0x11d0800011d0a,
+ 0x11d0b00011d37,
+ 0x11d3a00011d3b,
+ 0x11d3c00011d3e,
+ 0x11d3f00011d48,
+ 0x11d5000011d5a,
+ 0x11d6000011d66,
+ 0x11d6700011d69,
+ 0x11d6a00011d8f,
+ 0x11d9000011d92,
+ 0x11d9300011d99,
+ 0x11da000011daa,
+ 0x11ee000011ef7,
+ 0x120000001239a,
+ 0x1248000012544,
+ 0x130000001342f,
+ 0x1440000014647,
+ 0x1680000016a39,
+ 0x16a4000016a5f,
+ 0x16a6000016a6a,
+ 0x16ad000016aee,
+ 0x16af000016af5,
+ 0x16b0000016b37,
+ 0x16b4000016b44,
+ 0x16b5000016b5a,
+ 0x16b6300016b78,
+ 0x16b7d00016b90,
+ 0x16e6000016e80,
+ 0x16f0000016f45,
+ 0x16f5000016f7f,
+ 0x16f8f00016fa0,
+ 0x16fe000016fe2,
+ 0x17000000187f2,
+ 0x1880000018af3,
+ 0x1b0000001b11f,
+ 0x1b1700001b2fc,
+ 0x1bc000001bc6b,
+ 0x1bc700001bc7d,
+ 0x1bc800001bc89,
+ 0x1bc900001bc9a,
+ 0x1bc9d0001bc9f,
+ 0x1da000001da37,
+ 0x1da3b0001da6d,
+ 0x1da750001da76,
+ 0x1da840001da85,
+ 0x1da9b0001daa0,
+ 0x1daa10001dab0,
+ 0x1e0000001e007,
+ 0x1e0080001e019,
+ 0x1e01b0001e022,
+ 0x1e0230001e025,
+ 0x1e0260001e02b,
+ 0x1e8000001e8c5,
+ 0x1e8d00001e8d7,
+ 0x1e9220001e94b,
+ 0x1e9500001e95a,
+ 0x200000002a6d7,
+ 0x2a7000002b735,
+ 0x2b7400002b81e,
+ 0x2b8200002cea2,
+ 0x2ceb00002ebe1,
+ ),
+ 'CONTEXTJ': (
+ 0x200c0000200e,
+ ),
+ 'CONTEXTO': (
+ 0xb7000000b8,
+ 0x37500000376,
+ 0x5f3000005f5,
+ 0x6600000066a,
+ 0x6f0000006fa,
+ 0x30fb000030fc,
+ ),
+}
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/intranges.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/intranges.py
new file mode 100644
index 00000000..fa8a7356
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/intranges.py
@@ -0,0 +1,53 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+
+def intranges_from_list(list_):
+ """Represent a list of integers as a sequence of ranges:
+ ((start_0, end_0), (start_1, end_1), ...), such that the original
+ integers are exactly those x such that start_i <= x < end_i for some i.
+
+ Ranges are encoded as single integers (start << 32 | end), not as tuples.
+ """
+
+ sorted_list = sorted(list_)
+ ranges = []
+ last_write = -1
+ for i in range(len(sorted_list)):
+ if i+1 < len(sorted_list):
+ if sorted_list[i] == sorted_list[i+1]-1:
+ continue
+ current_range = sorted_list[last_write+1:i+1]
+ ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+ last_write = i
+
+ return tuple(ranges)
+
+def _encode_range(start, end):
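+    # e.g. _encode_range(0x61, 0x7b) == 0x610000007b, the packed form used
+    # by the literals in idnadata.py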
+ return (start << 32) | end
+
+def _decode_range(r):
+ return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_, ranges):
+ """Determine if `int_` falls into one of the ranges in `ranges`."""
+ tuple_ = _encode_range(int_, 0)
+ pos = bisect.bisect_left(ranges, tuple_)
+    # we could be immediately ahead of an encoded range (start, end)
+    # with start <= int_ < end
+ if pos > 0:
+ left, right = _decode_range(ranges[pos-1])
+ if left <= int_ < right:
+ return True
+    # or we could be immediately behind an encoded range (int_, end)
+ if pos < len(ranges):
+ left, _ = _decode_range(ranges[pos])
+ if left == int_:
+ return True
+ return False
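The two helpers above round-trip as follows; a short usage sketch with illustrative values, assuming the vendored import path shown in the diff header:

    from pip._vendor.idna.intranges import intranges_from_list, intranges_contain

    # The runs 10..12 and 100..101 collapse into two encoded ranges.
    ranges = intranges_from_list([10, 11, 12, 100, 101])
    assert intranges_contain(11, ranges)        # inside the first run
    assert intranges_contain(100, ranges)       # exactly at a run's start
    assert not intranges_contain(13, ranges)    # ends are exclusive

bisect_left lands either just past the only range whose start is below int_ or exactly on a range starting at int_, which is why intranges_contain checks both neighbours of the insertion point.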
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/package_data.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/package_data.py
new file mode 100644
index 00000000..257e8989
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/package_data.py
@@ -0,0 +1,2 @@
+__version__ = '2.8'
+
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/uts46data.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/uts46data.py
new file mode 100644
index 00000000..a68ed4c0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/idna/uts46data.py
@@ -0,0 +1,8205 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+"""IDNA Mapping Table from UTS46."""
+
+
+__version__ = "11.0.0"
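+# Each row below is (codepoint, status) or (codepoint, status, mapping) and
+# applies up to, but not including, the next row's codepoint. Per UTS #46:
+# 'V' valid, 'M' mapped to the given string, 'D' deviation, 'I' ignored,
+# 'X' disallowed, '3' disallowed under strict STD3 rules (mapped when a
+# replacement string is present).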
+def _seg_0():
+ return [
+ (0x0, '3'),
+ (0x1, '3'),
+ (0x2, '3'),
+ (0x3, '3'),
+ (0x4, '3'),
+ (0x5, '3'),
+ (0x6, '3'),
+ (0x7, '3'),
+ (0x8, '3'),
+ (0x9, '3'),
+ (0xA, '3'),
+ (0xB, '3'),
+ (0xC, '3'),
+ (0xD, '3'),
+ (0xE, '3'),
+ (0xF, '3'),
+ (0x10, '3'),
+ (0x11, '3'),
+ (0x12, '3'),
+ (0x13, '3'),
+ (0x14, '3'),
+ (0x15, '3'),
+ (0x16, '3'),
+ (0x17, '3'),
+ (0x18, '3'),
+ (0x19, '3'),
+ (0x1A, '3'),
+ (0x1B, '3'),
+ (0x1C, '3'),
+ (0x1D, '3'),
+ (0x1E, '3'),
+ (0x1F, '3'),
+ (0x20, '3'),
+ (0x21, '3'),
+ (0x22, '3'),
+ (0x23, '3'),
+ (0x24, '3'),
+ (0x25, '3'),
+ (0x26, '3'),
+ (0x27, '3'),
+ (0x28, '3'),
+ (0x29, '3'),
+ (0x2A, '3'),
+ (0x2B, '3'),
+ (0x2C, '3'),
+ (0x2D, 'V'),
+ (0x2E, 'V'),
+ (0x2F, '3'),
+ (0x30, 'V'),
+ (0x31, 'V'),
+ (0x32, 'V'),
+ (0x33, 'V'),
+ (0x34, 'V'),
+ (0x35, 'V'),
+ (0x36, 'V'),
+ (0x37, 'V'),
+ (0x38, 'V'),
+ (0x39, 'V'),
+ (0x3A, '3'),
+ (0x3B, '3'),
+ (0x3C, '3'),
+ (0x3D, '3'),
+ (0x3E, '3'),
+ (0x3F, '3'),
+ (0x40, '3'),
+ (0x41, 'M', u'a'),
+ (0x42, 'M', u'b'),
+ (0x43, 'M', u'c'),
+ (0x44, 'M', u'd'),
+ (0x45, 'M', u'e'),
+ (0x46, 'M', u'f'),
+ (0x47, 'M', u'g'),
+ (0x48, 'M', u'h'),
+ (0x49, 'M', u'i'),
+ (0x4A, 'M', u'j'),
+ (0x4B, 'M', u'k'),
+ (0x4C, 'M', u'l'),
+ (0x4D, 'M', u'm'),
+ (0x4E, 'M', u'n'),
+ (0x4F, 'M', u'o'),
+ (0x50, 'M', u'p'),
+ (0x51, 'M', u'q'),
+ (0x52, 'M', u'r'),
+ (0x53, 'M', u's'),
+ (0x54, 'M', u't'),
+ (0x55, 'M', u'u'),
+ (0x56, 'M', u'v'),
+ (0x57, 'M', u'w'),
+ (0x58, 'M', u'x'),
+ (0x59, 'M', u'y'),
+ (0x5A, 'M', u'z'),
+ (0x5B, '3'),
+ (0x5C, '3'),
+ (0x5D, '3'),
+ (0x5E, '3'),
+ (0x5F, '3'),
+ (0x60, '3'),
+ (0x61, 'V'),
+ (0x62, 'V'),
+ (0x63, 'V'),
+ ]
+
+def _seg_1():
+ return [
+ (0x64, 'V'),
+ (0x65, 'V'),
+ (0x66, 'V'),
+ (0x67, 'V'),
+ (0x68, 'V'),
+ (0x69, 'V'),
+ (0x6A, 'V'),
+ (0x6B, 'V'),
+ (0x6C, 'V'),
+ (0x6D, 'V'),
+ (0x6E, 'V'),
+ (0x6F, 'V'),
+ (0x70, 'V'),
+ (0x71, 'V'),
+ (0x72, 'V'),
+ (0x73, 'V'),
+ (0x74, 'V'),
+ (0x75, 'V'),
+ (0x76, 'V'),
+ (0x77, 'V'),
+ (0x78, 'V'),
+ (0x79, 'V'),
+ (0x7A, 'V'),
+ (0x7B, '3'),
+ (0x7C, '3'),
+ (0x7D, '3'),
+ (0x7E, '3'),
+ (0x7F, '3'),
+ (0x80, 'X'),
+ (0x81, 'X'),
+ (0x82, 'X'),
+ (0x83, 'X'),
+ (0x84, 'X'),
+ (0x85, 'X'),
+ (0x86, 'X'),
+ (0x87, 'X'),
+ (0x88, 'X'),
+ (0x89, 'X'),
+ (0x8A, 'X'),
+ (0x8B, 'X'),
+ (0x8C, 'X'),
+ (0x8D, 'X'),
+ (0x8E, 'X'),
+ (0x8F, 'X'),
+ (0x90, 'X'),
+ (0x91, 'X'),
+ (0x92, 'X'),
+ (0x93, 'X'),
+ (0x94, 'X'),
+ (0x95, 'X'),
+ (0x96, 'X'),
+ (0x97, 'X'),
+ (0x98, 'X'),
+ (0x99, 'X'),
+ (0x9A, 'X'),
+ (0x9B, 'X'),
+ (0x9C, 'X'),
+ (0x9D, 'X'),
+ (0x9E, 'X'),
+ (0x9F, 'X'),
+ (0xA0, '3', u' '),
+ (0xA1, 'V'),
+ (0xA2, 'V'),
+ (0xA3, 'V'),
+ (0xA4, 'V'),
+ (0xA5, 'V'),
+ (0xA6, 'V'),
+ (0xA7, 'V'),
+ (0xA8, '3', u' ̈'),
+ (0xA9, 'V'),
+ (0xAA, 'M', u'a'),
+ (0xAB, 'V'),
+ (0xAC, 'V'),
+ (0xAD, 'I'),
+ (0xAE, 'V'),
+ (0xAF, '3', u' ̄'),
+ (0xB0, 'V'),
+ (0xB1, 'V'),
+ (0xB2, 'M', u'2'),
+ (0xB3, 'M', u'3'),
+ (0xB4, '3', u' ́'),
+ (0xB5, 'M', u'μ'),
+ (0xB6, 'V'),
+ (0xB7, 'V'),
+ (0xB8, '3', u' ̧'),
+ (0xB9, 'M', u'1'),
+ (0xBA, 'M', u'o'),
+ (0xBB, 'V'),
+ (0xBC, 'M', u'1⁄4'),
+ (0xBD, 'M', u'1⁄2'),
+ (0xBE, 'M', u'3⁄4'),
+ (0xBF, 'V'),
+ (0xC0, 'M', u'à'),
+ (0xC1, 'M', u'á'),
+ (0xC2, 'M', u'â'),
+ (0xC3, 'M', u'ã'),
+ (0xC4, 'M', u'ä'),
+ (0xC5, 'M', u'å'),
+ (0xC6, 'M', u'æ'),
+ (0xC7, 'M', u'ç'),
+ ]
+
+def _seg_2():
+ return [
+ (0xC8, 'M', u'è'),
+ (0xC9, 'M', u'é'),
+ (0xCA, 'M', u'ê'),
+ (0xCB, 'M', u'ë'),
+ (0xCC, 'M', u'ì'),
+ (0xCD, 'M', u'í'),
+ (0xCE, 'M', u'î'),
+ (0xCF, 'M', u'ï'),
+ (0xD0, 'M', u'ð'),
+ (0xD1, 'M', u'ñ'),
+ (0xD2, 'M', u'ò'),
+ (0xD3, 'M', u'ó'),
+ (0xD4, 'M', u'ô'),
+ (0xD5, 'M', u'õ'),
+ (0xD6, 'M', u'ö'),
+ (0xD7, 'V'),
+ (0xD8, 'M', u'ø'),
+ (0xD9, 'M', u'ù'),
+ (0xDA, 'M', u'ú'),
+ (0xDB, 'M', u'û'),
+ (0xDC, 'M', u'ü'),
+ (0xDD, 'M', u'ý'),
+ (0xDE, 'M', u'þ'),
+ (0xDF, 'D', u'ss'),
+ (0xE0, 'V'),
+ (0xE1, 'V'),
+ (0xE2, 'V'),
+ (0xE3, 'V'),
+ (0xE4, 'V'),
+ (0xE5, 'V'),
+ (0xE6, 'V'),
+ (0xE7, 'V'),
+ (0xE8, 'V'),
+ (0xE9, 'V'),
+ (0xEA, 'V'),
+ (0xEB, 'V'),
+ (0xEC, 'V'),
+ (0xED, 'V'),
+ (0xEE, 'V'),
+ (0xEF, 'V'),
+ (0xF0, 'V'),
+ (0xF1, 'V'),
+ (0xF2, 'V'),
+ (0xF3, 'V'),
+ (0xF4, 'V'),
+ (0xF5, 'V'),
+ (0xF6, 'V'),
+ (0xF7, 'V'),
+ (0xF8, 'V'),
+ (0xF9, 'V'),
+ (0xFA, 'V'),
+ (0xFB, 'V'),
+ (0xFC, 'V'),
+ (0xFD, 'V'),
+ (0xFE, 'V'),
+ (0xFF, 'V'),
+ (0x100, 'M', u'ā'),
+ (0x101, 'V'),
+ (0x102, 'M', u'ă'),
+ (0x103, 'V'),
+ (0x104, 'M', u'ą'),
+ (0x105, 'V'),
+ (0x106, 'M', u'ć'),
+ (0x107, 'V'),
+ (0x108, 'M', u'ĉ'),
+ (0x109, 'V'),
+ (0x10A, 'M', u'ċ'),
+ (0x10B, 'V'),
+ (0x10C, 'M', u'č'),
+ (0x10D, 'V'),
+ (0x10E, 'M', u'ď'),
+ (0x10F, 'V'),
+ (0x110, 'M', u'đ'),
+ (0x111, 'V'),
+ (0x112, 'M', u'ē'),
+ (0x113, 'V'),
+ (0x114, 'M', u'ĕ'),
+ (0x115, 'V'),
+ (0x116, 'M', u'ė'),
+ (0x117, 'V'),
+ (0x118, 'M', u'ę'),
+ (0x119, 'V'),
+ (0x11A, 'M', u'ě'),
+ (0x11B, 'V'),
+ (0x11C, 'M', u'ĝ'),
+ (0x11D, 'V'),
+ (0x11E, 'M', u'ğ'),
+ (0x11F, 'V'),
+ (0x120, 'M', u'ġ'),
+ (0x121, 'V'),
+ (0x122, 'M', u'ģ'),
+ (0x123, 'V'),
+ (0x124, 'M', u'ĥ'),
+ (0x125, 'V'),
+ (0x126, 'M', u'ħ'),
+ (0x127, 'V'),
+ (0x128, 'M', u'ĩ'),
+ (0x129, 'V'),
+ (0x12A, 'M', u'ī'),
+ (0x12B, 'V'),
+ ]
+
+def _seg_3():
+ return [
+ (0x12C, 'M', u'ĭ'),
+ (0x12D, 'V'),
+ (0x12E, 'M', u'į'),
+ (0x12F, 'V'),
+ (0x130, 'M', u'i̇'),
+ (0x131, 'V'),
+ (0x132, 'M', u'ij'),
+ (0x134, 'M', u'ĵ'),
+ (0x135, 'V'),
+ (0x136, 'M', u'ķ'),
+ (0x137, 'V'),
+ (0x139, 'M', u'ĺ'),
+ (0x13A, 'V'),
+ (0x13B, 'M', u'ļ'),
+ (0x13C, 'V'),
+ (0x13D, 'M', u'ľ'),
+ (0x13E, 'V'),
+ (0x13F, 'M', u'l·'),
+ (0x141, 'M', u'ł'),
+ (0x142, 'V'),
+ (0x143, 'M', u'ń'),
+ (0x144, 'V'),
+ (0x145, 'M', u'ņ'),
+ (0x146, 'V'),
+ (0x147, 'M', u'ň'),
+ (0x148, 'V'),
+ (0x149, 'M', u'ʼn'),
+ (0x14A, 'M', u'ŋ'),
+ (0x14B, 'V'),
+ (0x14C, 'M', u'ō'),
+ (0x14D, 'V'),
+ (0x14E, 'M', u'ŏ'),
+ (0x14F, 'V'),
+ (0x150, 'M', u'ő'),
+ (0x151, 'V'),
+ (0x152, 'M', u'œ'),
+ (0x153, 'V'),
+ (0x154, 'M', u'ŕ'),
+ (0x155, 'V'),
+ (0x156, 'M', u'ŗ'),
+ (0x157, 'V'),
+ (0x158, 'M', u'ř'),
+ (0x159, 'V'),
+ (0x15A, 'M', u'ś'),
+ (0x15B, 'V'),
+ (0x15C, 'M', u'ŝ'),
+ (0x15D, 'V'),
+ (0x15E, 'M', u'ş'),
+ (0x15F, 'V'),
+ (0x160, 'M', u'š'),
+ (0x161, 'V'),
+ (0x162, 'M', u'ţ'),
+ (0x163, 'V'),
+ (0x164, 'M', u'ť'),
+ (0x165, 'V'),
+ (0x166, 'M', u'ŧ'),
+ (0x167, 'V'),
+ (0x168, 'M', u'ũ'),
+ (0x169, 'V'),
+ (0x16A, 'M', u'ū'),
+ (0x16B, 'V'),
+ (0x16C, 'M', u'ŭ'),
+ (0x16D, 'V'),
+ (0x16E, 'M', u'ů'),
+ (0x16F, 'V'),
+ (0x170, 'M', u'ű'),
+ (0x171, 'V'),
+ (0x172, 'M', u'ų'),
+ (0x173, 'V'),
+ (0x174, 'M', u'ŵ'),
+ (0x175, 'V'),
+ (0x176, 'M', u'ŷ'),
+ (0x177, 'V'),
+ (0x178, 'M', u'ÿ'),
+ (0x179, 'M', u'ź'),
+ (0x17A, 'V'),
+ (0x17B, 'M', u'ż'),
+ (0x17C, 'V'),
+ (0x17D, 'M', u'ž'),
+ (0x17E, 'V'),
+ (0x17F, 'M', u's'),
+ (0x180, 'V'),
+ (0x181, 'M', u'ɓ'),
+ (0x182, 'M', u'ƃ'),
+ (0x183, 'V'),
+ (0x184, 'M', u'ƅ'),
+ (0x185, 'V'),
+ (0x186, 'M', u'ɔ'),
+ (0x187, 'M', u'ƈ'),
+ (0x188, 'V'),
+ (0x189, 'M', u'ɖ'),
+ (0x18A, 'M', u'ɗ'),
+ (0x18B, 'M', u'ƌ'),
+ (0x18C, 'V'),
+ (0x18E, 'M', u'ǝ'),
+ (0x18F, 'M', u'ə'),
+ (0x190, 'M', u'ɛ'),
+ (0x191, 'M', u'ƒ'),
+ (0x192, 'V'),
+ (0x193, 'M', u'ɠ'),
+ ]
+
+def _seg_4():
+ return [
+ (0x194, 'M', u'ɣ'),
+ (0x195, 'V'),
+ (0x196, 'M', u'ɩ'),
+ (0x197, 'M', u'ɨ'),
+ (0x198, 'M', u'ƙ'),
+ (0x199, 'V'),
+ (0x19C, 'M', u'ɯ'),
+ (0x19D, 'M', u'ɲ'),
+ (0x19E, 'V'),
+ (0x19F, 'M', u'ɵ'),
+ (0x1A0, 'M', u'ơ'),
+ (0x1A1, 'V'),
+ (0x1A2, 'M', u'ƣ'),
+ (0x1A3, 'V'),
+ (0x1A4, 'M', u'ƥ'),
+ (0x1A5, 'V'),
+ (0x1A6, 'M', u'ʀ'),
+ (0x1A7, 'M', u'ƨ'),
+ (0x1A8, 'V'),
+ (0x1A9, 'M', u'ʃ'),
+ (0x1AA, 'V'),
+ (0x1AC, 'M', u'ƭ'),
+ (0x1AD, 'V'),
+ (0x1AE, 'M', u'ʈ'),
+ (0x1AF, 'M', u'ư'),
+ (0x1B0, 'V'),
+ (0x1B1, 'M', u'ʊ'),
+ (0x1B2, 'M', u'ʋ'),
+ (0x1B3, 'M', u'ƴ'),
+ (0x1B4, 'V'),
+ (0x1B5, 'M', u'ƶ'),
+ (0x1B6, 'V'),
+ (0x1B7, 'M', u'ʒ'),
+ (0x1B8, 'M', u'ƹ'),
+ (0x1B9, 'V'),
+ (0x1BC, 'M', u'ƽ'),
+ (0x1BD, 'V'),
+ (0x1C4, 'M', u'dž'),
+ (0x1C7, 'M', u'lj'),
+ (0x1CA, 'M', u'nj'),
+ (0x1CD, 'M', u'ǎ'),
+ (0x1CE, 'V'),
+ (0x1CF, 'M', u'ǐ'),
+ (0x1D0, 'V'),
+ (0x1D1, 'M', u'ǒ'),
+ (0x1D2, 'V'),
+ (0x1D3, 'M', u'ǔ'),
+ (0x1D4, 'V'),
+ (0x1D5, 'M', u'ǖ'),
+ (0x1D6, 'V'),
+ (0x1D7, 'M', u'ǘ'),
+ (0x1D8, 'V'),
+ (0x1D9, 'M', u'ǚ'),
+ (0x1DA, 'V'),
+ (0x1DB, 'M', u'ǜ'),
+ (0x1DC, 'V'),
+ (0x1DE, 'M', u'ǟ'),
+ (0x1DF, 'V'),
+ (0x1E0, 'M', u'ǡ'),
+ (0x1E1, 'V'),
+ (0x1E2, 'M', u'ǣ'),
+ (0x1E3, 'V'),
+ (0x1E4, 'M', u'ǥ'),
+ (0x1E5, 'V'),
+ (0x1E6, 'M', u'ǧ'),
+ (0x1E7, 'V'),
+ (0x1E8, 'M', u'ǩ'),
+ (0x1E9, 'V'),
+ (0x1EA, 'M', u'ǫ'),
+ (0x1EB, 'V'),
+ (0x1EC, 'M', u'ǭ'),
+ (0x1ED, 'V'),
+ (0x1EE, 'M', u'ǯ'),
+ (0x1EF, 'V'),
+ (0x1F1, 'M', u'dz'),
+ (0x1F4, 'M', u'ǵ'),
+ (0x1F5, 'V'),
+ (0x1F6, 'M', u'ƕ'),
+ (0x1F7, 'M', u'ƿ'),
+ (0x1F8, 'M', u'ǹ'),
+ (0x1F9, 'V'),
+ (0x1FA, 'M', u'ǻ'),
+ (0x1FB, 'V'),
+ (0x1FC, 'M', u'ǽ'),
+ (0x1FD, 'V'),
+ (0x1FE, 'M', u'ǿ'),
+ (0x1FF, 'V'),
+ (0x200, 'M', u'ȁ'),
+ (0x201, 'V'),
+ (0x202, 'M', u'ȃ'),
+ (0x203, 'V'),
+ (0x204, 'M', u'ȅ'),
+ (0x205, 'V'),
+ (0x206, 'M', u'ȇ'),
+ (0x207, 'V'),
+ (0x208, 'M', u'ȉ'),
+ (0x209, 'V'),
+ (0x20A, 'M', u'ȋ'),
+ (0x20B, 'V'),
+ (0x20C, 'M', u'ȍ'),
+ ]
+
+def _seg_5():
+ return [
+ (0x20D, 'V'),
+ (0x20E, 'M', u'ȏ'),
+ (0x20F, 'V'),
+ (0x210, 'M', u'ȑ'),
+ (0x211, 'V'),
+ (0x212, 'M', u'ȓ'),
+ (0x213, 'V'),
+ (0x214, 'M', u'ȕ'),
+ (0x215, 'V'),
+ (0x216, 'M', u'ȗ'),
+ (0x217, 'V'),
+ (0x218, 'M', u'ș'),
+ (0x219, 'V'),
+ (0x21A, 'M', u'ț'),
+ (0x21B, 'V'),
+ (0x21C, 'M', u'ȝ'),
+ (0x21D, 'V'),
+ (0x21E, 'M', u'ȟ'),
+ (0x21F, 'V'),
+ (0x220, 'M', u'ƞ'),
+ (0x221, 'V'),
+ (0x222, 'M', u'ȣ'),
+ (0x223, 'V'),
+ (0x224, 'M', u'ȥ'),
+ (0x225, 'V'),
+ (0x226, 'M', u'ȧ'),
+ (0x227, 'V'),
+ (0x228, 'M', u'ȩ'),
+ (0x229, 'V'),
+ (0x22A, 'M', u'ȫ'),
+ (0x22B, 'V'),
+ (0x22C, 'M', u'ȭ'),
+ (0x22D, 'V'),
+ (0x22E, 'M', u'ȯ'),
+ (0x22F, 'V'),
+ (0x230, 'M', u'ȱ'),
+ (0x231, 'V'),
+ (0x232, 'M', u'ȳ'),
+ (0x233, 'V'),
+ (0x23A, 'M', u'ⱥ'),
+ (0x23B, 'M', u'ȼ'),
+ (0x23C, 'V'),
+ (0x23D, 'M', u'ƚ'),
+ (0x23E, 'M', u'ⱦ'),
+ (0x23F, 'V'),
+ (0x241, 'M', u'ɂ'),
+ (0x242, 'V'),
+ (0x243, 'M', u'ƀ'),
+ (0x244, 'M', u'ʉ'),
+ (0x245, 'M', u'ʌ'),
+ (0x246, 'M', u'ɇ'),
+ (0x247, 'V'),
+ (0x248, 'M', u'ɉ'),
+ (0x249, 'V'),
+ (0x24A, 'M', u'ɋ'),
+ (0x24B, 'V'),
+ (0x24C, 'M', u'ɍ'),
+ (0x24D, 'V'),
+ (0x24E, 'M', u'ɏ'),
+ (0x24F, 'V'),
+ (0x2B0, 'M', u'h'),
+ (0x2B1, 'M', u'ɦ'),
+ (0x2B2, 'M', u'j'),
+ (0x2B3, 'M', u'r'),
+ (0x2B4, 'M', u'ɹ'),
+ (0x2B5, 'M', u'ɻ'),
+ (0x2B6, 'M', u'ʁ'),
+ (0x2B7, 'M', u'w'),
+ (0x2B8, 'M', u'y'),
+ (0x2B9, 'V'),
+ (0x2D8, '3', u' ̆'),
+ (0x2D9, '3', u' ̇'),
+ (0x2DA, '3', u' ̊'),
+ (0x2DB, '3', u' ̨'),
+ (0x2DC, '3', u' ̃'),
+ (0x2DD, '3', u' ̋'),
+ (0x2DE, 'V'),
+ (0x2E0, 'M', u'ɣ'),
+ (0x2E1, 'M', u'l'),
+ (0x2E2, 'M', u's'),
+ (0x2E3, 'M', u'x'),
+ (0x2E4, 'M', u'ʕ'),
+ (0x2E5, 'V'),
+ (0x340, 'M', u'̀'),
+ (0x341, 'M', u'́'),
+ (0x342, 'V'),
+ (0x343, 'M', u'̓'),
+ (0x344, 'M', u'̈́'),
+ (0x345, 'M', u'ι'),
+ (0x346, 'V'),
+ (0x34F, 'I'),
+ (0x350, 'V'),
+ (0x370, 'M', u'ͱ'),
+ (0x371, 'V'),
+ (0x372, 'M', u'ͳ'),
+ (0x373, 'V'),
+ (0x374, 'M', u'ʹ'),
+ (0x375, 'V'),
+ (0x376, 'M', u'ͷ'),
+ (0x377, 'V'),
+ ]
+
+def _seg_6():
+ return [
+ (0x378, 'X'),
+ (0x37A, '3', u' ι'),
+ (0x37B, 'V'),
+ (0x37E, '3', u';'),
+ (0x37F, 'M', u'ϳ'),
+ (0x380, 'X'),
+ (0x384, '3', u' ́'),
+ (0x385, '3', u' ̈́'),
+ (0x386, 'M', u'ά'),
+ (0x387, 'M', u'·'),
+ (0x388, 'M', u'έ'),
+ (0x389, 'M', u'ή'),
+ (0x38A, 'M', u'ί'),
+ (0x38B, 'X'),
+ (0x38C, 'M', u'ό'),
+ (0x38D, 'X'),
+ (0x38E, 'M', u'ύ'),
+ (0x38F, 'M', u'ώ'),
+ (0x390, 'V'),
+ (0x391, 'M', u'α'),
+ (0x392, 'M', u'β'),
+ (0x393, 'M', u'γ'),
+ (0x394, 'M', u'δ'),
+ (0x395, 'M', u'ε'),
+ (0x396, 'M', u'ζ'),
+ (0x397, 'M', u'η'),
+ (0x398, 'M', u'θ'),
+ (0x399, 'M', u'ι'),
+ (0x39A, 'M', u'κ'),
+ (0x39B, 'M', u'λ'),
+ (0x39C, 'M', u'μ'),
+ (0x39D, 'M', u'ν'),
+ (0x39E, 'M', u'ξ'),
+ (0x39F, 'M', u'ο'),
+ (0x3A0, 'M', u'π'),
+ (0x3A1, 'M', u'ρ'),
+ (0x3A2, 'X'),
+ (0x3A3, 'M', u'σ'),
+ (0x3A4, 'M', u'τ'),
+ (0x3A5, 'M', u'υ'),
+ (0x3A6, 'M', u'φ'),
+ (0x3A7, 'M', u'χ'),
+ (0x3A8, 'M', u'ψ'),
+ (0x3A9, 'M', u'ω'),
+ (0x3AA, 'M', u'ϊ'),
+ (0x3AB, 'M', u'ϋ'),
+ (0x3AC, 'V'),
+ (0x3C2, 'D', u'σ'),
+ (0x3C3, 'V'),
+ (0x3CF, 'M', u'ϗ'),
+ (0x3D0, 'M', u'β'),
+ (0x3D1, 'M', u'θ'),
+ (0x3D2, 'M', u'υ'),
+ (0x3D3, 'M', u'ύ'),
+ (0x3D4, 'M', u'ϋ'),
+ (0x3D5, 'M', u'φ'),
+ (0x3D6, 'M', u'π'),
+ (0x3D7, 'V'),
+ (0x3D8, 'M', u'ϙ'),
+ (0x3D9, 'V'),
+ (0x3DA, 'M', u'ϛ'),
+ (0x3DB, 'V'),
+ (0x3DC, 'M', u'ϝ'),
+ (0x3DD, 'V'),
+ (0x3DE, 'M', u'ϟ'),
+ (0x3DF, 'V'),
+ (0x3E0, 'M', u'ϡ'),
+ (0x3E1, 'V'),
+ (0x3E2, 'M', u'ϣ'),
+ (0x3E3, 'V'),
+ (0x3E4, 'M', u'ϥ'),
+ (0x3E5, 'V'),
+ (0x3E6, 'M', u'ϧ'),
+ (0x3E7, 'V'),
+ (0x3E8, 'M', u'ϩ'),
+ (0x3E9, 'V'),
+ (0x3EA, 'M', u'ϫ'),
+ (0x3EB, 'V'),
+ (0x3EC, 'M', u'ϭ'),
+ (0x3ED, 'V'),
+ (0x3EE, 'M', u'ϯ'),
+ (0x3EF, 'V'),
+ (0x3F0, 'M', u'κ'),
+ (0x3F1, 'M', u'ρ'),
+ (0x3F2, 'M', u'σ'),
+ (0x3F3, 'V'),
+ (0x3F4, 'M', u'θ'),
+ (0x3F5, 'M', u'ε'),
+ (0x3F6, 'V'),
+ (0x3F7, 'M', u'ϸ'),
+ (0x3F8, 'V'),
+ (0x3F9, 'M', u'σ'),
+ (0x3FA, 'M', u'ϻ'),
+ (0x3FB, 'V'),
+ (0x3FD, 'M', u'ͻ'),
+ (0x3FE, 'M', u'ͼ'),
+ (0x3FF, 'M', u'ͽ'),
+ (0x400, 'M', u'ѐ'),
+ (0x401, 'M', u'ё'),
+ (0x402, 'M', u'ђ'),
+ ]
+
+def _seg_7():
+ return [
+ (0x403, 'M', u'ѓ'),
+ (0x404, 'M', u'є'),
+ (0x405, 'M', u'ѕ'),
+ (0x406, 'M', u'і'),
+ (0x407, 'M', u'ї'),
+ (0x408, 'M', u'ј'),
+ (0x409, 'M', u'љ'),
+ (0x40A, 'M', u'њ'),
+ (0x40B, 'M', u'ћ'),
+ (0x40C, 'M', u'ќ'),
+ (0x40D, 'M', u'ѝ'),
+ (0x40E, 'M', u'ў'),
+ (0x40F, 'M', u'џ'),
+ (0x410, 'M', u'а'),
+ (0x411, 'M', u'б'),
+ (0x412, 'M', u'в'),
+ (0x413, 'M', u'г'),
+ (0x414, 'M', u'д'),
+ (0x415, 'M', u'е'),
+ (0x416, 'M', u'ж'),
+ (0x417, 'M', u'з'),
+ (0x418, 'M', u'и'),
+ (0x419, 'M', u'й'),
+ (0x41A, 'M', u'к'),
+ (0x41B, 'M', u'л'),
+ (0x41C, 'M', u'м'),
+ (0x41D, 'M', u'н'),
+ (0x41E, 'M', u'о'),
+ (0x41F, 'M', u'п'),
+ (0x420, 'M', u'р'),
+ (0x421, 'M', u'с'),
+ (0x422, 'M', u'т'),
+ (0x423, 'M', u'у'),
+ (0x424, 'M', u'ф'),
+ (0x425, 'M', u'х'),
+ (0x426, 'M', u'ц'),
+ (0x427, 'M', u'ч'),
+ (0x428, 'M', u'ш'),
+ (0x429, 'M', u'щ'),
+ (0x42A, 'M', u'ъ'),
+ (0x42B, 'M', u'ы'),
+ (0x42C, 'M', u'ь'),
+ (0x42D, 'M', u'э'),
+ (0x42E, 'M', u'ю'),
+ (0x42F, 'M', u'я'),
+ (0x430, 'V'),
+ (0x460, 'M', u'ѡ'),
+ (0x461, 'V'),
+ (0x462, 'M', u'ѣ'),
+ (0x463, 'V'),
+ (0x464, 'M', u'ѥ'),
+ (0x465, 'V'),
+ (0x466, 'M', u'ѧ'),
+ (0x467, 'V'),
+ (0x468, 'M', u'ѩ'),
+ (0x469, 'V'),
+ (0x46A, 'M', u'ѫ'),
+ (0x46B, 'V'),
+ (0x46C, 'M', u'ѭ'),
+ (0x46D, 'V'),
+ (0x46E, 'M', u'ѯ'),
+ (0x46F, 'V'),
+ (0x470, 'M', u'ѱ'),
+ (0x471, 'V'),
+ (0x472, 'M', u'ѳ'),
+ (0x473, 'V'),
+ (0x474, 'M', u'ѵ'),
+ (0x475, 'V'),
+ (0x476, 'M', u'ѷ'),
+ (0x477, 'V'),
+ (0x478, 'M', u'ѹ'),
+ (0x479, 'V'),
+ (0x47A, 'M', u'ѻ'),
+ (0x47B, 'V'),
+ (0x47C, 'M', u'ѽ'),
+ (0x47D, 'V'),
+ (0x47E, 'M', u'ѿ'),
+ (0x47F, 'V'),
+ (0x480, 'M', u'ҁ'),
+ (0x481, 'V'),
+ (0x48A, 'M', u'ҋ'),
+ (0x48B, 'V'),
+ (0x48C, 'M', u'ҍ'),
+ (0x48D, 'V'),
+ (0x48E, 'M', u'ҏ'),
+ (0x48F, 'V'),
+ (0x490, 'M', u'ґ'),
+ (0x491, 'V'),
+ (0x492, 'M', u'ғ'),
+ (0x493, 'V'),
+ (0x494, 'M', u'ҕ'),
+ (0x495, 'V'),
+ (0x496, 'M', u'җ'),
+ (0x497, 'V'),
+ (0x498, 'M', u'ҙ'),
+ (0x499, 'V'),
+ (0x49A, 'M', u'қ'),
+ (0x49B, 'V'),
+ (0x49C, 'M', u'ҝ'),
+ (0x49D, 'V'),
+ ]
+
+def _seg_8():
+ return [
+ (0x49E, 'M', u'ҟ'),
+ (0x49F, 'V'),
+ (0x4A0, 'M', u'ҡ'),
+ (0x4A1, 'V'),
+ (0x4A2, 'M', u'ң'),
+ (0x4A3, 'V'),
+ (0x4A4, 'M', u'ҥ'),
+ (0x4A5, 'V'),
+ (0x4A6, 'M', u'ҧ'),
+ (0x4A7, 'V'),
+ (0x4A8, 'M', u'ҩ'),
+ (0x4A9, 'V'),
+ (0x4AA, 'M', u'ҫ'),
+ (0x4AB, 'V'),
+ (0x4AC, 'M', u'ҭ'),
+ (0x4AD, 'V'),
+ (0x4AE, 'M', u'ү'),
+ (0x4AF, 'V'),
+ (0x4B0, 'M', u'ұ'),
+ (0x4B1, 'V'),
+ (0x4B2, 'M', u'ҳ'),
+ (0x4B3, 'V'),
+ (0x4B4, 'M', u'ҵ'),
+ (0x4B5, 'V'),
+ (0x4B6, 'M', u'ҷ'),
+ (0x4B7, 'V'),
+ (0x4B8, 'M', u'ҹ'),
+ (0x4B9, 'V'),
+ (0x4BA, 'M', u'һ'),
+ (0x4BB, 'V'),
+ (0x4BC, 'M', u'ҽ'),
+ (0x4BD, 'V'),
+ (0x4BE, 'M', u'ҿ'),
+ (0x4BF, 'V'),
+ (0x4C0, 'X'),
+ (0x4C1, 'M', u'ӂ'),
+ (0x4C2, 'V'),
+ (0x4C3, 'M', u'ӄ'),
+ (0x4C4, 'V'),
+ (0x4C5, 'M', u'ӆ'),
+ (0x4C6, 'V'),
+ (0x4C7, 'M', u'ӈ'),
+ (0x4C8, 'V'),
+ (0x4C9, 'M', u'ӊ'),
+ (0x4CA, 'V'),
+ (0x4CB, 'M', u'ӌ'),
+ (0x4CC, 'V'),
+ (0x4CD, 'M', u'ӎ'),
+ (0x4CE, 'V'),
+ (0x4D0, 'M', u'ӑ'),
+ (0x4D1, 'V'),
+ (0x4D2, 'M', u'ӓ'),
+ (0x4D3, 'V'),
+ (0x4D4, 'M', u'ӕ'),
+ (0x4D5, 'V'),
+ (0x4D6, 'M', u'ӗ'),
+ (0x4D7, 'V'),
+ (0x4D8, 'M', u'ә'),
+ (0x4D9, 'V'),
+ (0x4DA, 'M', u'ӛ'),
+ (0x4DB, 'V'),
+ (0x4DC, 'M', u'ӝ'),
+ (0x4DD, 'V'),
+ (0x4DE, 'M', u'ӟ'),
+ (0x4DF, 'V'),
+ (0x4E0, 'M', u'ӡ'),
+ (0x4E1, 'V'),
+ (0x4E2, 'M', u'ӣ'),
+ (0x4E3, 'V'),
+ (0x4E4, 'M', u'ӥ'),
+ (0x4E5, 'V'),
+ (0x4E6, 'M', u'ӧ'),
+ (0x4E7, 'V'),
+ (0x4E8, 'M', u'ө'),
+ (0x4E9, 'V'),
+ (0x4EA, 'M', u'ӫ'),
+ (0x4EB, 'V'),
+ (0x4EC, 'M', u'ӭ'),
+ (0x4ED, 'V'),
+ (0x4EE, 'M', u'ӯ'),
+ (0x4EF, 'V'),
+ (0x4F0, 'M', u'ӱ'),
+ (0x4F1, 'V'),
+ (0x4F2, 'M', u'ӳ'),
+ (0x4F3, 'V'),
+ (0x4F4, 'M', u'ӵ'),
+ (0x4F5, 'V'),
+ (0x4F6, 'M', u'ӷ'),
+ (0x4F7, 'V'),
+ (0x4F8, 'M', u'ӹ'),
+ (0x4F9, 'V'),
+ (0x4FA, 'M', u'ӻ'),
+ (0x4FB, 'V'),
+ (0x4FC, 'M', u'ӽ'),
+ (0x4FD, 'V'),
+ (0x4FE, 'M', u'ӿ'),
+ (0x4FF, 'V'),
+ (0x500, 'M', u'ԁ'),
+ (0x501, 'V'),
+ (0x502, 'M', u'ԃ'),
+ ]
+
+def _seg_9():
+ return [
+ (0x503, 'V'),
+ (0x504, 'M', u'ԅ'),
+ (0x505, 'V'),
+ (0x506, 'M', u'ԇ'),
+ (0x507, 'V'),
+ (0x508, 'M', u'ԉ'),
+ (0x509, 'V'),
+ (0x50A, 'M', u'ԋ'),
+ (0x50B, 'V'),
+ (0x50C, 'M', u'ԍ'),
+ (0x50D, 'V'),
+ (0x50E, 'M', u'ԏ'),
+ (0x50F, 'V'),
+ (0x510, 'M', u'ԑ'),
+ (0x511, 'V'),
+ (0x512, 'M', u'ԓ'),
+ (0x513, 'V'),
+ (0x514, 'M', u'ԕ'),
+ (0x515, 'V'),
+ (0x516, 'M', u'ԗ'),
+ (0x517, 'V'),
+ (0x518, 'M', u'ԙ'),
+ (0x519, 'V'),
+ (0x51A, 'M', u'ԛ'),
+ (0x51B, 'V'),
+ (0x51C, 'M', u'ԝ'),
+ (0x51D, 'V'),
+ (0x51E, 'M', u'ԟ'),
+ (0x51F, 'V'),
+ (0x520, 'M', u'ԡ'),
+ (0x521, 'V'),
+ (0x522, 'M', u'ԣ'),
+ (0x523, 'V'),
+ (0x524, 'M', u'ԥ'),
+ (0x525, 'V'),
+ (0x526, 'M', u'ԧ'),
+ (0x527, 'V'),
+ (0x528, 'M', u'ԩ'),
+ (0x529, 'V'),
+ (0x52A, 'M', u'ԫ'),
+ (0x52B, 'V'),
+ (0x52C, 'M', u'ԭ'),
+ (0x52D, 'V'),
+ (0x52E, 'M', u'ԯ'),
+ (0x52F, 'V'),
+ (0x530, 'X'),
+ (0x531, 'M', u'ա'),
+ (0x532, 'M', u'բ'),
+ (0x533, 'M', u'գ'),
+ (0x534, 'M', u'դ'),
+ (0x535, 'M', u'ե'),
+ (0x536, 'M', u'զ'),
+ (0x537, 'M', u'է'),
+ (0x538, 'M', u'ը'),
+ (0x539, 'M', u'թ'),
+ (0x53A, 'M', u'ժ'),
+ (0x53B, 'M', u'ի'),
+ (0x53C, 'M', u'լ'),
+ (0x53D, 'M', u'խ'),
+ (0x53E, 'M', u'ծ'),
+ (0x53F, 'M', u'կ'),
+ (0x540, 'M', u'հ'),
+ (0x541, 'M', u'ձ'),
+ (0x542, 'M', u'ղ'),
+ (0x543, 'M', u'ճ'),
+ (0x544, 'M', u'մ'),
+ (0x545, 'M', u'յ'),
+ (0x546, 'M', u'ն'),
+ (0x547, 'M', u'շ'),
+ (0x548, 'M', u'ո'),
+ (0x549, 'M', u'չ'),
+ (0x54A, 'M', u'պ'),
+ (0x54B, 'M', u'ջ'),
+ (0x54C, 'M', u'ռ'),
+ (0x54D, 'M', u'ս'),
+ (0x54E, 'M', u'վ'),
+ (0x54F, 'M', u'տ'),
+ (0x550, 'M', u'ր'),
+ (0x551, 'M', u'ց'),
+ (0x552, 'M', u'ւ'),
+ (0x553, 'M', u'փ'),
+ (0x554, 'M', u'ք'),
+ (0x555, 'M', u'օ'),
+ (0x556, 'M', u'ֆ'),
+ (0x557, 'X'),
+ (0x559, 'V'),
+ (0x587, 'M', u'եւ'),
+ (0x588, 'V'),
+ (0x58B, 'X'),
+ (0x58D, 'V'),
+ (0x590, 'X'),
+ (0x591, 'V'),
+ (0x5C8, 'X'),
+ (0x5D0, 'V'),
+ (0x5EB, 'X'),
+ (0x5EF, 'V'),
+ (0x5F5, 'X'),
+ (0x606, 'V'),
+ (0x61C, 'X'),
+ (0x61E, 'V'),
+ ]
+
+def _seg_10():
+ return [
+ (0x675, 'M', u'اٴ'),
+ (0x676, 'M', u'وٴ'),
+ (0x677, 'M', u'ۇٴ'),
+ (0x678, 'M', u'يٴ'),
+ (0x679, 'V'),
+ (0x6DD, 'X'),
+ (0x6DE, 'V'),
+ (0x70E, 'X'),
+ (0x710, 'V'),
+ (0x74B, 'X'),
+ (0x74D, 'V'),
+ (0x7B2, 'X'),
+ (0x7C0, 'V'),
+ (0x7FB, 'X'),
+ (0x7FD, 'V'),
+ (0x82E, 'X'),
+ (0x830, 'V'),
+ (0x83F, 'X'),
+ (0x840, 'V'),
+ (0x85C, 'X'),
+ (0x85E, 'V'),
+ (0x85F, 'X'),
+ (0x860, 'V'),
+ (0x86B, 'X'),
+ (0x8A0, 'V'),
+ (0x8B5, 'X'),
+ (0x8B6, 'V'),
+ (0x8BE, 'X'),
+ (0x8D3, 'V'),
+ (0x8E2, 'X'),
+ (0x8E3, 'V'),
+ (0x958, 'M', u'क़'),
+ (0x959, 'M', u'ख़'),
+ (0x95A, 'M', u'ग़'),
+ (0x95B, 'M', u'ज़'),
+ (0x95C, 'M', u'ड़'),
+ (0x95D, 'M', u'ढ़'),
+ (0x95E, 'M', u'फ़'),
+ (0x95F, 'M', u'य़'),
+ (0x960, 'V'),
+ (0x984, 'X'),
+ (0x985, 'V'),
+ (0x98D, 'X'),
+ (0x98F, 'V'),
+ (0x991, 'X'),
+ (0x993, 'V'),
+ (0x9A9, 'X'),
+ (0x9AA, 'V'),
+ (0x9B1, 'X'),
+ (0x9B2, 'V'),
+ (0x9B3, 'X'),
+ (0x9B6, 'V'),
+ (0x9BA, 'X'),
+ (0x9BC, 'V'),
+ (0x9C5, 'X'),
+ (0x9C7, 'V'),
+ (0x9C9, 'X'),
+ (0x9CB, 'V'),
+ (0x9CF, 'X'),
+ (0x9D7, 'V'),
+ (0x9D8, 'X'),
+ (0x9DC, 'M', u'ড়'),
+ (0x9DD, 'M', u'ঢ়'),
+ (0x9DE, 'X'),
+ (0x9DF, 'M', u'য়'),
+ (0x9E0, 'V'),
+ (0x9E4, 'X'),
+ (0x9E6, 'V'),
+ (0x9FF, 'X'),
+ (0xA01, 'V'),
+ (0xA04, 'X'),
+ (0xA05, 'V'),
+ (0xA0B, 'X'),
+ (0xA0F, 'V'),
+ (0xA11, 'X'),
+ (0xA13, 'V'),
+ (0xA29, 'X'),
+ (0xA2A, 'V'),
+ (0xA31, 'X'),
+ (0xA32, 'V'),
+ (0xA33, 'M', u'ਲ਼'),
+ (0xA34, 'X'),
+ (0xA35, 'V'),
+ (0xA36, 'M', u'ਸ਼'),
+ (0xA37, 'X'),
+ (0xA38, 'V'),
+ (0xA3A, 'X'),
+ (0xA3C, 'V'),
+ (0xA3D, 'X'),
+ (0xA3E, 'V'),
+ (0xA43, 'X'),
+ (0xA47, 'V'),
+ (0xA49, 'X'),
+ (0xA4B, 'V'),
+ (0xA4E, 'X'),
+ (0xA51, 'V'),
+ (0xA52, 'X'),
+ (0xA59, 'M', u'ਖ਼'),
+ (0xA5A, 'M', u'ਗ਼'),
+ (0xA5B, 'M', u'ਜ਼'),
+ ]
+
+def _seg_11():
+ return [
+ (0xA5C, 'V'),
+ (0xA5D, 'X'),
+ (0xA5E, 'M', u'ਫ਼'),
+ (0xA5F, 'X'),
+ (0xA66, 'V'),
+ (0xA77, 'X'),
+ (0xA81, 'V'),
+ (0xA84, 'X'),
+ (0xA85, 'V'),
+ (0xA8E, 'X'),
+ (0xA8F, 'V'),
+ (0xA92, 'X'),
+ (0xA93, 'V'),
+ (0xAA9, 'X'),
+ (0xAAA, 'V'),
+ (0xAB1, 'X'),
+ (0xAB2, 'V'),
+ (0xAB4, 'X'),
+ (0xAB5, 'V'),
+ (0xABA, 'X'),
+ (0xABC, 'V'),
+ (0xAC6, 'X'),
+ (0xAC7, 'V'),
+ (0xACA, 'X'),
+ (0xACB, 'V'),
+ (0xACE, 'X'),
+ (0xAD0, 'V'),
+ (0xAD1, 'X'),
+ (0xAE0, 'V'),
+ (0xAE4, 'X'),
+ (0xAE6, 'V'),
+ (0xAF2, 'X'),
+ (0xAF9, 'V'),
+ (0xB00, 'X'),
+ (0xB01, 'V'),
+ (0xB04, 'X'),
+ (0xB05, 'V'),
+ (0xB0D, 'X'),
+ (0xB0F, 'V'),
+ (0xB11, 'X'),
+ (0xB13, 'V'),
+ (0xB29, 'X'),
+ (0xB2A, 'V'),
+ (0xB31, 'X'),
+ (0xB32, 'V'),
+ (0xB34, 'X'),
+ (0xB35, 'V'),
+ (0xB3A, 'X'),
+ (0xB3C, 'V'),
+ (0xB45, 'X'),
+ (0xB47, 'V'),
+ (0xB49, 'X'),
+ (0xB4B, 'V'),
+ (0xB4E, 'X'),
+ (0xB56, 'V'),
+ (0xB58, 'X'),
+ (0xB5C, 'M', u'ଡ଼'),
+ (0xB5D, 'M', u'ଢ଼'),
+ (0xB5E, 'X'),
+ (0xB5F, 'V'),
+ (0xB64, 'X'),
+ (0xB66, 'V'),
+ (0xB78, 'X'),
+ (0xB82, 'V'),
+ (0xB84, 'X'),
+ (0xB85, 'V'),
+ (0xB8B, 'X'),
+ (0xB8E, 'V'),
+ (0xB91, 'X'),
+ (0xB92, 'V'),
+ (0xB96, 'X'),
+ (0xB99, 'V'),
+ (0xB9B, 'X'),
+ (0xB9C, 'V'),
+ (0xB9D, 'X'),
+ (0xB9E, 'V'),
+ (0xBA0, 'X'),
+ (0xBA3, 'V'),
+ (0xBA5, 'X'),
+ (0xBA8, 'V'),
+ (0xBAB, 'X'),
+ (0xBAE, 'V'),
+ (0xBBA, 'X'),
+ (0xBBE, 'V'),
+ (0xBC3, 'X'),
+ (0xBC6, 'V'),
+ (0xBC9, 'X'),
+ (0xBCA, 'V'),
+ (0xBCE, 'X'),
+ (0xBD0, 'V'),
+ (0xBD1, 'X'),
+ (0xBD7, 'V'),
+ (0xBD8, 'X'),
+ (0xBE6, 'V'),
+ (0xBFB, 'X'),
+ (0xC00, 'V'),
+ (0xC0D, 'X'),
+ (0xC0E, 'V'),
+ (0xC11, 'X'),
+ (0xC12, 'V'),
+ ]
+
+def _seg_12():
+ return [
+ (0xC29, 'X'),
+ (0xC2A, 'V'),
+ (0xC3A, 'X'),
+ (0xC3D, 'V'),
+ (0xC45, 'X'),
+ (0xC46, 'V'),
+ (0xC49, 'X'),
+ (0xC4A, 'V'),
+ (0xC4E, 'X'),
+ (0xC55, 'V'),
+ (0xC57, 'X'),
+ (0xC58, 'V'),
+ (0xC5B, 'X'),
+ (0xC60, 'V'),
+ (0xC64, 'X'),
+ (0xC66, 'V'),
+ (0xC70, 'X'),
+ (0xC78, 'V'),
+ (0xC8D, 'X'),
+ (0xC8E, 'V'),
+ (0xC91, 'X'),
+ (0xC92, 'V'),
+ (0xCA9, 'X'),
+ (0xCAA, 'V'),
+ (0xCB4, 'X'),
+ (0xCB5, 'V'),
+ (0xCBA, 'X'),
+ (0xCBC, 'V'),
+ (0xCC5, 'X'),
+ (0xCC6, 'V'),
+ (0xCC9, 'X'),
+ (0xCCA, 'V'),
+ (0xCCE, 'X'),
+ (0xCD5, 'V'),
+ (0xCD7, 'X'),
+ (0xCDE, 'V'),
+ (0xCDF, 'X'),
+ (0xCE0, 'V'),
+ (0xCE4, 'X'),
+ (0xCE6, 'V'),
+ (0xCF0, 'X'),
+ (0xCF1, 'V'),
+ (0xCF3, 'X'),
+ (0xD00, 'V'),
+ (0xD04, 'X'),
+ (0xD05, 'V'),
+ (0xD0D, 'X'),
+ (0xD0E, 'V'),
+ (0xD11, 'X'),
+ (0xD12, 'V'),
+ (0xD45, 'X'),
+ (0xD46, 'V'),
+ (0xD49, 'X'),
+ (0xD4A, 'V'),
+ (0xD50, 'X'),
+ (0xD54, 'V'),
+ (0xD64, 'X'),
+ (0xD66, 'V'),
+ (0xD80, 'X'),
+ (0xD82, 'V'),
+ (0xD84, 'X'),
+ (0xD85, 'V'),
+ (0xD97, 'X'),
+ (0xD9A, 'V'),
+ (0xDB2, 'X'),
+ (0xDB3, 'V'),
+ (0xDBC, 'X'),
+ (0xDBD, 'V'),
+ (0xDBE, 'X'),
+ (0xDC0, 'V'),
+ (0xDC7, 'X'),
+ (0xDCA, 'V'),
+ (0xDCB, 'X'),
+ (0xDCF, 'V'),
+ (0xDD5, 'X'),
+ (0xDD6, 'V'),
+ (0xDD7, 'X'),
+ (0xDD8, 'V'),
+ (0xDE0, 'X'),
+ (0xDE6, 'V'),
+ (0xDF0, 'X'),
+ (0xDF2, 'V'),
+ (0xDF5, 'X'),
+ (0xE01, 'V'),
+ (0xE33, 'M', u'ํา'),
+ (0xE34, 'V'),
+ (0xE3B, 'X'),
+ (0xE3F, 'V'),
+ (0xE5C, 'X'),
+ (0xE81, 'V'),
+ (0xE83, 'X'),
+ (0xE84, 'V'),
+ (0xE85, 'X'),
+ (0xE87, 'V'),
+ (0xE89, 'X'),
+ (0xE8A, 'V'),
+ (0xE8B, 'X'),
+ (0xE8D, 'V'),
+ (0xE8E, 'X'),
+ (0xE94, 'V'),
+ ]
+
+def _seg_13():
+ return [
+ (0xE98, 'X'),
+ (0xE99, 'V'),
+ (0xEA0, 'X'),
+ (0xEA1, 'V'),
+ (0xEA4, 'X'),
+ (0xEA5, 'V'),
+ (0xEA6, 'X'),
+ (0xEA7, 'V'),
+ (0xEA8, 'X'),
+ (0xEAA, 'V'),
+ (0xEAC, 'X'),
+ (0xEAD, 'V'),
+ (0xEB3, 'M', u'ໍາ'),
+ (0xEB4, 'V'),
+ (0xEBA, 'X'),
+ (0xEBB, 'V'),
+ (0xEBE, 'X'),
+ (0xEC0, 'V'),
+ (0xEC5, 'X'),
+ (0xEC6, 'V'),
+ (0xEC7, 'X'),
+ (0xEC8, 'V'),
+ (0xECE, 'X'),
+ (0xED0, 'V'),
+ (0xEDA, 'X'),
+ (0xEDC, 'M', u'ຫນ'),
+ (0xEDD, 'M', u'ຫມ'),
+ (0xEDE, 'V'),
+ (0xEE0, 'X'),
+ (0xF00, 'V'),
+ (0xF0C, 'M', u'་'),
+ (0xF0D, 'V'),
+ (0xF43, 'M', u'གྷ'),
+ (0xF44, 'V'),
+ (0xF48, 'X'),
+ (0xF49, 'V'),
+ (0xF4D, 'M', u'ཌྷ'),
+ (0xF4E, 'V'),
+ (0xF52, 'M', u'དྷ'),
+ (0xF53, 'V'),
+ (0xF57, 'M', u'བྷ'),
+ (0xF58, 'V'),
+ (0xF5C, 'M', u'ཛྷ'),
+ (0xF5D, 'V'),
+ (0xF69, 'M', u'ཀྵ'),
+ (0xF6A, 'V'),
+ (0xF6D, 'X'),
+ (0xF71, 'V'),
+ (0xF73, 'M', u'ཱི'),
+ (0xF74, 'V'),
+ (0xF75, 'M', u'ཱུ'),
+ (0xF76, 'M', u'ྲྀ'),
+ (0xF77, 'M', u'ྲཱྀ'),
+ (0xF78, 'M', u'ླྀ'),
+ (0xF79, 'M', u'ླཱྀ'),
+ (0xF7A, 'V'),
+ (0xF81, 'M', u'ཱྀ'),
+ (0xF82, 'V'),
+ (0xF93, 'M', u'ྒྷ'),
+ (0xF94, 'V'),
+ (0xF98, 'X'),
+ (0xF99, 'V'),
+ (0xF9D, 'M', u'ྜྷ'),
+ (0xF9E, 'V'),
+ (0xFA2, 'M', u'ྡྷ'),
+ (0xFA3, 'V'),
+ (0xFA7, 'M', u'ྦྷ'),
+ (0xFA8, 'V'),
+ (0xFAC, 'M', u'ྫྷ'),
+ (0xFAD, 'V'),
+ (0xFB9, 'M', u'ྐྵ'),
+ (0xFBA, 'V'),
+ (0xFBD, 'X'),
+ (0xFBE, 'V'),
+ (0xFCD, 'X'),
+ (0xFCE, 'V'),
+ (0xFDB, 'X'),
+ (0x1000, 'V'),
+ (0x10A0, 'X'),
+ (0x10C7, 'M', u'ⴧ'),
+ (0x10C8, 'X'),
+ (0x10CD, 'M', u'ⴭ'),
+ (0x10CE, 'X'),
+ (0x10D0, 'V'),
+ (0x10FC, 'M', u'ნ'),
+ (0x10FD, 'V'),
+ (0x115F, 'X'),
+ (0x1161, 'V'),
+ (0x1249, 'X'),
+ (0x124A, 'V'),
+ (0x124E, 'X'),
+ (0x1250, 'V'),
+ (0x1257, 'X'),
+ (0x1258, 'V'),
+ (0x1259, 'X'),
+ (0x125A, 'V'),
+ (0x125E, 'X'),
+ (0x1260, 'V'),
+ (0x1289, 'X'),
+ (0x128A, 'V'),
+ ]
+
+def _seg_14():
+ return [
+ (0x128E, 'X'),
+ (0x1290, 'V'),
+ (0x12B1, 'X'),
+ (0x12B2, 'V'),
+ (0x12B6, 'X'),
+ (0x12B8, 'V'),
+ (0x12BF, 'X'),
+ (0x12C0, 'V'),
+ (0x12C1, 'X'),
+ (0x12C2, 'V'),
+ (0x12C6, 'X'),
+ (0x12C8, 'V'),
+ (0x12D7, 'X'),
+ (0x12D8, 'V'),
+ (0x1311, 'X'),
+ (0x1312, 'V'),
+ (0x1316, 'X'),
+ (0x1318, 'V'),
+ (0x135B, 'X'),
+ (0x135D, 'V'),
+ (0x137D, 'X'),
+ (0x1380, 'V'),
+ (0x139A, 'X'),
+ (0x13A0, 'V'),
+ (0x13F6, 'X'),
+ (0x13F8, 'M', u'Ᏸ'),
+ (0x13F9, 'M', u'Ᏹ'),
+ (0x13FA, 'M', u'Ᏺ'),
+ (0x13FB, 'M', u'Ᏻ'),
+ (0x13FC, 'M', u'Ᏼ'),
+ (0x13FD, 'M', u'Ᏽ'),
+ (0x13FE, 'X'),
+ (0x1400, 'V'),
+ (0x1680, 'X'),
+ (0x1681, 'V'),
+ (0x169D, 'X'),
+ (0x16A0, 'V'),
+ (0x16F9, 'X'),
+ (0x1700, 'V'),
+ (0x170D, 'X'),
+ (0x170E, 'V'),
+ (0x1715, 'X'),
+ (0x1720, 'V'),
+ (0x1737, 'X'),
+ (0x1740, 'V'),
+ (0x1754, 'X'),
+ (0x1760, 'V'),
+ (0x176D, 'X'),
+ (0x176E, 'V'),
+ (0x1771, 'X'),
+ (0x1772, 'V'),
+ (0x1774, 'X'),
+ (0x1780, 'V'),
+ (0x17B4, 'X'),
+ (0x17B6, 'V'),
+ (0x17DE, 'X'),
+ (0x17E0, 'V'),
+ (0x17EA, 'X'),
+ (0x17F0, 'V'),
+ (0x17FA, 'X'),
+ (0x1800, 'V'),
+ (0x1806, 'X'),
+ (0x1807, 'V'),
+ (0x180B, 'I'),
+ (0x180E, 'X'),
+ (0x1810, 'V'),
+ (0x181A, 'X'),
+ (0x1820, 'V'),
+ (0x1879, 'X'),
+ (0x1880, 'V'),
+ (0x18AB, 'X'),
+ (0x18B0, 'V'),
+ (0x18F6, 'X'),
+ (0x1900, 'V'),
+ (0x191F, 'X'),
+ (0x1920, 'V'),
+ (0x192C, 'X'),
+ (0x1930, 'V'),
+ (0x193C, 'X'),
+ (0x1940, 'V'),
+ (0x1941, 'X'),
+ (0x1944, 'V'),
+ (0x196E, 'X'),
+ (0x1970, 'V'),
+ (0x1975, 'X'),
+ (0x1980, 'V'),
+ (0x19AC, 'X'),
+ (0x19B0, 'V'),
+ (0x19CA, 'X'),
+ (0x19D0, 'V'),
+ (0x19DB, 'X'),
+ (0x19DE, 'V'),
+ (0x1A1C, 'X'),
+ (0x1A1E, 'V'),
+ (0x1A5F, 'X'),
+ (0x1A60, 'V'),
+ (0x1A7D, 'X'),
+ (0x1A7F, 'V'),
+ (0x1A8A, 'X'),
+ (0x1A90, 'V'),
+ ]
+
+def _seg_15():
+ return [
+ (0x1A9A, 'X'),
+ (0x1AA0, 'V'),
+ (0x1AAE, 'X'),
+ (0x1AB0, 'V'),
+ (0x1ABF, 'X'),
+ (0x1B00, 'V'),
+ (0x1B4C, 'X'),
+ (0x1B50, 'V'),
+ (0x1B7D, 'X'),
+ (0x1B80, 'V'),
+ (0x1BF4, 'X'),
+ (0x1BFC, 'V'),
+ (0x1C38, 'X'),
+ (0x1C3B, 'V'),
+ (0x1C4A, 'X'),
+ (0x1C4D, 'V'),
+ (0x1C80, 'M', u'в'),
+ (0x1C81, 'M', u'д'),
+ (0x1C82, 'M', u'о'),
+ (0x1C83, 'M', u'с'),
+ (0x1C84, 'M', u'т'),
+ (0x1C86, 'M', u'ъ'),
+ (0x1C87, 'M', u'ѣ'),
+ (0x1C88, 'M', u'ꙋ'),
+ (0x1C89, 'X'),
+ (0x1CC0, 'V'),
+ (0x1CC8, 'X'),
+ (0x1CD0, 'V'),
+ (0x1CFA, 'X'),
+ (0x1D00, 'V'),
+ (0x1D2C, 'M', u'a'),
+ (0x1D2D, 'M', u'æ'),
+ (0x1D2E, 'M', u'b'),
+ (0x1D2F, 'V'),
+ (0x1D30, 'M', u'd'),
+ (0x1D31, 'M', u'e'),
+ (0x1D32, 'M', u'ǝ'),
+ (0x1D33, 'M', u'g'),
+ (0x1D34, 'M', u'h'),
+ (0x1D35, 'M', u'i'),
+ (0x1D36, 'M', u'j'),
+ (0x1D37, 'M', u'k'),
+ (0x1D38, 'M', u'l'),
+ (0x1D39, 'M', u'm'),
+ (0x1D3A, 'M', u'n'),
+ (0x1D3B, 'V'),
+ (0x1D3C, 'M', u'o'),
+ (0x1D3D, 'M', u'ȣ'),
+ (0x1D3E, 'M', u'p'),
+ (0x1D3F, 'M', u'r'),
+ (0x1D40, 'M', u't'),
+ (0x1D41, 'M', u'u'),
+ (0x1D42, 'M', u'w'),
+ (0x1D43, 'M', u'a'),
+ (0x1D44, 'M', u'ɐ'),
+ (0x1D45, 'M', u'ɑ'),
+ (0x1D46, 'M', u'ᴂ'),
+ (0x1D47, 'M', u'b'),
+ (0x1D48, 'M', u'd'),
+ (0x1D49, 'M', u'e'),
+ (0x1D4A, 'M', u'ə'),
+ (0x1D4B, 'M', u'ɛ'),
+ (0x1D4C, 'M', u'ɜ'),
+ (0x1D4D, 'M', u'g'),
+ (0x1D4E, 'V'),
+ (0x1D4F, 'M', u'k'),
+ (0x1D50, 'M', u'm'),
+ (0x1D51, 'M', u'ŋ'),
+ (0x1D52, 'M', u'o'),
+ (0x1D53, 'M', u'ɔ'),
+ (0x1D54, 'M', u'ᴖ'),
+ (0x1D55, 'M', u'ᴗ'),
+ (0x1D56, 'M', u'p'),
+ (0x1D57, 'M', u't'),
+ (0x1D58, 'M', u'u'),
+ (0x1D59, 'M', u'ᴝ'),
+ (0x1D5A, 'M', u'ɯ'),
+ (0x1D5B, 'M', u'v'),
+ (0x1D5C, 'M', u'ᴥ'),
+ (0x1D5D, 'M', u'β'),
+ (0x1D5E, 'M', u'γ'),
+ (0x1D5F, 'M', u'δ'),
+ (0x1D60, 'M', u'φ'),
+ (0x1D61, 'M', u'χ'),
+ (0x1D62, 'M', u'i'),
+ (0x1D63, 'M', u'r'),
+ (0x1D64, 'M', u'u'),
+ (0x1D65, 'M', u'v'),
+ (0x1D66, 'M', u'β'),
+ (0x1D67, 'M', u'γ'),
+ (0x1D68, 'M', u'ρ'),
+ (0x1D69, 'M', u'φ'),
+ (0x1D6A, 'M', u'χ'),
+ (0x1D6B, 'V'),
+ (0x1D78, 'M', u'н'),
+ (0x1D79, 'V'),
+ (0x1D9B, 'M', u'ɒ'),
+ (0x1D9C, 'M', u'c'),
+ (0x1D9D, 'M', u'ɕ'),
+ (0x1D9E, 'M', u'ð'),
+ ]
+
+def _seg_16():
+ return [
+ (0x1D9F, 'M', u'ɜ'),
+ (0x1DA0, 'M', u'f'),
+ (0x1DA1, 'M', u'ɟ'),
+ (0x1DA2, 'M', u'ɡ'),
+ (0x1DA3, 'M', u'ɥ'),
+ (0x1DA4, 'M', u'ɨ'),
+ (0x1DA5, 'M', u'ɩ'),
+ (0x1DA6, 'M', u'ɪ'),
+ (0x1DA7, 'M', u'ᵻ'),
+ (0x1DA8, 'M', u'ʝ'),
+ (0x1DA9, 'M', u'ɭ'),
+ (0x1DAA, 'M', u'ᶅ'),
+ (0x1DAB, 'M', u'ʟ'),
+ (0x1DAC, 'M', u'ɱ'),
+ (0x1DAD, 'M', u'ɰ'),
+ (0x1DAE, 'M', u'ɲ'),
+ (0x1DAF, 'M', u'ɳ'),
+ (0x1DB0, 'M', u'ɴ'),
+ (0x1DB1, 'M', u'ɵ'),
+ (0x1DB2, 'M', u'ɸ'),
+ (0x1DB3, 'M', u'ʂ'),
+ (0x1DB4, 'M', u'ʃ'),
+ (0x1DB5, 'M', u'ƫ'),
+ (0x1DB6, 'M', u'ʉ'),
+ (0x1DB7, 'M', u'ʊ'),
+ (0x1DB8, 'M', u'ᴜ'),
+ (0x1DB9, 'M', u'ʋ'),
+ (0x1DBA, 'M', u'ʌ'),
+ (0x1DBB, 'M', u'z'),
+ (0x1DBC, 'M', u'ʐ'),
+ (0x1DBD, 'M', u'ʑ'),
+ (0x1DBE, 'M', u'ʒ'),
+ (0x1DBF, 'M', u'θ'),
+ (0x1DC0, 'V'),
+ (0x1DFA, 'X'),
+ (0x1DFB, 'V'),
+ (0x1E00, 'M', u'ḁ'),
+ (0x1E01, 'V'),
+ (0x1E02, 'M', u'ḃ'),
+ (0x1E03, 'V'),
+ (0x1E04, 'M', u'ḅ'),
+ (0x1E05, 'V'),
+ (0x1E06, 'M', u'ḇ'),
+ (0x1E07, 'V'),
+ (0x1E08, 'M', u'ḉ'),
+ (0x1E09, 'V'),
+ (0x1E0A, 'M', u'ḋ'),
+ (0x1E0B, 'V'),
+ (0x1E0C, 'M', u'ḍ'),
+ (0x1E0D, 'V'),
+ (0x1E0E, 'M', u'ḏ'),
+ (0x1E0F, 'V'),
+ (0x1E10, 'M', u'ḑ'),
+ (0x1E11, 'V'),
+ (0x1E12, 'M', u'ḓ'),
+ (0x1E13, 'V'),
+ (0x1E14, 'M', u'ḕ'),
+ (0x1E15, 'V'),
+ (0x1E16, 'M', u'ḗ'),
+ (0x1E17, 'V'),
+ (0x1E18, 'M', u'ḙ'),
+ (0x1E19, 'V'),
+ (0x1E1A, 'M', u'ḛ'),
+ (0x1E1B, 'V'),
+ (0x1E1C, 'M', u'ḝ'),
+ (0x1E1D, 'V'),
+ (0x1E1E, 'M', u'ḟ'),
+ (0x1E1F, 'V'),
+ (0x1E20, 'M', u'ḡ'),
+ (0x1E21, 'V'),
+ (0x1E22, 'M', u'ḣ'),
+ (0x1E23, 'V'),
+ (0x1E24, 'M', u'ḥ'),
+ (0x1E25, 'V'),
+ (0x1E26, 'M', u'ḧ'),
+ (0x1E27, 'V'),
+ (0x1E28, 'M', u'ḩ'),
+ (0x1E29, 'V'),
+ (0x1E2A, 'M', u'ḫ'),
+ (0x1E2B, 'V'),
+ (0x1E2C, 'M', u'ḭ'),
+ (0x1E2D, 'V'),
+ (0x1E2E, 'M', u'ḯ'),
+ (0x1E2F, 'V'),
+ (0x1E30, 'M', u'ḱ'),
+ (0x1E31, 'V'),
+ (0x1E32, 'M', u'ḳ'),
+ (0x1E33, 'V'),
+ (0x1E34, 'M', u'ḵ'),
+ (0x1E35, 'V'),
+ (0x1E36, 'M', u'ḷ'),
+ (0x1E37, 'V'),
+ (0x1E38, 'M', u'ḹ'),
+ (0x1E39, 'V'),
+ (0x1E3A, 'M', u'ḻ'),
+ (0x1E3B, 'V'),
+ (0x1E3C, 'M', u'ḽ'),
+ (0x1E3D, 'V'),
+ (0x1E3E, 'M', u'ḿ'),
+ (0x1E3F, 'V'),
+ ]
+
+def _seg_17():
+ return [
+ (0x1E40, 'M', u'ṁ'),
+ (0x1E41, 'V'),
+ (0x1E42, 'M', u'ṃ'),
+ (0x1E43, 'V'),
+ (0x1E44, 'M', u'ṅ'),
+ (0x1E45, 'V'),
+ (0x1E46, 'M', u'ṇ'),
+ (0x1E47, 'V'),
+ (0x1E48, 'M', u'ṉ'),
+ (0x1E49, 'V'),
+ (0x1E4A, 'M', u'ṋ'),
+ (0x1E4B, 'V'),
+ (0x1E4C, 'M', u'ṍ'),
+ (0x1E4D, 'V'),
+ (0x1E4E, 'M', u'ṏ'),
+ (0x1E4F, 'V'),
+ (0x1E50, 'M', u'ṑ'),
+ (0x1E51, 'V'),
+ (0x1E52, 'M', u'ṓ'),
+ (0x1E53, 'V'),
+ (0x1E54, 'M', u'ṕ'),
+ (0x1E55, 'V'),
+ (0x1E56, 'M', u'ṗ'),
+ (0x1E57, 'V'),
+ (0x1E58, 'M', u'ṙ'),
+ (0x1E59, 'V'),
+ (0x1E5A, 'M', u'ṛ'),
+ (0x1E5B, 'V'),
+ (0x1E5C, 'M', u'ṝ'),
+ (0x1E5D, 'V'),
+ (0x1E5E, 'M', u'ṟ'),
+ (0x1E5F, 'V'),
+ (0x1E60, 'M', u'ṡ'),
+ (0x1E61, 'V'),
+ (0x1E62, 'M', u'ṣ'),
+ (0x1E63, 'V'),
+ (0x1E64, 'M', u'ṥ'),
+ (0x1E65, 'V'),
+ (0x1E66, 'M', u'ṧ'),
+ (0x1E67, 'V'),
+ (0x1E68, 'M', u'ṩ'),
+ (0x1E69, 'V'),
+ (0x1E6A, 'M', u'ṫ'),
+ (0x1E6B, 'V'),
+ (0x1E6C, 'M', u'ṭ'),
+ (0x1E6D, 'V'),
+ (0x1E6E, 'M', u'ṯ'),
+ (0x1E6F, 'V'),
+ (0x1E70, 'M', u'ṱ'),
+ (0x1E71, 'V'),
+ (0x1E72, 'M', u'ṳ'),
+ (0x1E73, 'V'),
+ (0x1E74, 'M', u'ṵ'),
+ (0x1E75, 'V'),
+ (0x1E76, 'M', u'ṷ'),
+ (0x1E77, 'V'),
+ (0x1E78, 'M', u'ṹ'),
+ (0x1E79, 'V'),
+ (0x1E7A, 'M', u'ṻ'),
+ (0x1E7B, 'V'),
+ (0x1E7C, 'M', u'ṽ'),
+ (0x1E7D, 'V'),
+ (0x1E7E, 'M', u'ṿ'),
+ (0x1E7F, 'V'),
+ (0x1E80, 'M', u'ẁ'),
+ (0x1E81, 'V'),
+ (0x1E82, 'M', u'ẃ'),
+ (0x1E83, 'V'),
+ (0x1E84, 'M', u'ẅ'),
+ (0x1E85, 'V'),
+ (0x1E86, 'M', u'ẇ'),
+ (0x1E87, 'V'),
+ (0x1E88, 'M', u'ẉ'),
+ (0x1E89, 'V'),
+ (0x1E8A, 'M', u'ẋ'),
+ (0x1E8B, 'V'),
+ (0x1E8C, 'M', u'ẍ'),
+ (0x1E8D, 'V'),
+ (0x1E8E, 'M', u'ẏ'),
+ (0x1E8F, 'V'),
+ (0x1E90, 'M', u'ẑ'),
+ (0x1E91, 'V'),
+ (0x1E92, 'M', u'ẓ'),
+ (0x1E93, 'V'),
+ (0x1E94, 'M', u'ẕ'),
+ (0x1E95, 'V'),
+ (0x1E9A, 'M', u'aʾ'),
+ (0x1E9B, 'M', u'ṡ'),
+ (0x1E9C, 'V'),
+ (0x1E9E, 'M', u'ss'),
+ (0x1E9F, 'V'),
+ (0x1EA0, 'M', u'ạ'),
+ (0x1EA1, 'V'),
+ (0x1EA2, 'M', u'ả'),
+ (0x1EA3, 'V'),
+ (0x1EA4, 'M', u'ấ'),
+ (0x1EA5, 'V'),
+ (0x1EA6, 'M', u'ầ'),
+ (0x1EA7, 'V'),
+ (0x1EA8, 'M', u'ẩ'),
+ ]
+
+def _seg_18():
+ return [
+ (0x1EA9, 'V'),
+ (0x1EAA, 'M', u'ẫ'),
+ (0x1EAB, 'V'),
+ (0x1EAC, 'M', u'ậ'),
+ (0x1EAD, 'V'),
+ (0x1EAE, 'M', u'ắ'),
+ (0x1EAF, 'V'),
+ (0x1EB0, 'M', u'ằ'),
+ (0x1EB1, 'V'),
+ (0x1EB2, 'M', u'ẳ'),
+ (0x1EB3, 'V'),
+ (0x1EB4, 'M', u'ẵ'),
+ (0x1EB5, 'V'),
+ (0x1EB6, 'M', u'ặ'),
+ (0x1EB7, 'V'),
+ (0x1EB8, 'M', u'ẹ'),
+ (0x1EB9, 'V'),
+ (0x1EBA, 'M', u'ẻ'),
+ (0x1EBB, 'V'),
+ (0x1EBC, 'M', u'ẽ'),
+ (0x1EBD, 'V'),
+ (0x1EBE, 'M', u'ế'),
+ (0x1EBF, 'V'),
+ (0x1EC0, 'M', u'ề'),
+ (0x1EC1, 'V'),
+ (0x1EC2, 'M', u'ể'),
+ (0x1EC3, 'V'),
+ (0x1EC4, 'M', u'ễ'),
+ (0x1EC5, 'V'),
+ (0x1EC6, 'M', u'ệ'),
+ (0x1EC7, 'V'),
+ (0x1EC8, 'M', u'ỉ'),
+ (0x1EC9, 'V'),
+ (0x1ECA, 'M', u'ị'),
+ (0x1ECB, 'V'),
+ (0x1ECC, 'M', u'ọ'),
+ (0x1ECD, 'V'),
+ (0x1ECE, 'M', u'ỏ'),
+ (0x1ECF, 'V'),
+ (0x1ED0, 'M', u'ố'),
+ (0x1ED1, 'V'),
+ (0x1ED2, 'M', u'ồ'),
+ (0x1ED3, 'V'),
+ (0x1ED4, 'M', u'ổ'),
+ (0x1ED5, 'V'),
+ (0x1ED6, 'M', u'ỗ'),
+ (0x1ED7, 'V'),
+ (0x1ED8, 'M', u'ộ'),
+ (0x1ED9, 'V'),
+ (0x1EDA, 'M', u'ớ'),
+ (0x1EDB, 'V'),
+ (0x1EDC, 'M', u'ờ'),
+ (0x1EDD, 'V'),
+ (0x1EDE, 'M', u'ở'),
+ (0x1EDF, 'V'),
+ (0x1EE0, 'M', u'ỡ'),
+ (0x1EE1, 'V'),
+ (0x1EE2, 'M', u'ợ'),
+ (0x1EE3, 'V'),
+ (0x1EE4, 'M', u'ụ'),
+ (0x1EE5, 'V'),
+ (0x1EE6, 'M', u'ủ'),
+ (0x1EE7, 'V'),
+ (0x1EE8, 'M', u'ứ'),
+ (0x1EE9, 'V'),
+ (0x1EEA, 'M', u'ừ'),
+ (0x1EEB, 'V'),
+ (0x1EEC, 'M', u'ử'),
+ (0x1EED, 'V'),
+ (0x1EEE, 'M', u'ữ'),
+ (0x1EEF, 'V'),
+ (0x1EF0, 'M', u'ự'),
+ (0x1EF1, 'V'),
+ (0x1EF2, 'M', u'ỳ'),
+ (0x1EF3, 'V'),
+ (0x1EF4, 'M', u'ỵ'),
+ (0x1EF5, 'V'),
+ (0x1EF6, 'M', u'ỷ'),
+ (0x1EF7, 'V'),
+ (0x1EF8, 'M', u'ỹ'),
+ (0x1EF9, 'V'),
+ (0x1EFA, 'M', u'ỻ'),
+ (0x1EFB, 'V'),
+ (0x1EFC, 'M', u'ỽ'),
+ (0x1EFD, 'V'),
+ (0x1EFE, 'M', u'ỿ'),
+ (0x1EFF, 'V'),
+ (0x1F08, 'M', u'ἀ'),
+ (0x1F09, 'M', u'ἁ'),
+ (0x1F0A, 'M', u'ἂ'),
+ (0x1F0B, 'M', u'ἃ'),
+ (0x1F0C, 'M', u'ἄ'),
+ (0x1F0D, 'M', u'ἅ'),
+ (0x1F0E, 'M', u'ἆ'),
+ (0x1F0F, 'M', u'ἇ'),
+ (0x1F10, 'V'),
+ (0x1F16, 'X'),
+ (0x1F18, 'M', u'ἐ'),
+ (0x1F19, 'M', u'ἑ'),
+ (0x1F1A, 'M', u'ἒ'),
+ ]
+
+def _seg_19():
+ return [
+ (0x1F1B, 'M', u'ἓ'),
+ (0x1F1C, 'M', u'ἔ'),
+ (0x1F1D, 'M', u'ἕ'),
+ (0x1F1E, 'X'),
+ (0x1F20, 'V'),
+ (0x1F28, 'M', u'ἠ'),
+ (0x1F29, 'M', u'ἡ'),
+ (0x1F2A, 'M', u'ἢ'),
+ (0x1F2B, 'M', u'ἣ'),
+ (0x1F2C, 'M', u'ἤ'),
+ (0x1F2D, 'M', u'ἥ'),
+ (0x1F2E, 'M', u'ἦ'),
+ (0x1F2F, 'M', u'ἧ'),
+ (0x1F30, 'V'),
+ (0x1F38, 'M', u'ἰ'),
+ (0x1F39, 'M', u'ἱ'),
+ (0x1F3A, 'M', u'ἲ'),
+ (0x1F3B, 'M', u'ἳ'),
+ (0x1F3C, 'M', u'ἴ'),
+ (0x1F3D, 'M', u'ἵ'),
+ (0x1F3E, 'M', u'ἶ'),
+ (0x1F3F, 'M', u'ἷ'),
+ (0x1F40, 'V'),
+ (0x1F46, 'X'),
+ (0x1F48, 'M', u'ὀ'),
+ (0x1F49, 'M', u'ὁ'),
+ (0x1F4A, 'M', u'ὂ'),
+ (0x1F4B, 'M', u'ὃ'),
+ (0x1F4C, 'M', u'ὄ'),
+ (0x1F4D, 'M', u'ὅ'),
+ (0x1F4E, 'X'),
+ (0x1F50, 'V'),
+ (0x1F58, 'X'),
+ (0x1F59, 'M', u'ὑ'),
+ (0x1F5A, 'X'),
+ (0x1F5B, 'M', u'ὓ'),
+ (0x1F5C, 'X'),
+ (0x1F5D, 'M', u'ὕ'),
+ (0x1F5E, 'X'),
+ (0x1F5F, 'M', u'ὗ'),
+ (0x1F60, 'V'),
+ (0x1F68, 'M', u'ὠ'),
+ (0x1F69, 'M', u'ὡ'),
+ (0x1F6A, 'M', u'ὢ'),
+ (0x1F6B, 'M', u'ὣ'),
+ (0x1F6C, 'M', u'ὤ'),
+ (0x1F6D, 'M', u'ὥ'),
+ (0x1F6E, 'M', u'ὦ'),
+ (0x1F6F, 'M', u'ὧ'),
+ (0x1F70, 'V'),
+ (0x1F71, 'M', u'ά'),
+ (0x1F72, 'V'),
+ (0x1F73, 'M', u'έ'),
+ (0x1F74, 'V'),
+ (0x1F75, 'M', u'ή'),
+ (0x1F76, 'V'),
+ (0x1F77, 'M', u'ί'),
+ (0x1F78, 'V'),
+ (0x1F79, 'M', u'ό'),
+ (0x1F7A, 'V'),
+ (0x1F7B, 'M', u'ύ'),
+ (0x1F7C, 'V'),
+ (0x1F7D, 'M', u'ώ'),
+ (0x1F7E, 'X'),
+ (0x1F80, 'M', u'ἀι'),
+ (0x1F81, 'M', u'ἁι'),
+ (0x1F82, 'M', u'ἂι'),
+ (0x1F83, 'M', u'ἃι'),
+ (0x1F84, 'M', u'ἄι'),
+ (0x1F85, 'M', u'ἅι'),
+ (0x1F86, 'M', u'ἆι'),
+ (0x1F87, 'M', u'ἇι'),
+ (0x1F88, 'M', u'ἀι'),
+ (0x1F89, 'M', u'ἁι'),
+ (0x1F8A, 'M', u'ἂι'),
+ (0x1F8B, 'M', u'ἃι'),
+ (0x1F8C, 'M', u'ἄι'),
+ (0x1F8D, 'M', u'ἅι'),
+ (0x1F8E, 'M', u'ἆι'),
+ (0x1F8F, 'M', u'ἇι'),
+ (0x1F90, 'M', u'ἠι'),
+ (0x1F91, 'M', u'ἡι'),
+ (0x1F92, 'M', u'ἢι'),
+ (0x1F93, 'M', u'ἣι'),
+ (0x1F94, 'M', u'ἤι'),
+ (0x1F95, 'M', u'ἥι'),
+ (0x1F96, 'M', u'ἦι'),
+ (0x1F97, 'M', u'ἧι'),
+ (0x1F98, 'M', u'ἠι'),
+ (0x1F99, 'M', u'ἡι'),
+ (0x1F9A, 'M', u'ἢι'),
+ (0x1F9B, 'M', u'ἣι'),
+ (0x1F9C, 'M', u'ἤι'),
+ (0x1F9D, 'M', u'ἥι'),
+ (0x1F9E, 'M', u'ἦι'),
+ (0x1F9F, 'M', u'ἧι'),
+ (0x1FA0, 'M', u'ὠι'),
+ (0x1FA1, 'M', u'ὡι'),
+ (0x1FA2, 'M', u'ὢι'),
+ (0x1FA3, 'M', u'ὣι'),
+ ]
+
+def _seg_20():
+ return [
+ (0x1FA4, 'M', u'ὤι'),
+ (0x1FA5, 'M', u'ὥι'),
+ (0x1FA6, 'M', u'ὦι'),
+ (0x1FA7, 'M', u'ὧι'),
+ (0x1FA8, 'M', u'ὠι'),
+ (0x1FA9, 'M', u'ὡι'),
+ (0x1FAA, 'M', u'ὢι'),
+ (0x1FAB, 'M', u'ὣι'),
+ (0x1FAC, 'M', u'ὤι'),
+ (0x1FAD, 'M', u'ὥι'),
+ (0x1FAE, 'M', u'ὦι'),
+ (0x1FAF, 'M', u'ὧι'),
+ (0x1FB0, 'V'),
+ (0x1FB2, 'M', u'ὰι'),
+ (0x1FB3, 'M', u'αι'),
+ (0x1FB4, 'M', u'άι'),
+ (0x1FB5, 'X'),
+ (0x1FB6, 'V'),
+ (0x1FB7, 'M', u'ᾶι'),
+ (0x1FB8, 'M', u'ᾰ'),
+ (0x1FB9, 'M', u'ᾱ'),
+ (0x1FBA, 'M', u'ὰ'),
+ (0x1FBB, 'M', u'ά'),
+ (0x1FBC, 'M', u'αι'),
+ (0x1FBD, '3', u' ̓'),
+ (0x1FBE, 'M', u'ι'),
+ (0x1FBF, '3', u' ̓'),
+ (0x1FC0, '3', u' ͂'),
+ (0x1FC1, '3', u' ̈͂'),
+ (0x1FC2, 'M', u'ὴι'),
+ (0x1FC3, 'M', u'ηι'),
+ (0x1FC4, 'M', u'ήι'),
+ (0x1FC5, 'X'),
+ (0x1FC6, 'V'),
+ (0x1FC7, 'M', u'ῆι'),
+ (0x1FC8, 'M', u'ὲ'),
+ (0x1FC9, 'M', u'έ'),
+ (0x1FCA, 'M', u'ὴ'),
+ (0x1FCB, 'M', u'ή'),
+ (0x1FCC, 'M', u'ηι'),
+ (0x1FCD, '3', u' ̓̀'),
+ (0x1FCE, '3', u' ̓́'),
+ (0x1FCF, '3', u' ̓͂'),
+ (0x1FD0, 'V'),
+ (0x1FD3, 'M', u'ΐ'),
+ (0x1FD4, 'X'),
+ (0x1FD6, 'V'),
+ (0x1FD8, 'M', u'ῐ'),
+ (0x1FD9, 'M', u'ῑ'),
+ (0x1FDA, 'M', u'ὶ'),
+ (0x1FDB, 'M', u'ί'),
+ (0x1FDC, 'X'),
+ (0x1FDD, '3', u' ̔̀'),
+ (0x1FDE, '3', u' ̔́'),
+ (0x1FDF, '3', u' ̔͂'),
+ (0x1FE0, 'V'),
+ (0x1FE3, 'M', u'ΰ'),
+ (0x1FE4, 'V'),
+ (0x1FE8, 'M', u'ῠ'),
+ (0x1FE9, 'M', u'ῡ'),
+ (0x1FEA, 'M', u'ὺ'),
+ (0x1FEB, 'M', u'ύ'),
+ (0x1FEC, 'M', u'ῥ'),
+ (0x1FED, '3', u' ̈̀'),
+ (0x1FEE, '3', u' ̈́'),
+ (0x1FEF, '3', u'`'),
+ (0x1FF0, 'X'),
+ (0x1FF2, 'M', u'ὼι'),
+ (0x1FF3, 'M', u'ωι'),
+ (0x1FF4, 'M', u'ώι'),
+ (0x1FF5, 'X'),
+ (0x1FF6, 'V'),
+ (0x1FF7, 'M', u'ῶι'),
+ (0x1FF8, 'M', u'ὸ'),
+ (0x1FF9, 'M', u'ό'),
+ (0x1FFA, 'M', u'ὼ'),
+ (0x1FFB, 'M', u'ώ'),
+ (0x1FFC, 'M', u'ωι'),
+ (0x1FFD, '3', u' ́'),
+ (0x1FFE, '3', u' ̔'),
+ (0x1FFF, 'X'),
+ (0x2000, '3', u' '),
+ (0x200B, 'I'),
+ (0x200C, 'D', u''),
+ (0x200E, 'X'),
+ (0x2010, 'V'),
+ (0x2011, 'M', u'‐'),
+ (0x2012, 'V'),
+ (0x2017, '3', u' ̳'),
+ (0x2018, 'V'),
+ (0x2024, 'X'),
+ (0x2027, 'V'),
+ (0x2028, 'X'),
+ (0x202F, '3', u' '),
+ (0x2030, 'V'),
+ (0x2033, 'M', u'′′'),
+ (0x2034, 'M', u'′′′'),
+ (0x2035, 'V'),
+ (0x2036, 'M', u'‵‵'),
+ (0x2037, 'M', u'‵‵‵'),
+ ]
+
+def _seg_21():
+ return [
+ (0x2038, 'V'),
+ (0x203C, '3', u'!!'),
+ (0x203D, 'V'),
+ (0x203E, '3', u' ̅'),
+ (0x203F, 'V'),
+ (0x2047, '3', u'??'),
+ (0x2048, '3', u'?!'),
+ (0x2049, '3', u'!?'),
+ (0x204A, 'V'),
+ (0x2057, 'M', u'′′′′'),
+ (0x2058, 'V'),
+ (0x205F, '3', u' '),
+ (0x2060, 'I'),
+ (0x2061, 'X'),
+ (0x2064, 'I'),
+ (0x2065, 'X'),
+ (0x2070, 'M', u'0'),
+ (0x2071, 'M', u'i'),
+ (0x2072, 'X'),
+ (0x2074, 'M', u'4'),
+ (0x2075, 'M', u'5'),
+ (0x2076, 'M', u'6'),
+ (0x2077, 'M', u'7'),
+ (0x2078, 'M', u'8'),
+ (0x2079, 'M', u'9'),
+ (0x207A, '3', u'+'),
+ (0x207B, 'M', u'−'),
+ (0x207C, '3', u'='),
+ (0x207D, '3', u'('),
+ (0x207E, '3', u')'),
+ (0x207F, 'M', u'n'),
+ (0x2080, 'M', u'0'),
+ (0x2081, 'M', u'1'),
+ (0x2082, 'M', u'2'),
+ (0x2083, 'M', u'3'),
+ (0x2084, 'M', u'4'),
+ (0x2085, 'M', u'5'),
+ (0x2086, 'M', u'6'),
+ (0x2087, 'M', u'7'),
+ (0x2088, 'M', u'8'),
+ (0x2089, 'M', u'9'),
+ (0x208A, '3', u'+'),
+ (0x208B, 'M', u'−'),
+ (0x208C, '3', u'='),
+ (0x208D, '3', u'('),
+ (0x208E, '3', u')'),
+ (0x208F, 'X'),
+ (0x2090, 'M', u'a'),
+ (0x2091, 'M', u'e'),
+ (0x2092, 'M', u'o'),
+ (0x2093, 'M', u'x'),
+ (0x2094, 'M', u'ə'),
+ (0x2095, 'M', u'h'),
+ (0x2096, 'M', u'k'),
+ (0x2097, 'M', u'l'),
+ (0x2098, 'M', u'm'),
+ (0x2099, 'M', u'n'),
+ (0x209A, 'M', u'p'),
+ (0x209B, 'M', u's'),
+ (0x209C, 'M', u't'),
+ (0x209D, 'X'),
+ (0x20A0, 'V'),
+ (0x20A8, 'M', u'rs'),
+ (0x20A9, 'V'),
+ (0x20C0, 'X'),
+ (0x20D0, 'V'),
+ (0x20F1, 'X'),
+ (0x2100, '3', u'a/c'),
+ (0x2101, '3', u'a/s'),
+ (0x2102, 'M', u'c'),
+ (0x2103, 'M', u'°c'),
+ (0x2104, 'V'),
+ (0x2105, '3', u'c/o'),
+ (0x2106, '3', u'c/u'),
+ (0x2107, 'M', u'ɛ'),
+ (0x2108, 'V'),
+ (0x2109, 'M', u'°f'),
+ (0x210A, 'M', u'g'),
+ (0x210B, 'M', u'h'),
+ (0x210F, 'M', u'ħ'),
+ (0x2110, 'M', u'i'),
+ (0x2112, 'M', u'l'),
+ (0x2114, 'V'),
+ (0x2115, 'M', u'n'),
+ (0x2116, 'M', u'no'),
+ (0x2117, 'V'),
+ (0x2119, 'M', u'p'),
+ (0x211A, 'M', u'q'),
+ (0x211B, 'M', u'r'),
+ (0x211E, 'V'),
+ (0x2120, 'M', u'sm'),
+ (0x2121, 'M', u'tel'),
+ (0x2122, 'M', u'tm'),
+ (0x2123, 'V'),
+ (0x2124, 'M', u'z'),
+ (0x2125, 'V'),
+ (0x2126, 'M', u'ω'),
+ (0x2127, 'V'),
+ (0x2128, 'M', u'z'),
+ (0x2129, 'V'),
+ ]
+
+def _seg_22():
+ return [
+ (0x212A, 'M', u'k'),
+ (0x212B, 'M', u'å'),
+ (0x212C, 'M', u'b'),
+ (0x212D, 'M', u'c'),
+ (0x212E, 'V'),
+ (0x212F, 'M', u'e'),
+ (0x2131, 'M', u'f'),
+ (0x2132, 'X'),
+ (0x2133, 'M', u'm'),
+ (0x2134, 'M', u'o'),
+ (0x2135, 'M', u'א'),
+ (0x2136, 'M', u'ב'),
+ (0x2137, 'M', u'ג'),
+ (0x2138, 'M', u'ד'),
+ (0x2139, 'M', u'i'),
+ (0x213A, 'V'),
+ (0x213B, 'M', u'fax'),
+ (0x213C, 'M', u'π'),
+ (0x213D, 'M', u'γ'),
+ (0x213F, 'M', u'π'),
+ (0x2140, 'M', u'∑'),
+ (0x2141, 'V'),
+ (0x2145, 'M', u'd'),
+ (0x2147, 'M', u'e'),
+ (0x2148, 'M', u'i'),
+ (0x2149, 'M', u'j'),
+ (0x214A, 'V'),
+ (0x2150, 'M', u'1⁄7'),
+ (0x2151, 'M', u'1⁄9'),
+ (0x2152, 'M', u'1⁄10'),
+ (0x2153, 'M', u'1⁄3'),
+ (0x2154, 'M', u'2⁄3'),
+ (0x2155, 'M', u'1⁄5'),
+ (0x2156, 'M', u'2⁄5'),
+ (0x2157, 'M', u'3⁄5'),
+ (0x2158, 'M', u'4⁄5'),
+ (0x2159, 'M', u'1⁄6'),
+ (0x215A, 'M', u'5⁄6'),
+ (0x215B, 'M', u'1⁄8'),
+ (0x215C, 'M', u'3⁄8'),
+ (0x215D, 'M', u'5⁄8'),
+ (0x215E, 'M', u'7⁄8'),
+ (0x215F, 'M', u'1⁄'),
+ (0x2160, 'M', u'i'),
+ (0x2161, 'M', u'ii'),
+ (0x2162, 'M', u'iii'),
+ (0x2163, 'M', u'iv'),
+ (0x2164, 'M', u'v'),
+ (0x2165, 'M', u'vi'),
+ (0x2166, 'M', u'vii'),
+ (0x2167, 'M', u'viii'),
+ (0x2168, 'M', u'ix'),
+ (0x2169, 'M', u'x'),
+ (0x216A, 'M', u'xi'),
+ (0x216B, 'M', u'xii'),
+ (0x216C, 'M', u'l'),
+ (0x216D, 'M', u'c'),
+ (0x216E, 'M', u'd'),
+ (0x216F, 'M', u'm'),
+ (0x2170, 'M', u'i'),
+ (0x2171, 'M', u'ii'),
+ (0x2172, 'M', u'iii'),
+ (0x2173, 'M', u'iv'),
+ (0x2174, 'M', u'v'),
+ (0x2175, 'M', u'vi'),
+ (0x2176, 'M', u'vii'),
+ (0x2177, 'M', u'viii'),
+ (0x2178, 'M', u'ix'),
+ (0x2179, 'M', u'x'),
+ (0x217A, 'M', u'xi'),
+ (0x217B, 'M', u'xii'),
+ (0x217C, 'M', u'l'),
+ (0x217D, 'M', u'c'),
+ (0x217E, 'M', u'd'),
+ (0x217F, 'M', u'm'),
+ (0x2180, 'V'),
+ (0x2183, 'X'),
+ (0x2184, 'V'),
+ (0x2189, 'M', u'0⁄3'),
+ (0x218A, 'V'),
+ (0x218C, 'X'),
+ (0x2190, 'V'),
+ (0x222C, 'M', u'∫∫'),
+ (0x222D, 'M', u'∫∫∫'),
+ (0x222E, 'V'),
+ (0x222F, 'M', u'∮∮'),
+ (0x2230, 'M', u'∮∮∮'),
+ (0x2231, 'V'),
+ (0x2260, '3'),
+ (0x2261, 'V'),
+ (0x226E, '3'),
+ (0x2270, 'V'),
+ (0x2329, 'M', u'〈'),
+ (0x232A, 'M', u'〉'),
+ (0x232B, 'V'),
+ (0x2427, 'X'),
+ (0x2440, 'V'),
+ (0x244B, 'X'),
+ (0x2460, 'M', u'1'),
+ (0x2461, 'M', u'2'),
+ ]
+
+def _seg_23():
+ return [
+ (0x2462, 'M', u'3'),
+ (0x2463, 'M', u'4'),
+ (0x2464, 'M', u'5'),
+ (0x2465, 'M', u'6'),
+ (0x2466, 'M', u'7'),
+ (0x2467, 'M', u'8'),
+ (0x2468, 'M', u'9'),
+ (0x2469, 'M', u'10'),
+ (0x246A, 'M', u'11'),
+ (0x246B, 'M', u'12'),
+ (0x246C, 'M', u'13'),
+ (0x246D, 'M', u'14'),
+ (0x246E, 'M', u'15'),
+ (0x246F, 'M', u'16'),
+ (0x2470, 'M', u'17'),
+ (0x2471, 'M', u'18'),
+ (0x2472, 'M', u'19'),
+ (0x2473, 'M', u'20'),
+ (0x2474, '3', u'(1)'),
+ (0x2475, '3', u'(2)'),
+ (0x2476, '3', u'(3)'),
+ (0x2477, '3', u'(4)'),
+ (0x2478, '3', u'(5)'),
+ (0x2479, '3', u'(6)'),
+ (0x247A, '3', u'(7)'),
+ (0x247B, '3', u'(8)'),
+ (0x247C, '3', u'(9)'),
+ (0x247D, '3', u'(10)'),
+ (0x247E, '3', u'(11)'),
+ (0x247F, '3', u'(12)'),
+ (0x2480, '3', u'(13)'),
+ (0x2481, '3', u'(14)'),
+ (0x2482, '3', u'(15)'),
+ (0x2483, '3', u'(16)'),
+ (0x2484, '3', u'(17)'),
+ (0x2485, '3', u'(18)'),
+ (0x2486, '3', u'(19)'),
+ (0x2487, '3', u'(20)'),
+ (0x2488, 'X'),
+ (0x249C, '3', u'(a)'),
+ (0x249D, '3', u'(b)'),
+ (0x249E, '3', u'(c)'),
+ (0x249F, '3', u'(d)'),
+ (0x24A0, '3', u'(e)'),
+ (0x24A1, '3', u'(f)'),
+ (0x24A2, '3', u'(g)'),
+ (0x24A3, '3', u'(h)'),
+ (0x24A4, '3', u'(i)'),
+ (0x24A5, '3', u'(j)'),
+ (0x24A6, '3', u'(k)'),
+ (0x24A7, '3', u'(l)'),
+ (0x24A8, '3', u'(m)'),
+ (0x24A9, '3', u'(n)'),
+ (0x24AA, '3', u'(o)'),
+ (0x24AB, '3', u'(p)'),
+ (0x24AC, '3', u'(q)'),
+ (0x24AD, '3', u'(r)'),
+ (0x24AE, '3', u'(s)'),
+ (0x24AF, '3', u'(t)'),
+ (0x24B0, '3', u'(u)'),
+ (0x24B1, '3', u'(v)'),
+ (0x24B2, '3', u'(w)'),
+ (0x24B3, '3', u'(x)'),
+ (0x24B4, '3', u'(y)'),
+ (0x24B5, '3', u'(z)'),
+ (0x24B6, 'M', u'a'),
+ (0x24B7, 'M', u'b'),
+ (0x24B8, 'M', u'c'),
+ (0x24B9, 'M', u'd'),
+ (0x24BA, 'M', u'e'),
+ (0x24BB, 'M', u'f'),
+ (0x24BC, 'M', u'g'),
+ (0x24BD, 'M', u'h'),
+ (0x24BE, 'M', u'i'),
+ (0x24BF, 'M', u'j'),
+ (0x24C0, 'M', u'k'),
+ (0x24C1, 'M', u'l'),
+ (0x24C2, 'M', u'm'),
+ (0x24C3, 'M', u'n'),
+ (0x24C4, 'M', u'o'),
+ (0x24C5, 'M', u'p'),
+ (0x24C6, 'M', u'q'),
+ (0x24C7, 'M', u'r'),
+ (0x24C8, 'M', u's'),
+ (0x24C9, 'M', u't'),
+ (0x24CA, 'M', u'u'),
+ (0x24CB, 'M', u'v'),
+ (0x24CC, 'M', u'w'),
+ (0x24CD, 'M', u'x'),
+ (0x24CE, 'M', u'y'),
+ (0x24CF, 'M', u'z'),
+ (0x24D0, 'M', u'a'),
+ (0x24D1, 'M', u'b'),
+ (0x24D2, 'M', u'c'),
+ (0x24D3, 'M', u'd'),
+ (0x24D4, 'M', u'e'),
+ (0x24D5, 'M', u'f'),
+ (0x24D6, 'M', u'g'),
+ (0x24D7, 'M', u'h'),
+ (0x24D8, 'M', u'i'),
+ ]
+
+def _seg_24():
+ return [
+ (0x24D9, 'M', u'j'),
+ (0x24DA, 'M', u'k'),
+ (0x24DB, 'M', u'l'),
+ (0x24DC, 'M', u'm'),
+ (0x24DD, 'M', u'n'),
+ (0x24DE, 'M', u'o'),
+ (0x24DF, 'M', u'p'),
+ (0x24E0, 'M', u'q'),
+ (0x24E1, 'M', u'r'),
+ (0x24E2, 'M', u's'),
+ (0x24E3, 'M', u't'),
+ (0x24E4, 'M', u'u'),
+ (0x24E5, 'M', u'v'),
+ (0x24E6, 'M', u'w'),
+ (0x24E7, 'M', u'x'),
+ (0x24E8, 'M', u'y'),
+ (0x24E9, 'M', u'z'),
+ (0x24EA, 'M', u'0'),
+ (0x24EB, 'V'),
+ (0x2A0C, 'M', u'∫∫∫∫'),
+ (0x2A0D, 'V'),
+ (0x2A74, '3', u'::='),
+ (0x2A75, '3', u'=='),
+ (0x2A76, '3', u'==='),
+ (0x2A77, 'V'),
+ (0x2ADC, 'M', u'⫝̸'),
+ (0x2ADD, 'V'),
+ (0x2B74, 'X'),
+ (0x2B76, 'V'),
+ (0x2B96, 'X'),
+ (0x2B98, 'V'),
+ (0x2BC9, 'X'),
+ (0x2BCA, 'V'),
+ (0x2BFF, 'X'),
+ (0x2C00, 'M', u'ⰰ'),
+ (0x2C01, 'M', u'ⰱ'),
+ (0x2C02, 'M', u'ⰲ'),
+ (0x2C03, 'M', u'ⰳ'),
+ (0x2C04, 'M', u'ⰴ'),
+ (0x2C05, 'M', u'ⰵ'),
+ (0x2C06, 'M', u'ⰶ'),
+ (0x2C07, 'M', u'ⰷ'),
+ (0x2C08, 'M', u'ⰸ'),
+ (0x2C09, 'M', u'ⰹ'),
+ (0x2C0A, 'M', u'ⰺ'),
+ (0x2C0B, 'M', u'ⰻ'),
+ (0x2C0C, 'M', u'ⰼ'),
+ (0x2C0D, 'M', u'ⰽ'),
+ (0x2C0E, 'M', u'ⰾ'),
+ (0x2C0F, 'M', u'ⰿ'),
+ (0x2C10, 'M', u'ⱀ'),
+ (0x2C11, 'M', u'ⱁ'),
+ (0x2C12, 'M', u'ⱂ'),
+ (0x2C13, 'M', u'ⱃ'),
+ (0x2C14, 'M', u'ⱄ'),
+ (0x2C15, 'M', u'ⱅ'),
+ (0x2C16, 'M', u'ⱆ'),
+ (0x2C17, 'M', u'ⱇ'),
+ (0x2C18, 'M', u'ⱈ'),
+ (0x2C19, 'M', u'ⱉ'),
+ (0x2C1A, 'M', u'ⱊ'),
+ (0x2C1B, 'M', u'ⱋ'),
+ (0x2C1C, 'M', u'ⱌ'),
+ (0x2C1D, 'M', u'ⱍ'),
+ (0x2C1E, 'M', u'ⱎ'),
+ (0x2C1F, 'M', u'ⱏ'),
+ (0x2C20, 'M', u'ⱐ'),
+ (0x2C21, 'M', u'ⱑ'),
+ (0x2C22, 'M', u'ⱒ'),
+ (0x2C23, 'M', u'ⱓ'),
+ (0x2C24, 'M', u'ⱔ'),
+ (0x2C25, 'M', u'ⱕ'),
+ (0x2C26, 'M', u'ⱖ'),
+ (0x2C27, 'M', u'ⱗ'),
+ (0x2C28, 'M', u'ⱘ'),
+ (0x2C29, 'M', u'ⱙ'),
+ (0x2C2A, 'M', u'ⱚ'),
+ (0x2C2B, 'M', u'ⱛ'),
+ (0x2C2C, 'M', u'ⱜ'),
+ (0x2C2D, 'M', u'ⱝ'),
+ (0x2C2E, 'M', u'ⱞ'),
+ (0x2C2F, 'X'),
+ (0x2C30, 'V'),
+ (0x2C5F, 'X'),
+ (0x2C60, 'M', u'ⱡ'),
+ (0x2C61, 'V'),
+ (0x2C62, 'M', u'ɫ'),
+ (0x2C63, 'M', u'ᵽ'),
+ (0x2C64, 'M', u'ɽ'),
+ (0x2C65, 'V'),
+ (0x2C67, 'M', u'ⱨ'),
+ (0x2C68, 'V'),
+ (0x2C69, 'M', u'ⱪ'),
+ (0x2C6A, 'V'),
+ (0x2C6B, 'M', u'ⱬ'),
+ (0x2C6C, 'V'),
+ (0x2C6D, 'M', u'ɑ'),
+ (0x2C6E, 'M', u'ɱ'),
+ (0x2C6F, 'M', u'ɐ'),
+ (0x2C70, 'M', u'ɒ'),
+ ]
+
+def _seg_25():
+ return [
+ (0x2C71, 'V'),
+ (0x2C72, 'M', u'ⱳ'),
+ (0x2C73, 'V'),
+ (0x2C75, 'M', u'ⱶ'),
+ (0x2C76, 'V'),
+ (0x2C7C, 'M', u'j'),
+ (0x2C7D, 'M', u'v'),
+ (0x2C7E, 'M', u'ȿ'),
+ (0x2C7F, 'M', u'ɀ'),
+ (0x2C80, 'M', u'ⲁ'),
+ (0x2C81, 'V'),
+ (0x2C82, 'M', u'ⲃ'),
+ (0x2C83, 'V'),
+ (0x2C84, 'M', u'ⲅ'),
+ (0x2C85, 'V'),
+ (0x2C86, 'M', u'ⲇ'),
+ (0x2C87, 'V'),
+ (0x2C88, 'M', u'ⲉ'),
+ (0x2C89, 'V'),
+ (0x2C8A, 'M', u'ⲋ'),
+ (0x2C8B, 'V'),
+ (0x2C8C, 'M', u'ⲍ'),
+ (0x2C8D, 'V'),
+ (0x2C8E, 'M', u'ⲏ'),
+ (0x2C8F, 'V'),
+ (0x2C90, 'M', u'ⲑ'),
+ (0x2C91, 'V'),
+ (0x2C92, 'M', u'ⲓ'),
+ (0x2C93, 'V'),
+ (0x2C94, 'M', u'ⲕ'),
+ (0x2C95, 'V'),
+ (0x2C96, 'M', u'ⲗ'),
+ (0x2C97, 'V'),
+ (0x2C98, 'M', u'ⲙ'),
+ (0x2C99, 'V'),
+ (0x2C9A, 'M', u'ⲛ'),
+ (0x2C9B, 'V'),
+ (0x2C9C, 'M', u'ⲝ'),
+ (0x2C9D, 'V'),
+ (0x2C9E, 'M', u'ⲟ'),
+ (0x2C9F, 'V'),
+ (0x2CA0, 'M', u'ⲡ'),
+ (0x2CA1, 'V'),
+ (0x2CA2, 'M', u'ⲣ'),
+ (0x2CA3, 'V'),
+ (0x2CA4, 'M', u'ⲥ'),
+ (0x2CA5, 'V'),
+ (0x2CA6, 'M', u'ⲧ'),
+ (0x2CA7, 'V'),
+ (0x2CA8, 'M', u'ⲩ'),
+ (0x2CA9, 'V'),
+ (0x2CAA, 'M', u'ⲫ'),
+ (0x2CAB, 'V'),
+ (0x2CAC, 'M', u'ⲭ'),
+ (0x2CAD, 'V'),
+ (0x2CAE, 'M', u'ⲯ'),
+ (0x2CAF, 'V'),
+ (0x2CB0, 'M', u'ⲱ'),
+ (0x2CB1, 'V'),
+ (0x2CB2, 'M', u'ⲳ'),
+ (0x2CB3, 'V'),
+ (0x2CB4, 'M', u'ⲵ'),
+ (0x2CB5, 'V'),
+ (0x2CB6, 'M', u'ⲷ'),
+ (0x2CB7, 'V'),
+ (0x2CB8, 'M', u'ⲹ'),
+ (0x2CB9, 'V'),
+ (0x2CBA, 'M', u'ⲻ'),
+ (0x2CBB, 'V'),
+ (0x2CBC, 'M', u'ⲽ'),
+ (0x2CBD, 'V'),
+ (0x2CBE, 'M', u'ⲿ'),
+ (0x2CBF, 'V'),
+ (0x2CC0, 'M', u'ⳁ'),
+ (0x2CC1, 'V'),
+ (0x2CC2, 'M', u'ⳃ'),
+ (0x2CC3, 'V'),
+ (0x2CC4, 'M', u'ⳅ'),
+ (0x2CC5, 'V'),
+ (0x2CC6, 'M', u'ⳇ'),
+ (0x2CC7, 'V'),
+ (0x2CC8, 'M', u'ⳉ'),
+ (0x2CC9, 'V'),
+ (0x2CCA, 'M', u'ⳋ'),
+ (0x2CCB, 'V'),
+ (0x2CCC, 'M', u'ⳍ'),
+ (0x2CCD, 'V'),
+ (0x2CCE, 'M', u'ⳏ'),
+ (0x2CCF, 'V'),
+ (0x2CD0, 'M', u'ⳑ'),
+ (0x2CD1, 'V'),
+ (0x2CD2, 'M', u'ⳓ'),
+ (0x2CD3, 'V'),
+ (0x2CD4, 'M', u'ⳕ'),
+ (0x2CD5, 'V'),
+ (0x2CD6, 'M', u'ⳗ'),
+ (0x2CD7, 'V'),
+ (0x2CD8, 'M', u'ⳙ'),
+ (0x2CD9, 'V'),
+ (0x2CDA, 'M', u'ⳛ'),
+ ]
+
+def _seg_26():
+ return [
+ (0x2CDB, 'V'),
+ (0x2CDC, 'M', u'ⳝ'),
+ (0x2CDD, 'V'),
+ (0x2CDE, 'M', u'ⳟ'),
+ (0x2CDF, 'V'),
+ (0x2CE0, 'M', u'ⳡ'),
+ (0x2CE1, 'V'),
+ (0x2CE2, 'M', u'ⳣ'),
+ (0x2CE3, 'V'),
+ (0x2CEB, 'M', u'ⳬ'),
+ (0x2CEC, 'V'),
+ (0x2CED, 'M', u'ⳮ'),
+ (0x2CEE, 'V'),
+ (0x2CF2, 'M', u'ⳳ'),
+ (0x2CF3, 'V'),
+ (0x2CF4, 'X'),
+ (0x2CF9, 'V'),
+ (0x2D26, 'X'),
+ (0x2D27, 'V'),
+ (0x2D28, 'X'),
+ (0x2D2D, 'V'),
+ (0x2D2E, 'X'),
+ (0x2D30, 'V'),
+ (0x2D68, 'X'),
+ (0x2D6F, 'M', u'ⵡ'),
+ (0x2D70, 'V'),
+ (0x2D71, 'X'),
+ (0x2D7F, 'V'),
+ (0x2D97, 'X'),
+ (0x2DA0, 'V'),
+ (0x2DA7, 'X'),
+ (0x2DA8, 'V'),
+ (0x2DAF, 'X'),
+ (0x2DB0, 'V'),
+ (0x2DB7, 'X'),
+ (0x2DB8, 'V'),
+ (0x2DBF, 'X'),
+ (0x2DC0, 'V'),
+ (0x2DC7, 'X'),
+ (0x2DC8, 'V'),
+ (0x2DCF, 'X'),
+ (0x2DD0, 'V'),
+ (0x2DD7, 'X'),
+ (0x2DD8, 'V'),
+ (0x2DDF, 'X'),
+ (0x2DE0, 'V'),
+ (0x2E4F, 'X'),
+ (0x2E80, 'V'),
+ (0x2E9A, 'X'),
+ (0x2E9B, 'V'),
+ (0x2E9F, 'M', u'母'),
+ (0x2EA0, 'V'),
+ (0x2EF3, 'M', u'龟'),
+ (0x2EF4, 'X'),
+ (0x2F00, 'M', u'一'),
+ (0x2F01, 'M', u'丨'),
+ (0x2F02, 'M', u'丶'),
+ (0x2F03, 'M', u'丿'),
+ (0x2F04, 'M', u'乙'),
+ (0x2F05, 'M', u'亅'),
+ (0x2F06, 'M', u'二'),
+ (0x2F07, 'M', u'亠'),
+ (0x2F08, 'M', u'人'),
+ (0x2F09, 'M', u'儿'),
+ (0x2F0A, 'M', u'入'),
+ (0x2F0B, 'M', u'八'),
+ (0x2F0C, 'M', u'冂'),
+ (0x2F0D, 'M', u'冖'),
+ (0x2F0E, 'M', u'冫'),
+ (0x2F0F, 'M', u'几'),
+ (0x2F10, 'M', u'凵'),
+ (0x2F11, 'M', u'刀'),
+ (0x2F12, 'M', u'力'),
+ (0x2F13, 'M', u'勹'),
+ (0x2F14, 'M', u'匕'),
+ (0x2F15, 'M', u'匚'),
+ (0x2F16, 'M', u'匸'),
+ (0x2F17, 'M', u'十'),
+ (0x2F18, 'M', u'卜'),
+ (0x2F19, 'M', u'卩'),
+ (0x2F1A, 'M', u'厂'),
+ (0x2F1B, 'M', u'厶'),
+ (0x2F1C, 'M', u'又'),
+ (0x2F1D, 'M', u'口'),
+ (0x2F1E, 'M', u'囗'),
+ (0x2F1F, 'M', u'土'),
+ (0x2F20, 'M', u'士'),
+ (0x2F21, 'M', u'夂'),
+ (0x2F22, 'M', u'夊'),
+ (0x2F23, 'M', u'夕'),
+ (0x2F24, 'M', u'大'),
+ (0x2F25, 'M', u'女'),
+ (0x2F26, 'M', u'子'),
+ (0x2F27, 'M', u'宀'),
+ (0x2F28, 'M', u'寸'),
+ (0x2F29, 'M', u'小'),
+ (0x2F2A, 'M', u'尢'),
+ (0x2F2B, 'M', u'尸'),
+ (0x2F2C, 'M', u'屮'),
+ (0x2F2D, 'M', u'山'),
+ ]
+
+def _seg_27():
+ return [
+ (0x2F2E, 'M', u'巛'),
+ (0x2F2F, 'M', u'工'),
+ (0x2F30, 'M', u'己'),
+ (0x2F31, 'M', u'巾'),
+ (0x2F32, 'M', u'干'),
+ (0x2F33, 'M', u'幺'),
+ (0x2F34, 'M', u'广'),
+ (0x2F35, 'M', u'廴'),
+ (0x2F36, 'M', u'廾'),
+ (0x2F37, 'M', u'弋'),
+ (0x2F38, 'M', u'弓'),
+ (0x2F39, 'M', u'彐'),
+ (0x2F3A, 'M', u'彡'),
+ (0x2F3B, 'M', u'彳'),
+ (0x2F3C, 'M', u'心'),
+ (0x2F3D, 'M', u'戈'),
+ (0x2F3E, 'M', u'戶'),
+ (0x2F3F, 'M', u'手'),
+ (0x2F40, 'M', u'支'),
+ (0x2F41, 'M', u'攴'),
+ (0x2F42, 'M', u'文'),
+ (0x2F43, 'M', u'斗'),
+ (0x2F44, 'M', u'斤'),
+ (0x2F45, 'M', u'方'),
+ (0x2F46, 'M', u'无'),
+ (0x2F47, 'M', u'日'),
+ (0x2F48, 'M', u'曰'),
+ (0x2F49, 'M', u'月'),
+ (0x2F4A, 'M', u'木'),
+ (0x2F4B, 'M', u'欠'),
+ (0x2F4C, 'M', u'止'),
+ (0x2F4D, 'M', u'歹'),
+ (0x2F4E, 'M', u'殳'),
+ (0x2F4F, 'M', u'毋'),
+ (0x2F50, 'M', u'比'),
+ (0x2F51, 'M', u'毛'),
+ (0x2F52, 'M', u'氏'),
+ (0x2F53, 'M', u'气'),
+ (0x2F54, 'M', u'水'),
+ (0x2F55, 'M', u'火'),
+ (0x2F56, 'M', u'爪'),
+ (0x2F57, 'M', u'父'),
+ (0x2F58, 'M', u'爻'),
+ (0x2F59, 'M', u'爿'),
+ (0x2F5A, 'M', u'片'),
+ (0x2F5B, 'M', u'牙'),
+ (0x2F5C, 'M', u'牛'),
+ (0x2F5D, 'M', u'犬'),
+ (0x2F5E, 'M', u'玄'),
+ (0x2F5F, 'M', u'玉'),
+ (0x2F60, 'M', u'瓜'),
+ (0x2F61, 'M', u'瓦'),
+ (0x2F62, 'M', u'甘'),
+ (0x2F63, 'M', u'生'),
+ (0x2F64, 'M', u'用'),
+ (0x2F65, 'M', u'田'),
+ (0x2F66, 'M', u'疋'),
+ (0x2F67, 'M', u'疒'),
+ (0x2F68, 'M', u'癶'),
+ (0x2F69, 'M', u'白'),
+ (0x2F6A, 'M', u'皮'),
+ (0x2F6B, 'M', u'皿'),
+ (0x2F6C, 'M', u'目'),
+ (0x2F6D, 'M', u'矛'),
+ (0x2F6E, 'M', u'矢'),
+ (0x2F6F, 'M', u'石'),
+ (0x2F70, 'M', u'示'),
+ (0x2F71, 'M', u'禸'),
+ (0x2F72, 'M', u'禾'),
+ (0x2F73, 'M', u'穴'),
+ (0x2F74, 'M', u'立'),
+ (0x2F75, 'M', u'竹'),
+ (0x2F76, 'M', u'米'),
+ (0x2F77, 'M', u'糸'),
+ (0x2F78, 'M', u'缶'),
+ (0x2F79, 'M', u'网'),
+ (0x2F7A, 'M', u'羊'),
+ (0x2F7B, 'M', u'羽'),
+ (0x2F7C, 'M', u'老'),
+ (0x2F7D, 'M', u'而'),
+ (0x2F7E, 'M', u'耒'),
+ (0x2F7F, 'M', u'耳'),
+ (0x2F80, 'M', u'聿'),
+ (0x2F81, 'M', u'肉'),
+ (0x2F82, 'M', u'臣'),
+ (0x2F83, 'M', u'自'),
+ (0x2F84, 'M', u'至'),
+ (0x2F85, 'M', u'臼'),
+ (0x2F86, 'M', u'舌'),
+ (0x2F87, 'M', u'舛'),
+ (0x2F88, 'M', u'舟'),
+ (0x2F89, 'M', u'艮'),
+ (0x2F8A, 'M', u'色'),
+ (0x2F8B, 'M', u'艸'),
+ (0x2F8C, 'M', u'虍'),
+ (0x2F8D, 'M', u'虫'),
+ (0x2F8E, 'M', u'血'),
+ (0x2F8F, 'M', u'行'),
+ (0x2F90, 'M', u'衣'),
+ (0x2F91, 'M', u'襾'),
+ ]
+
+def _seg_28():
+ return [
+ (0x2F92, 'M', u'見'),
+ (0x2F93, 'M', u'角'),
+ (0x2F94, 'M', u'言'),
+ (0x2F95, 'M', u'谷'),
+ (0x2F96, 'M', u'豆'),
+ (0x2F97, 'M', u'豕'),
+ (0x2F98, 'M', u'豸'),
+ (0x2F99, 'M', u'貝'),
+ (0x2F9A, 'M', u'赤'),
+ (0x2F9B, 'M', u'走'),
+ (0x2F9C, 'M', u'足'),
+ (0x2F9D, 'M', u'身'),
+ (0x2F9E, 'M', u'車'),
+ (0x2F9F, 'M', u'辛'),
+ (0x2FA0, 'M', u'辰'),
+ (0x2FA1, 'M', u'辵'),
+ (0x2FA2, 'M', u'邑'),
+ (0x2FA3, 'M', u'酉'),
+ (0x2FA4, 'M', u'釆'),
+ (0x2FA5, 'M', u'里'),
+ (0x2FA6, 'M', u'金'),
+ (0x2FA7, 'M', u'長'),
+ (0x2FA8, 'M', u'門'),
+ (0x2FA9, 'M', u'阜'),
+ (0x2FAA, 'M', u'隶'),
+ (0x2FAB, 'M', u'隹'),
+ (0x2FAC, 'M', u'雨'),
+ (0x2FAD, 'M', u'靑'),
+ (0x2FAE, 'M', u'非'),
+ (0x2FAF, 'M', u'面'),
+ (0x2FB0, 'M', u'革'),
+ (0x2FB1, 'M', u'韋'),
+ (0x2FB2, 'M', u'韭'),
+ (0x2FB3, 'M', u'音'),
+ (0x2FB4, 'M', u'頁'),
+ (0x2FB5, 'M', u'風'),
+ (0x2FB6, 'M', u'飛'),
+ (0x2FB7, 'M', u'食'),
+ (0x2FB8, 'M', u'首'),
+ (0x2FB9, 'M', u'香'),
+ (0x2FBA, 'M', u'馬'),
+ (0x2FBB, 'M', u'骨'),
+ (0x2FBC, 'M', u'高'),
+ (0x2FBD, 'M', u'髟'),
+ (0x2FBE, 'M', u'鬥'),
+ (0x2FBF, 'M', u'鬯'),
+ (0x2FC0, 'M', u'鬲'),
+ (0x2FC1, 'M', u'鬼'),
+ (0x2FC2, 'M', u'魚'),
+ (0x2FC3, 'M', u'鳥'),
+ (0x2FC4, 'M', u'鹵'),
+ (0x2FC5, 'M', u'鹿'),
+ (0x2FC6, 'M', u'麥'),
+ (0x2FC7, 'M', u'麻'),
+ (0x2FC8, 'M', u'黃'),
+ (0x2FC9, 'M', u'黍'),
+ (0x2FCA, 'M', u'黑'),
+ (0x2FCB, 'M', u'黹'),
+ (0x2FCC, 'M', u'黽'),
+ (0x2FCD, 'M', u'鼎'),
+ (0x2FCE, 'M', u'鼓'),
+ (0x2FCF, 'M', u'鼠'),
+ (0x2FD0, 'M', u'鼻'),
+ (0x2FD1, 'M', u'齊'),
+ (0x2FD2, 'M', u'齒'),
+ (0x2FD3, 'M', u'龍'),
+ (0x2FD4, 'M', u'龜'),
+ (0x2FD5, 'M', u'龠'),
+ (0x2FD6, 'X'),
+ (0x3000, '3', u' '),
+ (0x3001, 'V'),
+ (0x3002, 'M', u'.'),
+ (0x3003, 'V'),
+ (0x3036, 'M', u'〒'),
+ (0x3037, 'V'),
+ (0x3038, 'M', u'十'),
+ (0x3039, 'M', u'卄'),
+ (0x303A, 'M', u'卅'),
+ (0x303B, 'V'),
+ (0x3040, 'X'),
+ (0x3041, 'V'),
+ (0x3097, 'X'),
+ (0x3099, 'V'),
+ (0x309B, '3', u' ゙'),
+ (0x309C, '3', u' ゚'),
+ (0x309D, 'V'),
+ (0x309F, 'M', u'より'),
+ (0x30A0, 'V'),
+ (0x30FF, 'M', u'コト'),
+ (0x3100, 'X'),
+ (0x3105, 'V'),
+ (0x3130, 'X'),
+ (0x3131, 'M', u'ᄀ'),
+ (0x3132, 'M', u'ᄁ'),
+ (0x3133, 'M', u'ᆪ'),
+ (0x3134, 'M', u'ᄂ'),
+ (0x3135, 'M', u'ᆬ'),
+ (0x3136, 'M', u'ᆭ'),
+ (0x3137, 'M', u'ᄃ'),
+ (0x3138, 'M', u'ᄄ'),
+ ]
+
+def _seg_29():
+ return [
+ (0x3139, 'M', u'ᄅ'),
+ (0x313A, 'M', u'ᆰ'),
+ (0x313B, 'M', u'ᆱ'),
+ (0x313C, 'M', u'ᆲ'),
+ (0x313D, 'M', u'ᆳ'),
+ (0x313E, 'M', u'ᆴ'),
+ (0x313F, 'M', u'ᆵ'),
+ (0x3140, 'M', u'ᄚ'),
+ (0x3141, 'M', u'ᄆ'),
+ (0x3142, 'M', u'ᄇ'),
+ (0x3143, 'M', u'ᄈ'),
+ (0x3144, 'M', u'ᄡ'),
+ (0x3145, 'M', u'ᄉ'),
+ (0x3146, 'M', u'ᄊ'),
+ (0x3147, 'M', u'ᄋ'),
+ (0x3148, 'M', u'ᄌ'),
+ (0x3149, 'M', u'ᄍ'),
+ (0x314A, 'M', u'ᄎ'),
+ (0x314B, 'M', u'ᄏ'),
+ (0x314C, 'M', u'ᄐ'),
+ (0x314D, 'M', u'ᄑ'),
+ (0x314E, 'M', u'ᄒ'),
+ (0x314F, 'M', u'ᅡ'),
+ (0x3150, 'M', u'ᅢ'),
+ (0x3151, 'M', u'ᅣ'),
+ (0x3152, 'M', u'ᅤ'),
+ (0x3153, 'M', u'ᅥ'),
+ (0x3154, 'M', u'ᅦ'),
+ (0x3155, 'M', u'ᅧ'),
+ (0x3156, 'M', u'ᅨ'),
+ (0x3157, 'M', u'ᅩ'),
+ (0x3158, 'M', u'ᅪ'),
+ (0x3159, 'M', u'ᅫ'),
+ (0x315A, 'M', u'ᅬ'),
+ (0x315B, 'M', u'ᅭ'),
+ (0x315C, 'M', u'ᅮ'),
+ (0x315D, 'M', u'ᅯ'),
+ (0x315E, 'M', u'ᅰ'),
+ (0x315F, 'M', u'ᅱ'),
+ (0x3160, 'M', u'ᅲ'),
+ (0x3161, 'M', u'ᅳ'),
+ (0x3162, 'M', u'ᅴ'),
+ (0x3163, 'M', u'ᅵ'),
+ (0x3164, 'X'),
+ (0x3165, 'M', u'ᄔ'),
+ (0x3166, 'M', u'ᄕ'),
+ (0x3167, 'M', u'ᇇ'),
+ (0x3168, 'M', u'ᇈ'),
+ (0x3169, 'M', u'ᇌ'),
+ (0x316A, 'M', u'ᇎ'),
+ (0x316B, 'M', u'ᇓ'),
+ (0x316C, 'M', u'ᇗ'),
+ (0x316D, 'M', u'ᇙ'),
+ (0x316E, 'M', u'ᄜ'),
+ (0x316F, 'M', u'ᇝ'),
+ (0x3170, 'M', u'ᇟ'),
+ (0x3171, 'M', u'ᄝ'),
+ (0x3172, 'M', u'ᄞ'),
+ (0x3173, 'M', u'ᄠ'),
+ (0x3174, 'M', u'ᄢ'),
+ (0x3175, 'M', u'ᄣ'),
+ (0x3176, 'M', u'ᄧ'),
+ (0x3177, 'M', u'ᄩ'),
+ (0x3178, 'M', u'ᄫ'),
+ (0x3179, 'M', u'ᄬ'),
+ (0x317A, 'M', u'ᄭ'),
+ (0x317B, 'M', u'ᄮ'),
+ (0x317C, 'M', u'ᄯ'),
+ (0x317D, 'M', u'ᄲ'),
+ (0x317E, 'M', u'ᄶ'),
+ (0x317F, 'M', u'ᅀ'),
+ (0x3180, 'M', u'ᅇ'),
+ (0x3181, 'M', u'ᅌ'),
+ (0x3182, 'M', u'ᇱ'),
+ (0x3183, 'M', u'ᇲ'),
+ (0x3184, 'M', u'ᅗ'),
+ (0x3185, 'M', u'ᅘ'),
+ (0x3186, 'M', u'ᅙ'),
+ (0x3187, 'M', u'ᆄ'),
+ (0x3188, 'M', u'ᆅ'),
+ (0x3189, 'M', u'ᆈ'),
+ (0x318A, 'M', u'ᆑ'),
+ (0x318B, 'M', u'ᆒ'),
+ (0x318C, 'M', u'ᆔ'),
+ (0x318D, 'M', u'ᆞ'),
+ (0x318E, 'M', u'ᆡ'),
+ (0x318F, 'X'),
+ (0x3190, 'V'),
+ (0x3192, 'M', u'一'),
+ (0x3193, 'M', u'二'),
+ (0x3194, 'M', u'三'),
+ (0x3195, 'M', u'四'),
+ (0x3196, 'M', u'上'),
+ (0x3197, 'M', u'中'),
+ (0x3198, 'M', u'下'),
+ (0x3199, 'M', u'甲'),
+ (0x319A, 'M', u'乙'),
+ (0x319B, 'M', u'丙'),
+ (0x319C, 'M', u'丁'),
+ (0x319D, 'M', u'天'),
+ ]
+
+def _seg_30():
+ return [
+ (0x319E, 'M', u'地'),
+ (0x319F, 'M', u'人'),
+ (0x31A0, 'V'),
+ (0x31BB, 'X'),
+ (0x31C0, 'V'),
+ (0x31E4, 'X'),
+ (0x31F0, 'V'),
+ (0x3200, '3', u'(ᄀ)'),
+ (0x3201, '3', u'(ᄂ)'),
+ (0x3202, '3', u'(ᄃ)'),
+ (0x3203, '3', u'(ᄅ)'),
+ (0x3204, '3', u'(ᄆ)'),
+ (0x3205, '3', u'(ᄇ)'),
+ (0x3206, '3', u'(ᄉ)'),
+ (0x3207, '3', u'(ᄋ)'),
+ (0x3208, '3', u'(ᄌ)'),
+ (0x3209, '3', u'(ᄎ)'),
+ (0x320A, '3', u'(ᄏ)'),
+ (0x320B, '3', u'(ᄐ)'),
+ (0x320C, '3', u'(ᄑ)'),
+ (0x320D, '3', u'(ᄒ)'),
+ (0x320E, '3', u'(가)'),
+ (0x320F, '3', u'(나)'),
+ (0x3210, '3', u'(다)'),
+ (0x3211, '3', u'(라)'),
+ (0x3212, '3', u'(마)'),
+ (0x3213, '3', u'(바)'),
+ (0x3214, '3', u'(사)'),
+ (0x3215, '3', u'(아)'),
+ (0x3216, '3', u'(자)'),
+ (0x3217, '3', u'(차)'),
+ (0x3218, '3', u'(카)'),
+ (0x3219, '3', u'(타)'),
+ (0x321A, '3', u'(파)'),
+ (0x321B, '3', u'(하)'),
+ (0x321C, '3', u'(주)'),
+ (0x321D, '3', u'(오전)'),
+ (0x321E, '3', u'(오후)'),
+ (0x321F, 'X'),
+ (0x3220, '3', u'(一)'),
+ (0x3221, '3', u'(二)'),
+ (0x3222, '3', u'(三)'),
+ (0x3223, '3', u'(四)'),
+ (0x3224, '3', u'(五)'),
+ (0x3225, '3', u'(六)'),
+ (0x3226, '3', u'(七)'),
+ (0x3227, '3', u'(八)'),
+ (0x3228, '3', u'(九)'),
+ (0x3229, '3', u'(十)'),
+ (0x322A, '3', u'(月)'),
+ (0x322B, '3', u'(火)'),
+ (0x322C, '3', u'(水)'),
+ (0x322D, '3', u'(木)'),
+ (0x322E, '3', u'(金)'),
+ (0x322F, '3', u'(土)'),
+ (0x3230, '3', u'(日)'),
+ (0x3231, '3', u'(株)'),
+ (0x3232, '3', u'(有)'),
+ (0x3233, '3', u'(社)'),
+ (0x3234, '3', u'(名)'),
+ (0x3235, '3', u'(特)'),
+ (0x3236, '3', u'(財)'),
+ (0x3237, '3', u'(祝)'),
+ (0x3238, '3', u'(労)'),
+ (0x3239, '3', u'(代)'),
+ (0x323A, '3', u'(呼)'),
+ (0x323B, '3', u'(学)'),
+ (0x323C, '3', u'(監)'),
+ (0x323D, '3', u'(企)'),
+ (0x323E, '3', u'(資)'),
+ (0x323F, '3', u'(協)'),
+ (0x3240, '3', u'(祭)'),
+ (0x3241, '3', u'(休)'),
+ (0x3242, '3', u'(自)'),
+ (0x3243, '3', u'(至)'),
+ (0x3244, 'M', u'問'),
+ (0x3245, 'M', u'幼'),
+ (0x3246, 'M', u'文'),
+ (0x3247, 'M', u'箏'),
+ (0x3248, 'V'),
+ (0x3250, 'M', u'pte'),
+ (0x3251, 'M', u'21'),
+ (0x3252, 'M', u'22'),
+ (0x3253, 'M', u'23'),
+ (0x3254, 'M', u'24'),
+ (0x3255, 'M', u'25'),
+ (0x3256, 'M', u'26'),
+ (0x3257, 'M', u'27'),
+ (0x3258, 'M', u'28'),
+ (0x3259, 'M', u'29'),
+ (0x325A, 'M', u'30'),
+ (0x325B, 'M', u'31'),
+ (0x325C, 'M', u'32'),
+ (0x325D, 'M', u'33'),
+ (0x325E, 'M', u'34'),
+ (0x325F, 'M', u'35'),
+ (0x3260, 'M', u'ᄀ'),
+ (0x3261, 'M', u'ᄂ'),
+ (0x3262, 'M', u'ᄃ'),
+ (0x3263, 'M', u'ᄅ'),
+ ]
+
+def _seg_31():
+ return [
+ (0x3264, 'M', u'ᄆ'),
+ (0x3265, 'M', u'ᄇ'),
+ (0x3266, 'M', u'ᄉ'),
+ (0x3267, 'M', u'ᄋ'),
+ (0x3268, 'M', u'ᄌ'),
+ (0x3269, 'M', u'ᄎ'),
+ (0x326A, 'M', u'ᄏ'),
+ (0x326B, 'M', u'ᄐ'),
+ (0x326C, 'M', u'ᄑ'),
+ (0x326D, 'M', u'ᄒ'),
+ (0x326E, 'M', u'가'),
+ (0x326F, 'M', u'나'),
+ (0x3270, 'M', u'다'),
+ (0x3271, 'M', u'라'),
+ (0x3272, 'M', u'마'),
+ (0x3273, 'M', u'바'),
+ (0x3274, 'M', u'사'),
+ (0x3275, 'M', u'아'),
+ (0x3276, 'M', u'자'),
+ (0x3277, 'M', u'차'),
+ (0x3278, 'M', u'카'),
+ (0x3279, 'M', u'타'),
+ (0x327A, 'M', u'파'),
+ (0x327B, 'M', u'하'),
+ (0x327C, 'M', u'참고'),
+ (0x327D, 'M', u'주의'),
+ (0x327E, 'M', u'우'),
+ (0x327F, 'V'),
+ (0x3280, 'M', u'一'),
+ (0x3281, 'M', u'二'),
+ (0x3282, 'M', u'三'),
+ (0x3283, 'M', u'四'),
+ (0x3284, 'M', u'五'),
+ (0x3285, 'M', u'六'),
+ (0x3286, 'M', u'七'),
+ (0x3287, 'M', u'八'),
+ (0x3288, 'M', u'九'),
+ (0x3289, 'M', u'十'),
+ (0x328A, 'M', u'月'),
+ (0x328B, 'M', u'火'),
+ (0x328C, 'M', u'水'),
+ (0x328D, 'M', u'木'),
+ (0x328E, 'M', u'金'),
+ (0x328F, 'M', u'土'),
+ (0x3290, 'M', u'日'),
+ (0x3291, 'M', u'株'),
+ (0x3292, 'M', u'有'),
+ (0x3293, 'M', u'社'),
+ (0x3294, 'M', u'名'),
+ (0x3295, 'M', u'特'),
+ (0x3296, 'M', u'財'),
+ (0x3297, 'M', u'祝'),
+ (0x3298, 'M', u'労'),
+ (0x3299, 'M', u'秘'),
+ (0x329A, 'M', u'男'),
+ (0x329B, 'M', u'女'),
+ (0x329C, 'M', u'適'),
+ (0x329D, 'M', u'優'),
+ (0x329E, 'M', u'印'),
+ (0x329F, 'M', u'注'),
+ (0x32A0, 'M', u'項'),
+ (0x32A1, 'M', u'休'),
+ (0x32A2, 'M', u'写'),
+ (0x32A3, 'M', u'正'),
+ (0x32A4, 'M', u'上'),
+ (0x32A5, 'M', u'中'),
+ (0x32A6, 'M', u'下'),
+ (0x32A7, 'M', u'左'),
+ (0x32A8, 'M', u'右'),
+ (0x32A9, 'M', u'医'),
+ (0x32AA, 'M', u'宗'),
+ (0x32AB, 'M', u'学'),
+ (0x32AC, 'M', u'監'),
+ (0x32AD, 'M', u'企'),
+ (0x32AE, 'M', u'資'),
+ (0x32AF, 'M', u'協'),
+ (0x32B0, 'M', u'夜'),
+ (0x32B1, 'M', u'36'),
+ (0x32B2, 'M', u'37'),
+ (0x32B3, 'M', u'38'),
+ (0x32B4, 'M', u'39'),
+ (0x32B5, 'M', u'40'),
+ (0x32B6, 'M', u'41'),
+ (0x32B7, 'M', u'42'),
+ (0x32B8, 'M', u'43'),
+ (0x32B9, 'M', u'44'),
+ (0x32BA, 'M', u'45'),
+ (0x32BB, 'M', u'46'),
+ (0x32BC, 'M', u'47'),
+ (0x32BD, 'M', u'48'),
+ (0x32BE, 'M', u'49'),
+ (0x32BF, 'M', u'50'),
+ (0x32C0, 'M', u'1月'),
+ (0x32C1, 'M', u'2月'),
+ (0x32C2, 'M', u'3月'),
+ (0x32C3, 'M', u'4月'),
+ (0x32C4, 'M', u'5月'),
+ (0x32C5, 'M', u'6月'),
+ (0x32C6, 'M', u'7月'),
+ (0x32C7, 'M', u'8月'),
+ ]
+
+def _seg_32():
+ return [
+ (0x32C8, 'M', u'9月'),
+ (0x32C9, 'M', u'10月'),
+ (0x32CA, 'M', u'11月'),
+ (0x32CB, 'M', u'12月'),
+ (0x32CC, 'M', u'hg'),
+ (0x32CD, 'M', u'erg'),
+ (0x32CE, 'M', u'ev'),
+ (0x32CF, 'M', u'ltd'),
+ (0x32D0, 'M', u'ア'),
+ (0x32D1, 'M', u'イ'),
+ (0x32D2, 'M', u'ウ'),
+ (0x32D3, 'M', u'エ'),
+ (0x32D4, 'M', u'オ'),
+ (0x32D5, 'M', u'カ'),
+ (0x32D6, 'M', u'キ'),
+ (0x32D7, 'M', u'ク'),
+ (0x32D8, 'M', u'ケ'),
+ (0x32D9, 'M', u'コ'),
+ (0x32DA, 'M', u'サ'),
+ (0x32DB, 'M', u'シ'),
+ (0x32DC, 'M', u'ス'),
+ (0x32DD, 'M', u'セ'),
+ (0x32DE, 'M', u'ソ'),
+ (0x32DF, 'M', u'タ'),
+ (0x32E0, 'M', u'チ'),
+ (0x32E1, 'M', u'ツ'),
+ (0x32E2, 'M', u'テ'),
+ (0x32E3, 'M', u'ト'),
+ (0x32E4, 'M', u'ナ'),
+ (0x32E5, 'M', u'ニ'),
+ (0x32E6, 'M', u'ヌ'),
+ (0x32E7, 'M', u'ネ'),
+ (0x32E8, 'M', u'ノ'),
+ (0x32E9, 'M', u'ハ'),
+ (0x32EA, 'M', u'ヒ'),
+ (0x32EB, 'M', u'フ'),
+ (0x32EC, 'M', u'ヘ'),
+ (0x32ED, 'M', u'ホ'),
+ (0x32EE, 'M', u'マ'),
+ (0x32EF, 'M', u'ミ'),
+ (0x32F0, 'M', u'ム'),
+ (0x32F1, 'M', u'メ'),
+ (0x32F2, 'M', u'モ'),
+ (0x32F3, 'M', u'ヤ'),
+ (0x32F4, 'M', u'ユ'),
+ (0x32F5, 'M', u'ヨ'),
+ (0x32F6, 'M', u'ラ'),
+ (0x32F7, 'M', u'リ'),
+ (0x32F8, 'M', u'ル'),
+ (0x32F9, 'M', u'レ'),
+ (0x32FA, 'M', u'ロ'),
+ (0x32FB, 'M', u'ワ'),
+ (0x32FC, 'M', u'ヰ'),
+ (0x32FD, 'M', u'ヱ'),
+ (0x32FE, 'M', u'ヲ'),
+ (0x32FF, 'X'),
+ (0x3300, 'M', u'アパート'),
+ (0x3301, 'M', u'アルファ'),
+ (0x3302, 'M', u'アンペア'),
+ (0x3303, 'M', u'アール'),
+ (0x3304, 'M', u'イニング'),
+ (0x3305, 'M', u'インチ'),
+ (0x3306, 'M', u'ウォン'),
+ (0x3307, 'M', u'エスクード'),
+ (0x3308, 'M', u'エーカー'),
+ (0x3309, 'M', u'オンス'),
+ (0x330A, 'M', u'オーム'),
+ (0x330B, 'M', u'カイリ'),
+ (0x330C, 'M', u'カラット'),
+ (0x330D, 'M', u'カロリー'),
+ (0x330E, 'M', u'ガロン'),
+ (0x330F, 'M', u'ガンマ'),
+ (0x3310, 'M', u'ギガ'),
+ (0x3311, 'M', u'ギニー'),
+ (0x3312, 'M', u'キュリー'),
+ (0x3313, 'M', u'ギルダー'),
+ (0x3314, 'M', u'キロ'),
+ (0x3315, 'M', u'キログラム'),
+ (0x3316, 'M', u'キロメートル'),
+ (0x3317, 'M', u'キロワット'),
+ (0x3318, 'M', u'グラム'),
+ (0x3319, 'M', u'グラムトン'),
+ (0x331A, 'M', u'クルゼイロ'),
+ (0x331B, 'M', u'クローネ'),
+ (0x331C, 'M', u'ケース'),
+ (0x331D, 'M', u'コルナ'),
+ (0x331E, 'M', u'コーポ'),
+ (0x331F, 'M', u'サイクル'),
+ (0x3320, 'M', u'サンチーム'),
+ (0x3321, 'M', u'シリング'),
+ (0x3322, 'M', u'センチ'),
+ (0x3323, 'M', u'セント'),
+ (0x3324, 'M', u'ダース'),
+ (0x3325, 'M', u'デシ'),
+ (0x3326, 'M', u'ドル'),
+ (0x3327, 'M', u'トン'),
+ (0x3328, 'M', u'ナノ'),
+ (0x3329, 'M', u'ノット'),
+ (0x332A, 'M', u'ハイツ'),
+ (0x332B, 'M', u'パーセント'),
+ ]
+
+def _seg_33():
+ return [
+ (0x332C, 'M', u'パーツ'),
+ (0x332D, 'M', u'バーレル'),
+ (0x332E, 'M', u'ピアストル'),
+ (0x332F, 'M', u'ピクル'),
+ (0x3330, 'M', u'ピコ'),
+ (0x3331, 'M', u'ビル'),
+ (0x3332, 'M', u'ファラッド'),
+ (0x3333, 'M', u'フィート'),
+ (0x3334, 'M', u'ブッシェル'),
+ (0x3335, 'M', u'フラン'),
+ (0x3336, 'M', u'ヘクタール'),
+ (0x3337, 'M', u'ペソ'),
+ (0x3338, 'M', u'ペニヒ'),
+ (0x3339, 'M', u'ヘルツ'),
+ (0x333A, 'M', u'ペンス'),
+ (0x333B, 'M', u'ページ'),
+ (0x333C, 'M', u'ベータ'),
+ (0x333D, 'M', u'ポイント'),
+ (0x333E, 'M', u'ボルト'),
+ (0x333F, 'M', u'ホン'),
+ (0x3340, 'M', u'ポンド'),
+ (0x3341, 'M', u'ホール'),
+ (0x3342, 'M', u'ホーン'),
+ (0x3343, 'M', u'マイクロ'),
+ (0x3344, 'M', u'マイル'),
+ (0x3345, 'M', u'マッハ'),
+ (0x3346, 'M', u'マルク'),
+ (0x3347, 'M', u'マンション'),
+ (0x3348, 'M', u'ミクロン'),
+ (0x3349, 'M', u'ミリ'),
+ (0x334A, 'M', u'ミリバール'),
+ (0x334B, 'M', u'メガ'),
+ (0x334C, 'M', u'メガトン'),
+ (0x334D, 'M', u'メートル'),
+ (0x334E, 'M', u'ヤード'),
+ (0x334F, 'M', u'ヤール'),
+ (0x3350, 'M', u'ユアン'),
+ (0x3351, 'M', u'リットル'),
+ (0x3352, 'M', u'リラ'),
+ (0x3353, 'M', u'ルピー'),
+ (0x3354, 'M', u'ルーブル'),
+ (0x3355, 'M', u'レム'),
+ (0x3356, 'M', u'レントゲン'),
+ (0x3357, 'M', u'ワット'),
+ (0x3358, 'M', u'0点'),
+ (0x3359, 'M', u'1点'),
+ (0x335A, 'M', u'2点'),
+ (0x335B, 'M', u'3点'),
+ (0x335C, 'M', u'4点'),
+ (0x335D, 'M', u'5点'),
+ (0x335E, 'M', u'6点'),
+ (0x335F, 'M', u'7点'),
+ (0x3360, 'M', u'8点'),
+ (0x3361, 'M', u'9点'),
+ (0x3362, 'M', u'10点'),
+ (0x3363, 'M', u'11点'),
+ (0x3364, 'M', u'12点'),
+ (0x3365, 'M', u'13点'),
+ (0x3366, 'M', u'14点'),
+ (0x3367, 'M', u'15点'),
+ (0x3368, 'M', u'16点'),
+ (0x3369, 'M', u'17点'),
+ (0x336A, 'M', u'18点'),
+ (0x336B, 'M', u'19点'),
+ (0x336C, 'M', u'20点'),
+ (0x336D, 'M', u'21点'),
+ (0x336E, 'M', u'22点'),
+ (0x336F, 'M', u'23点'),
+ (0x3370, 'M', u'24点'),
+ (0x3371, 'M', u'hpa'),
+ (0x3372, 'M', u'da'),
+ (0x3373, 'M', u'au'),
+ (0x3374, 'M', u'bar'),
+ (0x3375, 'M', u'ov'),
+ (0x3376, 'M', u'pc'),
+ (0x3377, 'M', u'dm'),
+ (0x3378, 'M', u'dm2'),
+ (0x3379, 'M', u'dm3'),
+ (0x337A, 'M', u'iu'),
+ (0x337B, 'M', u'平成'),
+ (0x337C, 'M', u'昭和'),
+ (0x337D, 'M', u'大正'),
+ (0x337E, 'M', u'明治'),
+ (0x337F, 'M', u'株式会社'),
+ (0x3380, 'M', u'pa'),
+ (0x3381, 'M', u'na'),
+ (0x3382, 'M', u'μa'),
+ (0x3383, 'M', u'ma'),
+ (0x3384, 'M', u'ka'),
+ (0x3385, 'M', u'kb'),
+ (0x3386, 'M', u'mb'),
+ (0x3387, 'M', u'gb'),
+ (0x3388, 'M', u'cal'),
+ (0x3389, 'M', u'kcal'),
+ (0x338A, 'M', u'pf'),
+ (0x338B, 'M', u'nf'),
+ (0x338C, 'M', u'μf'),
+ (0x338D, 'M', u'μg'),
+ (0x338E, 'M', u'mg'),
+ (0x338F, 'M', u'kg'),
+ ]
+
+def _seg_34():
+ return [
+ (0x3390, 'M', u'hz'),
+ (0x3391, 'M', u'khz'),
+ (0x3392, 'M', u'mhz'),
+ (0x3393, 'M', u'ghz'),
+ (0x3394, 'M', u'thz'),
+ (0x3395, 'M', u'μl'),
+ (0x3396, 'M', u'ml'),
+ (0x3397, 'M', u'dl'),
+ (0x3398, 'M', u'kl'),
+ (0x3399, 'M', u'fm'),
+ (0x339A, 'M', u'nm'),
+ (0x339B, 'M', u'μm'),
+ (0x339C, 'M', u'mm'),
+ (0x339D, 'M', u'cm'),
+ (0x339E, 'M', u'km'),
+ (0x339F, 'M', u'mm2'),
+ (0x33A0, 'M', u'cm2'),
+ (0x33A1, 'M', u'm2'),
+ (0x33A2, 'M', u'km2'),
+ (0x33A3, 'M', u'mm3'),
+ (0x33A4, 'M', u'cm3'),
+ (0x33A5, 'M', u'm3'),
+ (0x33A6, 'M', u'km3'),
+ (0x33A7, 'M', u'm∕s'),
+ (0x33A8, 'M', u'm∕s2'),
+ (0x33A9, 'M', u'pa'),
+ (0x33AA, 'M', u'kpa'),
+ (0x33AB, 'M', u'mpa'),
+ (0x33AC, 'M', u'gpa'),
+ (0x33AD, 'M', u'rad'),
+ (0x33AE, 'M', u'rad∕s'),
+ (0x33AF, 'M', u'rad∕s2'),
+ (0x33B0, 'M', u'ps'),
+ (0x33B1, 'M', u'ns'),
+ (0x33B2, 'M', u'μs'),
+ (0x33B3, 'M', u'ms'),
+ (0x33B4, 'M', u'pv'),
+ (0x33B5, 'M', u'nv'),
+ (0x33B6, 'M', u'μv'),
+ (0x33B7, 'M', u'mv'),
+ (0x33B8, 'M', u'kv'),
+ (0x33B9, 'M', u'mv'),
+ (0x33BA, 'M', u'pw'),
+ (0x33BB, 'M', u'nw'),
+ (0x33BC, 'M', u'μw'),
+ (0x33BD, 'M', u'mw'),
+ (0x33BE, 'M', u'kw'),
+ (0x33BF, 'M', u'mw'),
+ (0x33C0, 'M', u'kω'),
+ (0x33C1, 'M', u'mω'),
+ (0x33C2, 'X'),
+ (0x33C3, 'M', u'bq'),
+ (0x33C4, 'M', u'cc'),
+ (0x33C5, 'M', u'cd'),
+ (0x33C6, 'M', u'c∕kg'),
+ (0x33C7, 'X'),
+ (0x33C8, 'M', u'db'),
+ (0x33C9, 'M', u'gy'),
+ (0x33CA, 'M', u'ha'),
+ (0x33CB, 'M', u'hp'),
+ (0x33CC, 'M', u'in'),
+ (0x33CD, 'M', u'kk'),
+ (0x33CE, 'M', u'km'),
+ (0x33CF, 'M', u'kt'),
+ (0x33D0, 'M', u'lm'),
+ (0x33D1, 'M', u'ln'),
+ (0x33D2, 'M', u'log'),
+ (0x33D3, 'M', u'lx'),
+ (0x33D4, 'M', u'mb'),
+ (0x33D5, 'M', u'mil'),
+ (0x33D6, 'M', u'mol'),
+ (0x33D7, 'M', u'ph'),
+ (0x33D8, 'X'),
+ (0x33D9, 'M', u'ppm'),
+ (0x33DA, 'M', u'pr'),
+ (0x33DB, 'M', u'sr'),
+ (0x33DC, 'M', u'sv'),
+ (0x33DD, 'M', u'wb'),
+ (0x33DE, 'M', u'v∕m'),
+ (0x33DF, 'M', u'a∕m'),
+ (0x33E0, 'M', u'1日'),
+ (0x33E1, 'M', u'2日'),
+ (0x33E2, 'M', u'3日'),
+ (0x33E3, 'M', u'4日'),
+ (0x33E4, 'M', u'5日'),
+ (0x33E5, 'M', u'6日'),
+ (0x33E6, 'M', u'7日'),
+ (0x33E7, 'M', u'8日'),
+ (0x33E8, 'M', u'9日'),
+ (0x33E9, 'M', u'10日'),
+ (0x33EA, 'M', u'11日'),
+ (0x33EB, 'M', u'12日'),
+ (0x33EC, 'M', u'13日'),
+ (0x33ED, 'M', u'14日'),
+ (0x33EE, 'M', u'15日'),
+ (0x33EF, 'M', u'16日'),
+ (0x33F0, 'M', u'17日'),
+ (0x33F1, 'M', u'18日'),
+ (0x33F2, 'M', u'19日'),
+ (0x33F3, 'M', u'20日'),
+ ]
+
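+# A minimal sketch (not part of the generated table) of how a row covering
+# a given code point can be found, assuming the _seg_NN() lists are
+# concatenated in order into one sorted sequence; _lookup_status is a
+# hypothetical helper added purely for illustration.
+import bisect
+
+def _lookup_status(code_point, table):
+    """Return the (start, status[, mapping]) row that covers code_point."""
+    # 'Z' sorts after every status letter used in the table ('3', 'I', 'M',
+    # 'V', 'X'), so bisect_left finds the first row starting beyond
+    # code_point; its predecessor is the row whose range contains it.
+    idx = bisect.bisect_left(table, (code_point, 'Z')) - 1
+    return table[max(idx, 0)]
+
+# For example, _lookup_status(0x3251, tuple(_seg_30())) would return
+# (0x3251, 'M', u'21'), the circled-number mapping listed above.
+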
+def _seg_35():
+ return [
+ (0x33F4, 'M', u'21日'),
+ (0x33F5, 'M', u'22日'),
+ (0x33F6, 'M', u'23日'),
+ (0x33F7, 'M', u'24日'),
+ (0x33F8, 'M', u'25日'),
+ (0x33F9, 'M', u'26日'),
+ (0x33FA, 'M', u'27日'),
+ (0x33FB, 'M', u'28日'),
+ (0x33FC, 'M', u'29日'),
+ (0x33FD, 'M', u'30日'),
+ (0x33FE, 'M', u'31日'),
+ (0x33FF, 'M', u'gal'),
+ (0x3400, 'V'),
+ (0x4DB6, 'X'),
+ (0x4DC0, 'V'),
+ (0x9FF0, 'X'),
+ (0xA000, 'V'),
+ (0xA48D, 'X'),
+ (0xA490, 'V'),
+ (0xA4C7, 'X'),
+ (0xA4D0, 'V'),
+ (0xA62C, 'X'),
+ (0xA640, 'M', u'ꙁ'),
+ (0xA641, 'V'),
+ (0xA642, 'M', u'ꙃ'),
+ (0xA643, 'V'),
+ (0xA644, 'M', u'ꙅ'),
+ (0xA645, 'V'),
+ (0xA646, 'M', u'ꙇ'),
+ (0xA647, 'V'),
+ (0xA648, 'M', u'ꙉ'),
+ (0xA649, 'V'),
+ (0xA64A, 'M', u'ꙋ'),
+ (0xA64B, 'V'),
+ (0xA64C, 'M', u'ꙍ'),
+ (0xA64D, 'V'),
+ (0xA64E, 'M', u'ꙏ'),
+ (0xA64F, 'V'),
+ (0xA650, 'M', u'ꙑ'),
+ (0xA651, 'V'),
+ (0xA652, 'M', u'ꙓ'),
+ (0xA653, 'V'),
+ (0xA654, 'M', u'ꙕ'),
+ (0xA655, 'V'),
+ (0xA656, 'M', u'ꙗ'),
+ (0xA657, 'V'),
+ (0xA658, 'M', u'ꙙ'),
+ (0xA659, 'V'),
+ (0xA65A, 'M', u'ꙛ'),
+ (0xA65B, 'V'),
+ (0xA65C, 'M', u'ꙝ'),
+ (0xA65D, 'V'),
+ (0xA65E, 'M', u'ꙟ'),
+ (0xA65F, 'V'),
+ (0xA660, 'M', u'ꙡ'),
+ (0xA661, 'V'),
+ (0xA662, 'M', u'ꙣ'),
+ (0xA663, 'V'),
+ (0xA664, 'M', u'ꙥ'),
+ (0xA665, 'V'),
+ (0xA666, 'M', u'ꙧ'),
+ (0xA667, 'V'),
+ (0xA668, 'M', u'ꙩ'),
+ (0xA669, 'V'),
+ (0xA66A, 'M', u'ꙫ'),
+ (0xA66B, 'V'),
+ (0xA66C, 'M', u'ꙭ'),
+ (0xA66D, 'V'),
+ (0xA680, 'M', u'ꚁ'),
+ (0xA681, 'V'),
+ (0xA682, 'M', u'ꚃ'),
+ (0xA683, 'V'),
+ (0xA684, 'M', u'ꚅ'),
+ (0xA685, 'V'),
+ (0xA686, 'M', u'ꚇ'),
+ (0xA687, 'V'),
+ (0xA688, 'M', u'ꚉ'),
+ (0xA689, 'V'),
+ (0xA68A, 'M', u'ꚋ'),
+ (0xA68B, 'V'),
+ (0xA68C, 'M', u'ꚍ'),
+ (0xA68D, 'V'),
+ (0xA68E, 'M', u'ꚏ'),
+ (0xA68F, 'V'),
+ (0xA690, 'M', u'ꚑ'),
+ (0xA691, 'V'),
+ (0xA692, 'M', u'ꚓ'),
+ (0xA693, 'V'),
+ (0xA694, 'M', u'ꚕ'),
+ (0xA695, 'V'),
+ (0xA696, 'M', u'ꚗ'),
+ (0xA697, 'V'),
+ (0xA698, 'M', u'ꚙ'),
+ (0xA699, 'V'),
+ (0xA69A, 'M', u'ꚛ'),
+ (0xA69B, 'V'),
+ (0xA69C, 'M', u'ъ'),
+ (0xA69D, 'M', u'ь'),
+ (0xA69E, 'V'),
+ (0xA6F8, 'X'),
+ ]
+
+def _seg_36():
+ return [
+ (0xA700, 'V'),
+ (0xA722, 'M', u'ꜣ'),
+ (0xA723, 'V'),
+ (0xA724, 'M', u'ꜥ'),
+ (0xA725, 'V'),
+ (0xA726, 'M', u'ꜧ'),
+ (0xA727, 'V'),
+ (0xA728, 'M', u'ꜩ'),
+ (0xA729, 'V'),
+ (0xA72A, 'M', u'ꜫ'),
+ (0xA72B, 'V'),
+ (0xA72C, 'M', u'ꜭ'),
+ (0xA72D, 'V'),
+ (0xA72E, 'M', u'ꜯ'),
+ (0xA72F, 'V'),
+ (0xA732, 'M', u'ꜳ'),
+ (0xA733, 'V'),
+ (0xA734, 'M', u'ꜵ'),
+ (0xA735, 'V'),
+ (0xA736, 'M', u'ꜷ'),
+ (0xA737, 'V'),
+ (0xA738, 'M', u'ꜹ'),
+ (0xA739, 'V'),
+ (0xA73A, 'M', u'ꜻ'),
+ (0xA73B, 'V'),
+ (0xA73C, 'M', u'ꜽ'),
+ (0xA73D, 'V'),
+ (0xA73E, 'M', u'ꜿ'),
+ (0xA73F, 'V'),
+ (0xA740, 'M', u'ꝁ'),
+ (0xA741, 'V'),
+ (0xA742, 'M', u'ꝃ'),
+ (0xA743, 'V'),
+ (0xA744, 'M', u'ꝅ'),
+ (0xA745, 'V'),
+ (0xA746, 'M', u'ꝇ'),
+ (0xA747, 'V'),
+ (0xA748, 'M', u'ꝉ'),
+ (0xA749, 'V'),
+ (0xA74A, 'M', u'ꝋ'),
+ (0xA74B, 'V'),
+ (0xA74C, 'M', u'ꝍ'),
+ (0xA74D, 'V'),
+ (0xA74E, 'M', u'ꝏ'),
+ (0xA74F, 'V'),
+ (0xA750, 'M', u'ꝑ'),
+ (0xA751, 'V'),
+ (0xA752, 'M', u'ꝓ'),
+ (0xA753, 'V'),
+ (0xA754, 'M', u'ꝕ'),
+ (0xA755, 'V'),
+ (0xA756, 'M', u'ꝗ'),
+ (0xA757, 'V'),
+ (0xA758, 'M', u'ꝙ'),
+ (0xA759, 'V'),
+ (0xA75A, 'M', u'ꝛ'),
+ (0xA75B, 'V'),
+ (0xA75C, 'M', u'ꝝ'),
+ (0xA75D, 'V'),
+ (0xA75E, 'M', u'ꝟ'),
+ (0xA75F, 'V'),
+ (0xA760, 'M', u'ꝡ'),
+ (0xA761, 'V'),
+ (0xA762, 'M', u'ꝣ'),
+ (0xA763, 'V'),
+ (0xA764, 'M', u'ꝥ'),
+ (0xA765, 'V'),
+ (0xA766, 'M', u'ꝧ'),
+ (0xA767, 'V'),
+ (0xA768, 'M', u'ꝩ'),
+ (0xA769, 'V'),
+ (0xA76A, 'M', u'ꝫ'),
+ (0xA76B, 'V'),
+ (0xA76C, 'M', u'ꝭ'),
+ (0xA76D, 'V'),
+ (0xA76E, 'M', u'ꝯ'),
+ (0xA76F, 'V'),
+ (0xA770, 'M', u'ꝯ'),
+ (0xA771, 'V'),
+ (0xA779, 'M', u'ꝺ'),
+ (0xA77A, 'V'),
+ (0xA77B, 'M', u'ꝼ'),
+ (0xA77C, 'V'),
+ (0xA77D, 'M', u'ᵹ'),
+ (0xA77E, 'M', u'ꝿ'),
+ (0xA77F, 'V'),
+ (0xA780, 'M', u'ꞁ'),
+ (0xA781, 'V'),
+ (0xA782, 'M', u'ꞃ'),
+ (0xA783, 'V'),
+ (0xA784, 'M', u'ꞅ'),
+ (0xA785, 'V'),
+ (0xA786, 'M', u'ꞇ'),
+ (0xA787, 'V'),
+ (0xA78B, 'M', u'ꞌ'),
+ (0xA78C, 'V'),
+ (0xA78D, 'M', u'ɥ'),
+ (0xA78E, 'V'),
+ (0xA790, 'M', u'ꞑ'),
+ (0xA791, 'V'),
+ ]
+
+def _seg_37():
+ return [
+ (0xA792, 'M', u'ꞓ'),
+ (0xA793, 'V'),
+ (0xA796, 'M', u'ꞗ'),
+ (0xA797, 'V'),
+ (0xA798, 'M', u'ꞙ'),
+ (0xA799, 'V'),
+ (0xA79A, 'M', u'ꞛ'),
+ (0xA79B, 'V'),
+ (0xA79C, 'M', u'ꞝ'),
+ (0xA79D, 'V'),
+ (0xA79E, 'M', u'ꞟ'),
+ (0xA79F, 'V'),
+ (0xA7A0, 'M', u'ꞡ'),
+ (0xA7A1, 'V'),
+ (0xA7A2, 'M', u'ꞣ'),
+ (0xA7A3, 'V'),
+ (0xA7A4, 'M', u'ꞥ'),
+ (0xA7A5, 'V'),
+ (0xA7A6, 'M', u'ꞧ'),
+ (0xA7A7, 'V'),
+ (0xA7A8, 'M', u'ꞩ'),
+ (0xA7A9, 'V'),
+ (0xA7AA, 'M', u'ɦ'),
+ (0xA7AB, 'M', u'ɜ'),
+ (0xA7AC, 'M', u'ɡ'),
+ (0xA7AD, 'M', u'ɬ'),
+ (0xA7AE, 'M', u'ɪ'),
+ (0xA7AF, 'V'),
+ (0xA7B0, 'M', u'ʞ'),
+ (0xA7B1, 'M', u'ʇ'),
+ (0xA7B2, 'M', u'ʝ'),
+ (0xA7B3, 'M', u'ꭓ'),
+ (0xA7B4, 'M', u'ꞵ'),
+ (0xA7B5, 'V'),
+ (0xA7B6, 'M', u'ꞷ'),
+ (0xA7B7, 'V'),
+ (0xA7B8, 'X'),
+ (0xA7B9, 'V'),
+ (0xA7BA, 'X'),
+ (0xA7F7, 'V'),
+ (0xA7F8, 'M', u'ħ'),
+ (0xA7F9, 'M', u'œ'),
+ (0xA7FA, 'V'),
+ (0xA82C, 'X'),
+ (0xA830, 'V'),
+ (0xA83A, 'X'),
+ (0xA840, 'V'),
+ (0xA878, 'X'),
+ (0xA880, 'V'),
+ (0xA8C6, 'X'),
+ (0xA8CE, 'V'),
+ (0xA8DA, 'X'),
+ (0xA8E0, 'V'),
+ (0xA954, 'X'),
+ (0xA95F, 'V'),
+ (0xA97D, 'X'),
+ (0xA980, 'V'),
+ (0xA9CE, 'X'),
+ (0xA9CF, 'V'),
+ (0xA9DA, 'X'),
+ (0xA9DE, 'V'),
+ (0xA9FF, 'X'),
+ (0xAA00, 'V'),
+ (0xAA37, 'X'),
+ (0xAA40, 'V'),
+ (0xAA4E, 'X'),
+ (0xAA50, 'V'),
+ (0xAA5A, 'X'),
+ (0xAA5C, 'V'),
+ (0xAAC3, 'X'),
+ (0xAADB, 'V'),
+ (0xAAF7, 'X'),
+ (0xAB01, 'V'),
+ (0xAB07, 'X'),
+ (0xAB09, 'V'),
+ (0xAB0F, 'X'),
+ (0xAB11, 'V'),
+ (0xAB17, 'X'),
+ (0xAB20, 'V'),
+ (0xAB27, 'X'),
+ (0xAB28, 'V'),
+ (0xAB2F, 'X'),
+ (0xAB30, 'V'),
+ (0xAB5C, 'M', u'ꜧ'),
+ (0xAB5D, 'M', u'ꬷ'),
+ (0xAB5E, 'M', u'ɫ'),
+ (0xAB5F, 'M', u'ꭒ'),
+ (0xAB60, 'V'),
+ (0xAB66, 'X'),
+ (0xAB70, 'M', u'Ꭰ'),
+ (0xAB71, 'M', u'Ꭱ'),
+ (0xAB72, 'M', u'Ꭲ'),
+ (0xAB73, 'M', u'Ꭳ'),
+ (0xAB74, 'M', u'Ꭴ'),
+ (0xAB75, 'M', u'Ꭵ'),
+ (0xAB76, 'M', u'Ꭶ'),
+ (0xAB77, 'M', u'Ꭷ'),
+ (0xAB78, 'M', u'Ꭸ'),
+ (0xAB79, 'M', u'Ꭹ'),
+ (0xAB7A, 'M', u'Ꭺ'),
+ ]
+
+def _seg_38():
+ return [
+ (0xAB7B, 'M', u'Ꭻ'),
+ (0xAB7C, 'M', u'Ꭼ'),
+ (0xAB7D, 'M', u'Ꭽ'),
+ (0xAB7E, 'M', u'Ꭾ'),
+ (0xAB7F, 'M', u'Ꭿ'),
+ (0xAB80, 'M', u'Ꮀ'),
+ (0xAB81, 'M', u'Ꮁ'),
+ (0xAB82, 'M', u'Ꮂ'),
+ (0xAB83, 'M', u'Ꮃ'),
+ (0xAB84, 'M', u'Ꮄ'),
+ (0xAB85, 'M', u'Ꮅ'),
+ (0xAB86, 'M', u'Ꮆ'),
+ (0xAB87, 'M', u'Ꮇ'),
+ (0xAB88, 'M', u'Ꮈ'),
+ (0xAB89, 'M', u'Ꮉ'),
+ (0xAB8A, 'M', u'Ꮊ'),
+ (0xAB8B, 'M', u'Ꮋ'),
+ (0xAB8C, 'M', u'Ꮌ'),
+ (0xAB8D, 'M', u'Ꮍ'),
+ (0xAB8E, 'M', u'Ꮎ'),
+ (0xAB8F, 'M', u'Ꮏ'),
+ (0xAB90, 'M', u'Ꮐ'),
+ (0xAB91, 'M', u'Ꮑ'),
+ (0xAB92, 'M', u'Ꮒ'),
+ (0xAB93, 'M', u'Ꮓ'),
+ (0xAB94, 'M', u'Ꮔ'),
+ (0xAB95, 'M', u'Ꮕ'),
+ (0xAB96, 'M', u'Ꮖ'),
+ (0xAB97, 'M', u'Ꮗ'),
+ (0xAB98, 'M', u'Ꮘ'),
+ (0xAB99, 'M', u'Ꮙ'),
+ (0xAB9A, 'M', u'Ꮚ'),
+ (0xAB9B, 'M', u'Ꮛ'),
+ (0xAB9C, 'M', u'Ꮜ'),
+ (0xAB9D, 'M', u'Ꮝ'),
+ (0xAB9E, 'M', u'Ꮞ'),
+ (0xAB9F, 'M', u'Ꮟ'),
+ (0xABA0, 'M', u'Ꮠ'),
+ (0xABA1, 'M', u'Ꮡ'),
+ (0xABA2, 'M', u'Ꮢ'),
+ (0xABA3, 'M', u'Ꮣ'),
+ (0xABA4, 'M', u'Ꮤ'),
+ (0xABA5, 'M', u'Ꮥ'),
+ (0xABA6, 'M', u'Ꮦ'),
+ (0xABA7, 'M', u'Ꮧ'),
+ (0xABA8, 'M', u'Ꮨ'),
+ (0xABA9, 'M', u'Ꮩ'),
+ (0xABAA, 'M', u'Ꮪ'),
+ (0xABAB, 'M', u'Ꮫ'),
+ (0xABAC, 'M', u'Ꮬ'),
+ (0xABAD, 'M', u'Ꮭ'),
+ (0xABAE, 'M', u'Ꮮ'),
+ (0xABAF, 'M', u'Ꮯ'),
+ (0xABB0, 'M', u'Ꮰ'),
+ (0xABB1, 'M', u'Ꮱ'),
+ (0xABB2, 'M', u'Ꮲ'),
+ (0xABB3, 'M', u'Ꮳ'),
+ (0xABB4, 'M', u'Ꮴ'),
+ (0xABB5, 'M', u'Ꮵ'),
+ (0xABB6, 'M', u'Ꮶ'),
+ (0xABB7, 'M', u'Ꮷ'),
+ (0xABB8, 'M', u'Ꮸ'),
+ (0xABB9, 'M', u'Ꮹ'),
+ (0xABBA, 'M', u'Ꮺ'),
+ (0xABBB, 'M', u'Ꮻ'),
+ (0xABBC, 'M', u'Ꮼ'),
+ (0xABBD, 'M', u'Ꮽ'),
+ (0xABBE, 'M', u'Ꮾ'),
+ (0xABBF, 'M', u'Ꮿ'),
+ (0xABC0, 'V'),
+ (0xABEE, 'X'),
+ (0xABF0, 'V'),
+ (0xABFA, 'X'),
+ (0xAC00, 'V'),
+ (0xD7A4, 'X'),
+ (0xD7B0, 'V'),
+ (0xD7C7, 'X'),
+ (0xD7CB, 'V'),
+ (0xD7FC, 'X'),
+ (0xF900, 'M', u'豈'),
+ (0xF901, 'M', u'更'),
+ (0xF902, 'M', u'車'),
+ (0xF903, 'M', u'賈'),
+ (0xF904, 'M', u'滑'),
+ (0xF905, 'M', u'串'),
+ (0xF906, 'M', u'句'),
+ (0xF907, 'M', u'龜'),
+ (0xF909, 'M', u'契'),
+ (0xF90A, 'M', u'金'),
+ (0xF90B, 'M', u'喇'),
+ (0xF90C, 'M', u'奈'),
+ (0xF90D, 'M', u'懶'),
+ (0xF90E, 'M', u'癩'),
+ (0xF90F, 'M', u'羅'),
+ (0xF910, 'M', u'蘿'),
+ (0xF911, 'M', u'螺'),
+ (0xF912, 'M', u'裸'),
+ (0xF913, 'M', u'邏'),
+ (0xF914, 'M', u'樂'),
+ (0xF915, 'M', u'洛'),
+ ]
+
+def _seg_39():
+ return [
+ (0xF916, 'M', u'烙'),
+ (0xF917, 'M', u'珞'),
+ (0xF918, 'M', u'落'),
+ (0xF919, 'M', u'酪'),
+ (0xF91A, 'M', u'駱'),
+ (0xF91B, 'M', u'亂'),
+ (0xF91C, 'M', u'卵'),
+ (0xF91D, 'M', u'欄'),
+ (0xF91E, 'M', u'爛'),
+ (0xF91F, 'M', u'蘭'),
+ (0xF920, 'M', u'鸞'),
+ (0xF921, 'M', u'嵐'),
+ (0xF922, 'M', u'濫'),
+ (0xF923, 'M', u'藍'),
+ (0xF924, 'M', u'襤'),
+ (0xF925, 'M', u'拉'),
+ (0xF926, 'M', u'臘'),
+ (0xF927, 'M', u'蠟'),
+ (0xF928, 'M', u'廊'),
+ (0xF929, 'M', u'朗'),
+ (0xF92A, 'M', u'浪'),
+ (0xF92B, 'M', u'狼'),
+ (0xF92C, 'M', u'郎'),
+ (0xF92D, 'M', u'來'),
+ (0xF92E, 'M', u'冷'),
+ (0xF92F, 'M', u'勞'),
+ (0xF930, 'M', u'擄'),
+ (0xF931, 'M', u'櫓'),
+ (0xF932, 'M', u'爐'),
+ (0xF933, 'M', u'盧'),
+ (0xF934, 'M', u'老'),
+ (0xF935, 'M', u'蘆'),
+ (0xF936, 'M', u'虜'),
+ (0xF937, 'M', u'路'),
+ (0xF938, 'M', u'露'),
+ (0xF939, 'M', u'魯'),
+ (0xF93A, 'M', u'鷺'),
+ (0xF93B, 'M', u'碌'),
+ (0xF93C, 'M', u'祿'),
+ (0xF93D, 'M', u'綠'),
+ (0xF93E, 'M', u'菉'),
+ (0xF93F, 'M', u'錄'),
+ (0xF940, 'M', u'鹿'),
+ (0xF941, 'M', u'論'),
+ (0xF942, 'M', u'壟'),
+ (0xF943, 'M', u'弄'),
+ (0xF944, 'M', u'籠'),
+ (0xF945, 'M', u'聾'),
+ (0xF946, 'M', u'牢'),
+ (0xF947, 'M', u'磊'),
+ (0xF948, 'M', u'賂'),
+ (0xF949, 'M', u'雷'),
+ (0xF94A, 'M', u'壘'),
+ (0xF94B, 'M', u'屢'),
+ (0xF94C, 'M', u'樓'),
+ (0xF94D, 'M', u'淚'),
+ (0xF94E, 'M', u'漏'),
+ (0xF94F, 'M', u'累'),
+ (0xF950, 'M', u'縷'),
+ (0xF951, 'M', u'陋'),
+ (0xF952, 'M', u'勒'),
+ (0xF953, 'M', u'肋'),
+ (0xF954, 'M', u'凜'),
+ (0xF955, 'M', u'凌'),
+ (0xF956, 'M', u'稜'),
+ (0xF957, 'M', u'綾'),
+ (0xF958, 'M', u'菱'),
+ (0xF959, 'M', u'陵'),
+ (0xF95A, 'M', u'讀'),
+ (0xF95B, 'M', u'拏'),
+ (0xF95C, 'M', u'樂'),
+ (0xF95D, 'M', u'諾'),
+ (0xF95E, 'M', u'丹'),
+ (0xF95F, 'M', u'寧'),
+ (0xF960, 'M', u'怒'),
+ (0xF961, 'M', u'率'),
+ (0xF962, 'M', u'異'),
+ (0xF963, 'M', u'北'),
+ (0xF964, 'M', u'磻'),
+ (0xF965, 'M', u'便'),
+ (0xF966, 'M', u'復'),
+ (0xF967, 'M', u'不'),
+ (0xF968, 'M', u'泌'),
+ (0xF969, 'M', u'數'),
+ (0xF96A, 'M', u'索'),
+ (0xF96B, 'M', u'參'),
+ (0xF96C, 'M', u'塞'),
+ (0xF96D, 'M', u'省'),
+ (0xF96E, 'M', u'葉'),
+ (0xF96F, 'M', u'說'),
+ (0xF970, 'M', u'殺'),
+ (0xF971, 'M', u'辰'),
+ (0xF972, 'M', u'沈'),
+ (0xF973, 'M', u'拾'),
+ (0xF974, 'M', u'若'),
+ (0xF975, 'M', u'掠'),
+ (0xF976, 'M', u'略'),
+ (0xF977, 'M', u'亮'),
+ (0xF978, 'M', u'兩'),
+ (0xF979, 'M', u'凉'),
+ ]
+
+def _seg_40():
+ return [
+ (0xF97A, 'M', u'梁'),
+ (0xF97B, 'M', u'糧'),
+ (0xF97C, 'M', u'良'),
+ (0xF97D, 'M', u'諒'),
+ (0xF97E, 'M', u'量'),
+ (0xF97F, 'M', u'勵'),
+ (0xF980, 'M', u'呂'),
+ (0xF981, 'M', u'女'),
+ (0xF982, 'M', u'廬'),
+ (0xF983, 'M', u'旅'),
+ (0xF984, 'M', u'濾'),
+ (0xF985, 'M', u'礪'),
+ (0xF986, 'M', u'閭'),
+ (0xF987, 'M', u'驪'),
+ (0xF988, 'M', u'麗'),
+ (0xF989, 'M', u'黎'),
+ (0xF98A, 'M', u'力'),
+ (0xF98B, 'M', u'曆'),
+ (0xF98C, 'M', u'歷'),
+ (0xF98D, 'M', u'轢'),
+ (0xF98E, 'M', u'年'),
+ (0xF98F, 'M', u'憐'),
+ (0xF990, 'M', u'戀'),
+ (0xF991, 'M', u'撚'),
+ (0xF992, 'M', u'漣'),
+ (0xF993, 'M', u'煉'),
+ (0xF994, 'M', u'璉'),
+ (0xF995, 'M', u'秊'),
+ (0xF996, 'M', u'練'),
+ (0xF997, 'M', u'聯'),
+ (0xF998, 'M', u'輦'),
+ (0xF999, 'M', u'蓮'),
+ (0xF99A, 'M', u'連'),
+ (0xF99B, 'M', u'鍊'),
+ (0xF99C, 'M', u'列'),
+ (0xF99D, 'M', u'劣'),
+ (0xF99E, 'M', u'咽'),
+ (0xF99F, 'M', u'烈'),
+ (0xF9A0, 'M', u'裂'),
+ (0xF9A1, 'M', u'說'),
+ (0xF9A2, 'M', u'廉'),
+ (0xF9A3, 'M', u'念'),
+ (0xF9A4, 'M', u'捻'),
+ (0xF9A5, 'M', u'殮'),
+ (0xF9A6, 'M', u'簾'),
+ (0xF9A7, 'M', u'獵'),
+ (0xF9A8, 'M', u'令'),
+ (0xF9A9, 'M', u'囹'),
+ (0xF9AA, 'M', u'寧'),
+ (0xF9AB, 'M', u'嶺'),
+ (0xF9AC, 'M', u'怜'),
+ (0xF9AD, 'M', u'玲'),
+ (0xF9AE, 'M', u'瑩'),
+ (0xF9AF, 'M', u'羚'),
+ (0xF9B0, 'M', u'聆'),
+ (0xF9B1, 'M', u'鈴'),
+ (0xF9B2, 'M', u'零'),
+ (0xF9B3, 'M', u'靈'),
+ (0xF9B4, 'M', u'領'),
+ (0xF9B5, 'M', u'例'),
+ (0xF9B6, 'M', u'禮'),
+ (0xF9B7, 'M', u'醴'),
+ (0xF9B8, 'M', u'隸'),
+ (0xF9B9, 'M', u'惡'),
+ (0xF9BA, 'M', u'了'),
+ (0xF9BB, 'M', u'僚'),
+ (0xF9BC, 'M', u'寮'),
+ (0xF9BD, 'M', u'尿'),
+ (0xF9BE, 'M', u'料'),
+ (0xF9BF, 'M', u'樂'),
+ (0xF9C0, 'M', u'燎'),
+ (0xF9C1, 'M', u'療'),
+ (0xF9C2, 'M', u'蓼'),
+ (0xF9C3, 'M', u'遼'),
+ (0xF9C4, 'M', u'龍'),
+ (0xF9C5, 'M', u'暈'),
+ (0xF9C6, 'M', u'阮'),
+ (0xF9C7, 'M', u'劉'),
+ (0xF9C8, 'M', u'杻'),
+ (0xF9C9, 'M', u'柳'),
+ (0xF9CA, 'M', u'流'),
+ (0xF9CB, 'M', u'溜'),
+ (0xF9CC, 'M', u'琉'),
+ (0xF9CD, 'M', u'留'),
+ (0xF9CE, 'M', u'硫'),
+ (0xF9CF, 'M', u'紐'),
+ (0xF9D0, 'M', u'類'),
+ (0xF9D1, 'M', u'六'),
+ (0xF9D2, 'M', u'戮'),
+ (0xF9D3, 'M', u'陸'),
+ (0xF9D4, 'M', u'倫'),
+ (0xF9D5, 'M', u'崙'),
+ (0xF9D6, 'M', u'淪'),
+ (0xF9D7, 'M', u'輪'),
+ (0xF9D8, 'M', u'律'),
+ (0xF9D9, 'M', u'慄'),
+ (0xF9DA, 'M', u'栗'),
+ (0xF9DB, 'M', u'率'),
+ (0xF9DC, 'M', u'隆'),
+ (0xF9DD, 'M', u'利'),
+ ]
+
+def _seg_41():
+ return [
+ (0xF9DE, 'M', u'吏'),
+ (0xF9DF, 'M', u'履'),
+ (0xF9E0, 'M', u'易'),
+ (0xF9E1, 'M', u'李'),
+ (0xF9E2, 'M', u'梨'),
+ (0xF9E3, 'M', u'泥'),
+ (0xF9E4, 'M', u'理'),
+ (0xF9E5, 'M', u'痢'),
+ (0xF9E6, 'M', u'罹'),
+ (0xF9E7, 'M', u'裏'),
+ (0xF9E8, 'M', u'裡'),
+ (0xF9E9, 'M', u'里'),
+ (0xF9EA, 'M', u'離'),
+ (0xF9EB, 'M', u'匿'),
+ (0xF9EC, 'M', u'溺'),
+ (0xF9ED, 'M', u'吝'),
+ (0xF9EE, 'M', u'燐'),
+ (0xF9EF, 'M', u'璘'),
+ (0xF9F0, 'M', u'藺'),
+ (0xF9F1, 'M', u'隣'),
+ (0xF9F2, 'M', u'鱗'),
+ (0xF9F3, 'M', u'麟'),
+ (0xF9F4, 'M', u'林'),
+ (0xF9F5, 'M', u'淋'),
+ (0xF9F6, 'M', u'臨'),
+ (0xF9F7, 'M', u'立'),
+ (0xF9F8, 'M', u'笠'),
+ (0xF9F9, 'M', u'粒'),
+ (0xF9FA, 'M', u'狀'),
+ (0xF9FB, 'M', u'炙'),
+ (0xF9FC, 'M', u'識'),
+ (0xF9FD, 'M', u'什'),
+ (0xF9FE, 'M', u'茶'),
+ (0xF9FF, 'M', u'刺'),
+ (0xFA00, 'M', u'切'),
+ (0xFA01, 'M', u'度'),
+ (0xFA02, 'M', u'拓'),
+ (0xFA03, 'M', u'糖'),
+ (0xFA04, 'M', u'宅'),
+ (0xFA05, 'M', u'洞'),
+ (0xFA06, 'M', u'暴'),
+ (0xFA07, 'M', u'輻'),
+ (0xFA08, 'M', u'行'),
+ (0xFA09, 'M', u'降'),
+ (0xFA0A, 'M', u'見'),
+ (0xFA0B, 'M', u'廓'),
+ (0xFA0C, 'M', u'兀'),
+ (0xFA0D, 'M', u'嗀'),
+ (0xFA0E, 'V'),
+ (0xFA10, 'M', u'塚'),
+ (0xFA11, 'V'),
+ (0xFA12, 'M', u'晴'),
+ (0xFA13, 'V'),
+ (0xFA15, 'M', u'凞'),
+ (0xFA16, 'M', u'猪'),
+ (0xFA17, 'M', u'益'),
+ (0xFA18, 'M', u'礼'),
+ (0xFA19, 'M', u'神'),
+ (0xFA1A, 'M', u'祥'),
+ (0xFA1B, 'M', u'福'),
+ (0xFA1C, 'M', u'靖'),
+ (0xFA1D, 'M', u'精'),
+ (0xFA1E, 'M', u'羽'),
+ (0xFA1F, 'V'),
+ (0xFA20, 'M', u'蘒'),
+ (0xFA21, 'V'),
+ (0xFA22, 'M', u'諸'),
+ (0xFA23, 'V'),
+ (0xFA25, 'M', u'逸'),
+ (0xFA26, 'M', u'都'),
+ (0xFA27, 'V'),
+ (0xFA2A, 'M', u'飯'),
+ (0xFA2B, 'M', u'飼'),
+ (0xFA2C, 'M', u'館'),
+ (0xFA2D, 'M', u'鶴'),
+ (0xFA2E, 'M', u'郞'),
+ (0xFA2F, 'M', u'隷'),
+ (0xFA30, 'M', u'侮'),
+ (0xFA31, 'M', u'僧'),
+ (0xFA32, 'M', u'免'),
+ (0xFA33, 'M', u'勉'),
+ (0xFA34, 'M', u'勤'),
+ (0xFA35, 'M', u'卑'),
+ (0xFA36, 'M', u'喝'),
+ (0xFA37, 'M', u'嘆'),
+ (0xFA38, 'M', u'器'),
+ (0xFA39, 'M', u'塀'),
+ (0xFA3A, 'M', u'墨'),
+ (0xFA3B, 'M', u'層'),
+ (0xFA3C, 'M', u'屮'),
+ (0xFA3D, 'M', u'悔'),
+ (0xFA3E, 'M', u'慨'),
+ (0xFA3F, 'M', u'憎'),
+ (0xFA40, 'M', u'懲'),
+ (0xFA41, 'M', u'敏'),
+ (0xFA42, 'M', u'既'),
+ (0xFA43, 'M', u'暑'),
+ (0xFA44, 'M', u'梅'),
+ (0xFA45, 'M', u'海'),
+ (0xFA46, 'M', u'渚'),
+ ]
+
+def _seg_42():
+ return [
+ (0xFA47, 'M', u'漢'),
+ (0xFA48, 'M', u'煮'),
+ (0xFA49, 'M', u'爫'),
+ (0xFA4A, 'M', u'琢'),
+ (0xFA4B, 'M', u'碑'),
+ (0xFA4C, 'M', u'社'),
+ (0xFA4D, 'M', u'祉'),
+ (0xFA4E, 'M', u'祈'),
+ (0xFA4F, 'M', u'祐'),
+ (0xFA50, 'M', u'祖'),
+ (0xFA51, 'M', u'祝'),
+ (0xFA52, 'M', u'禍'),
+ (0xFA53, 'M', u'禎'),
+ (0xFA54, 'M', u'穀'),
+ (0xFA55, 'M', u'突'),
+ (0xFA56, 'M', u'節'),
+ (0xFA57, 'M', u'練'),
+ (0xFA58, 'M', u'縉'),
+ (0xFA59, 'M', u'繁'),
+ (0xFA5A, 'M', u'署'),
+ (0xFA5B, 'M', u'者'),
+ (0xFA5C, 'M', u'臭'),
+ (0xFA5D, 'M', u'艹'),
+ (0xFA5F, 'M', u'著'),
+ (0xFA60, 'M', u'褐'),
+ (0xFA61, 'M', u'視'),
+ (0xFA62, 'M', u'謁'),
+ (0xFA63, 'M', u'謹'),
+ (0xFA64, 'M', u'賓'),
+ (0xFA65, 'M', u'贈'),
+ (0xFA66, 'M', u'辶'),
+ (0xFA67, 'M', u'逸'),
+ (0xFA68, 'M', u'難'),
+ (0xFA69, 'M', u'響'),
+ (0xFA6A, 'M', u'頻'),
+ (0xFA6B, 'M', u'恵'),
+ (0xFA6C, 'M', u'𤋮'),
+ (0xFA6D, 'M', u'舘'),
+ (0xFA6E, 'X'),
+ (0xFA70, 'M', u'並'),
+ (0xFA71, 'M', u'况'),
+ (0xFA72, 'M', u'全'),
+ (0xFA73, 'M', u'侀'),
+ (0xFA74, 'M', u'充'),
+ (0xFA75, 'M', u'冀'),
+ (0xFA76, 'M', u'勇'),
+ (0xFA77, 'M', u'勺'),
+ (0xFA78, 'M', u'喝'),
+ (0xFA79, 'M', u'啕'),
+ (0xFA7A, 'M', u'喙'),
+ (0xFA7B, 'M', u'嗢'),
+ (0xFA7C, 'M', u'塚'),
+ (0xFA7D, 'M', u'墳'),
+ (0xFA7E, 'M', u'奄'),
+ (0xFA7F, 'M', u'奔'),
+ (0xFA80, 'M', u'婢'),
+ (0xFA81, 'M', u'嬨'),
+ (0xFA82, 'M', u'廒'),
+ (0xFA83, 'M', u'廙'),
+ (0xFA84, 'M', u'彩'),
+ (0xFA85, 'M', u'徭'),
+ (0xFA86, 'M', u'惘'),
+ (0xFA87, 'M', u'慎'),
+ (0xFA88, 'M', u'愈'),
+ (0xFA89, 'M', u'憎'),
+ (0xFA8A, 'M', u'慠'),
+ (0xFA8B, 'M', u'懲'),
+ (0xFA8C, 'M', u'戴'),
+ (0xFA8D, 'M', u'揄'),
+ (0xFA8E, 'M', u'搜'),
+ (0xFA8F, 'M', u'摒'),
+ (0xFA90, 'M', u'敖'),
+ (0xFA91, 'M', u'晴'),
+ (0xFA92, 'M', u'朗'),
+ (0xFA93, 'M', u'望'),
+ (0xFA94, 'M', u'杖'),
+ (0xFA95, 'M', u'歹'),
+ (0xFA96, 'M', u'殺'),
+ (0xFA97, 'M', u'流'),
+ (0xFA98, 'M', u'滛'),
+ (0xFA99, 'M', u'滋'),
+ (0xFA9A, 'M', u'漢'),
+ (0xFA9B, 'M', u'瀞'),
+ (0xFA9C, 'M', u'煮'),
+ (0xFA9D, 'M', u'瞧'),
+ (0xFA9E, 'M', u'爵'),
+ (0xFA9F, 'M', u'犯'),
+ (0xFAA0, 'M', u'猪'),
+ (0xFAA1, 'M', u'瑱'),
+ (0xFAA2, 'M', u'甆'),
+ (0xFAA3, 'M', u'画'),
+ (0xFAA4, 'M', u'瘝'),
+ (0xFAA5, 'M', u'瘟'),
+ (0xFAA6, 'M', u'益'),
+ (0xFAA7, 'M', u'盛'),
+ (0xFAA8, 'M', u'直'),
+ (0xFAA9, 'M', u'睊'),
+ (0xFAAA, 'M', u'着'),
+ (0xFAAB, 'M', u'磌'),
+ (0xFAAC, 'M', u'窱'),
+ ]
+
+def _seg_43():
+ return [
+ (0xFAAD, 'M', u'節'),
+ (0xFAAE, 'M', u'类'),
+ (0xFAAF, 'M', u'絛'),
+ (0xFAB0, 'M', u'練'),
+ (0xFAB1, 'M', u'缾'),
+ (0xFAB2, 'M', u'者'),
+ (0xFAB3, 'M', u'荒'),
+ (0xFAB4, 'M', u'華'),
+ (0xFAB5, 'M', u'蝹'),
+ (0xFAB6, 'M', u'襁'),
+ (0xFAB7, 'M', u'覆'),
+ (0xFAB8, 'M', u'視'),
+ (0xFAB9, 'M', u'調'),
+ (0xFABA, 'M', u'諸'),
+ (0xFABB, 'M', u'請'),
+ (0xFABC, 'M', u'謁'),
+ (0xFABD, 'M', u'諾'),
+ (0xFABE, 'M', u'諭'),
+ (0xFABF, 'M', u'謹'),
+ (0xFAC0, 'M', u'變'),
+ (0xFAC1, 'M', u'贈'),
+ (0xFAC2, 'M', u'輸'),
+ (0xFAC3, 'M', u'遲'),
+ (0xFAC4, 'M', u'醙'),
+ (0xFAC5, 'M', u'鉶'),
+ (0xFAC6, 'M', u'陼'),
+ (0xFAC7, 'M', u'難'),
+ (0xFAC8, 'M', u'靖'),
+ (0xFAC9, 'M', u'韛'),
+ (0xFACA, 'M', u'響'),
+ (0xFACB, 'M', u'頋'),
+ (0xFACC, 'M', u'頻'),
+ (0xFACD, 'M', u'鬒'),
+ (0xFACE, 'M', u'龜'),
+ (0xFACF, 'M', u'𢡊'),
+ (0xFAD0, 'M', u'𢡄'),
+ (0xFAD1, 'M', u'𣏕'),
+ (0xFAD2, 'M', u'㮝'),
+ (0xFAD3, 'M', u'䀘'),
+ (0xFAD4, 'M', u'䀹'),
+ (0xFAD5, 'M', u'𥉉'),
+ (0xFAD6, 'M', u'𥳐'),
+ (0xFAD7, 'M', u'𧻓'),
+ (0xFAD8, 'M', u'齃'),
+ (0xFAD9, 'M', u'龎'),
+ (0xFADA, 'X'),
+ (0xFB00, 'M', u'ff'),
+ (0xFB01, 'M', u'fi'),
+ (0xFB02, 'M', u'fl'),
+ (0xFB03, 'M', u'ffi'),
+ (0xFB04, 'M', u'ffl'),
+ (0xFB05, 'M', u'st'),
+ (0xFB07, 'X'),
+ (0xFB13, 'M', u'մն'),
+ (0xFB14, 'M', u'մե'),
+ (0xFB15, 'M', u'մի'),
+ (0xFB16, 'M', u'վն'),
+ (0xFB17, 'M', u'մխ'),
+ (0xFB18, 'X'),
+ (0xFB1D, 'M', u'יִ'),
+ (0xFB1E, 'V'),
+ (0xFB1F, 'M', u'ײַ'),
+ (0xFB20, 'M', u'ע'),
+ (0xFB21, 'M', u'א'),
+ (0xFB22, 'M', u'ד'),
+ (0xFB23, 'M', u'ה'),
+ (0xFB24, 'M', u'כ'),
+ (0xFB25, 'M', u'ל'),
+ (0xFB26, 'M', u'ם'),
+ (0xFB27, 'M', u'ר'),
+ (0xFB28, 'M', u'ת'),
+ (0xFB29, '3', u'+'),
+ (0xFB2A, 'M', u'שׁ'),
+ (0xFB2B, 'M', u'שׂ'),
+ (0xFB2C, 'M', u'שּׁ'),
+ (0xFB2D, 'M', u'שּׂ'),
+ (0xFB2E, 'M', u'אַ'),
+ (0xFB2F, 'M', u'אָ'),
+ (0xFB30, 'M', u'אּ'),
+ (0xFB31, 'M', u'בּ'),
+ (0xFB32, 'M', u'גּ'),
+ (0xFB33, 'M', u'דּ'),
+ (0xFB34, 'M', u'הּ'),
+ (0xFB35, 'M', u'וּ'),
+ (0xFB36, 'M', u'זּ'),
+ (0xFB37, 'X'),
+ (0xFB38, 'M', u'טּ'),
+ (0xFB39, 'M', u'יּ'),
+ (0xFB3A, 'M', u'ךּ'),
+ (0xFB3B, 'M', u'כּ'),
+ (0xFB3C, 'M', u'לּ'),
+ (0xFB3D, 'X'),
+ (0xFB3E, 'M', u'מּ'),
+ (0xFB3F, 'X'),
+ (0xFB40, 'M', u'נּ'),
+ (0xFB41, 'M', u'סּ'),
+ (0xFB42, 'X'),
+ (0xFB43, 'M', u'ףּ'),
+ (0xFB44, 'M', u'פּ'),
+ (0xFB45, 'X'),
+ ]
+
+def _seg_44():
+ return [
+ (0xFB46, 'M', u'צּ'),
+ (0xFB47, 'M', u'קּ'),
+ (0xFB48, 'M', u'רּ'),
+ (0xFB49, 'M', u'שּ'),
+ (0xFB4A, 'M', u'תּ'),
+ (0xFB4B, 'M', u'וֹ'),
+ (0xFB4C, 'M', u'בֿ'),
+ (0xFB4D, 'M', u'כֿ'),
+ (0xFB4E, 'M', u'פֿ'),
+ (0xFB4F, 'M', u'אל'),
+ (0xFB50, 'M', u'ٱ'),
+ (0xFB52, 'M', u'ٻ'),
+ (0xFB56, 'M', u'پ'),
+ (0xFB5A, 'M', u'ڀ'),
+ (0xFB5E, 'M', u'ٺ'),
+ (0xFB62, 'M', u'ٿ'),
+ (0xFB66, 'M', u'ٹ'),
+ (0xFB6A, 'M', u'ڤ'),
+ (0xFB6E, 'M', u'ڦ'),
+ (0xFB72, 'M', u'ڄ'),
+ (0xFB76, 'M', u'ڃ'),
+ (0xFB7A, 'M', u'چ'),
+ (0xFB7E, 'M', u'ڇ'),
+ (0xFB82, 'M', u'ڍ'),
+ (0xFB84, 'M', u'ڌ'),
+ (0xFB86, 'M', u'ڎ'),
+ (0xFB88, 'M', u'ڈ'),
+ (0xFB8A, 'M', u'ژ'),
+ (0xFB8C, 'M', u'ڑ'),
+ (0xFB8E, 'M', u'ک'),
+ (0xFB92, 'M', u'گ'),
+ (0xFB96, 'M', u'ڳ'),
+ (0xFB9A, 'M', u'ڱ'),
+ (0xFB9E, 'M', u'ں'),
+ (0xFBA0, 'M', u'ڻ'),
+ (0xFBA4, 'M', u'ۀ'),
+ (0xFBA6, 'M', u'ہ'),
+ (0xFBAA, 'M', u'ھ'),
+ (0xFBAE, 'M', u'ے'),
+ (0xFBB0, 'M', u'ۓ'),
+ (0xFBB2, 'V'),
+ (0xFBC2, 'X'),
+ (0xFBD3, 'M', u'ڭ'),
+ (0xFBD7, 'M', u'ۇ'),
+ (0xFBD9, 'M', u'ۆ'),
+ (0xFBDB, 'M', u'ۈ'),
+ (0xFBDD, 'M', u'ۇٴ'),
+ (0xFBDE, 'M', u'ۋ'),
+ (0xFBE0, 'M', u'ۅ'),
+ (0xFBE2, 'M', u'ۉ'),
+ (0xFBE4, 'M', u'ې'),
+ (0xFBE8, 'M', u'ى'),
+ (0xFBEA, 'M', u'ئا'),
+ (0xFBEC, 'M', u'ئە'),
+ (0xFBEE, 'M', u'ئو'),
+ (0xFBF0, 'M', u'ئۇ'),
+ (0xFBF2, 'M', u'ئۆ'),
+ (0xFBF4, 'M', u'ئۈ'),
+ (0xFBF6, 'M', u'ئې'),
+ (0xFBF9, 'M', u'ئى'),
+ (0xFBFC, 'M', u'ی'),
+ (0xFC00, 'M', u'ئج'),
+ (0xFC01, 'M', u'ئح'),
+ (0xFC02, 'M', u'ئم'),
+ (0xFC03, 'M', u'ئى'),
+ (0xFC04, 'M', u'ئي'),
+ (0xFC05, 'M', u'بج'),
+ (0xFC06, 'M', u'بح'),
+ (0xFC07, 'M', u'بخ'),
+ (0xFC08, 'M', u'بم'),
+ (0xFC09, 'M', u'بى'),
+ (0xFC0A, 'M', u'بي'),
+ (0xFC0B, 'M', u'تج'),
+ (0xFC0C, 'M', u'تح'),
+ (0xFC0D, 'M', u'تخ'),
+ (0xFC0E, 'M', u'تم'),
+ (0xFC0F, 'M', u'تى'),
+ (0xFC10, 'M', u'تي'),
+ (0xFC11, 'M', u'ثج'),
+ (0xFC12, 'M', u'ثم'),
+ (0xFC13, 'M', u'ثى'),
+ (0xFC14, 'M', u'ثي'),
+ (0xFC15, 'M', u'جح'),
+ (0xFC16, 'M', u'جم'),
+ (0xFC17, 'M', u'حج'),
+ (0xFC18, 'M', u'حم'),
+ (0xFC19, 'M', u'خج'),
+ (0xFC1A, 'M', u'خح'),
+ (0xFC1B, 'M', u'خم'),
+ (0xFC1C, 'M', u'سج'),
+ (0xFC1D, 'M', u'سح'),
+ (0xFC1E, 'M', u'سخ'),
+ (0xFC1F, 'M', u'سم'),
+ (0xFC20, 'M', u'صح'),
+ (0xFC21, 'M', u'صم'),
+ (0xFC22, 'M', u'ضج'),
+ (0xFC23, 'M', u'ضح'),
+ (0xFC24, 'M', u'ضخ'),
+ (0xFC25, 'M', u'ضم'),
+ (0xFC26, 'M', u'طح'),
+ ]
+
+def _seg_45():
+ return [
+ (0xFC27, 'M', u'طم'),
+ (0xFC28, 'M', u'ظم'),
+ (0xFC29, 'M', u'عج'),
+ (0xFC2A, 'M', u'عم'),
+ (0xFC2B, 'M', u'غج'),
+ (0xFC2C, 'M', u'غم'),
+ (0xFC2D, 'M', u'فج'),
+ (0xFC2E, 'M', u'فح'),
+ (0xFC2F, 'M', u'فخ'),
+ (0xFC30, 'M', u'فم'),
+ (0xFC31, 'M', u'فى'),
+ (0xFC32, 'M', u'في'),
+ (0xFC33, 'M', u'قح'),
+ (0xFC34, 'M', u'قم'),
+ (0xFC35, 'M', u'قى'),
+ (0xFC36, 'M', u'قي'),
+ (0xFC37, 'M', u'كا'),
+ (0xFC38, 'M', u'كج'),
+ (0xFC39, 'M', u'كح'),
+ (0xFC3A, 'M', u'كخ'),
+ (0xFC3B, 'M', u'كل'),
+ (0xFC3C, 'M', u'كم'),
+ (0xFC3D, 'M', u'كى'),
+ (0xFC3E, 'M', u'كي'),
+ (0xFC3F, 'M', u'لج'),
+ (0xFC40, 'M', u'لح'),
+ (0xFC41, 'M', u'لخ'),
+ (0xFC42, 'M', u'لم'),
+ (0xFC43, 'M', u'لى'),
+ (0xFC44, 'M', u'لي'),
+ (0xFC45, 'M', u'مج'),
+ (0xFC46, 'M', u'مح'),
+ (0xFC47, 'M', u'مخ'),
+ (0xFC48, 'M', u'مم'),
+ (0xFC49, 'M', u'مى'),
+ (0xFC4A, 'M', u'مي'),
+ (0xFC4B, 'M', u'نج'),
+ (0xFC4C, 'M', u'نح'),
+ (0xFC4D, 'M', u'نخ'),
+ (0xFC4E, 'M', u'نم'),
+ (0xFC4F, 'M', u'نى'),
+ (0xFC50, 'M', u'ني'),
+ (0xFC51, 'M', u'هج'),
+ (0xFC52, 'M', u'هم'),
+ (0xFC53, 'M', u'هى'),
+ (0xFC54, 'M', u'هي'),
+ (0xFC55, 'M', u'يج'),
+ (0xFC56, 'M', u'يح'),
+ (0xFC57, 'M', u'يخ'),
+ (0xFC58, 'M', u'يم'),
+ (0xFC59, 'M', u'يى'),
+ (0xFC5A, 'M', u'يي'),
+ (0xFC5B, 'M', u'ذٰ'),
+ (0xFC5C, 'M', u'رٰ'),
+ (0xFC5D, 'M', u'ىٰ'),
+ (0xFC5E, '3', u' ٌّ'),
+ (0xFC5F, '3', u' ٍّ'),
+ (0xFC60, '3', u' َّ'),
+ (0xFC61, '3', u' ُّ'),
+ (0xFC62, '3', u' ِّ'),
+ (0xFC63, '3', u' ّٰ'),
+ (0xFC64, 'M', u'ئر'),
+ (0xFC65, 'M', u'ئز'),
+ (0xFC66, 'M', u'ئم'),
+ (0xFC67, 'M', u'ئن'),
+ (0xFC68, 'M', u'ئى'),
+ (0xFC69, 'M', u'ئي'),
+ (0xFC6A, 'M', u'بر'),
+ (0xFC6B, 'M', u'بز'),
+ (0xFC6C, 'M', u'بم'),
+ (0xFC6D, 'M', u'بن'),
+ (0xFC6E, 'M', u'بى'),
+ (0xFC6F, 'M', u'بي'),
+ (0xFC70, 'M', u'تر'),
+ (0xFC71, 'M', u'تز'),
+ (0xFC72, 'M', u'تم'),
+ (0xFC73, 'M', u'تن'),
+ (0xFC74, 'M', u'تى'),
+ (0xFC75, 'M', u'تي'),
+ (0xFC76, 'M', u'ثر'),
+ (0xFC77, 'M', u'ثز'),
+ (0xFC78, 'M', u'ثم'),
+ (0xFC79, 'M', u'ثن'),
+ (0xFC7A, 'M', u'ثى'),
+ (0xFC7B, 'M', u'ثي'),
+ (0xFC7C, 'M', u'فى'),
+ (0xFC7D, 'M', u'في'),
+ (0xFC7E, 'M', u'قى'),
+ (0xFC7F, 'M', u'قي'),
+ (0xFC80, 'M', u'كا'),
+ (0xFC81, 'M', u'كل'),
+ (0xFC82, 'M', u'كم'),
+ (0xFC83, 'M', u'كى'),
+ (0xFC84, 'M', u'كي'),
+ (0xFC85, 'M', u'لم'),
+ (0xFC86, 'M', u'لى'),
+ (0xFC87, 'M', u'لي'),
+ (0xFC88, 'M', u'ما'),
+ (0xFC89, 'M', u'مم'),
+ (0xFC8A, 'M', u'نر'),
+ ]
+
+def _seg_46():
+ return [
+ (0xFC8B, 'M', u'نز'),
+ (0xFC8C, 'M', u'نم'),
+ (0xFC8D, 'M', u'نن'),
+ (0xFC8E, 'M', u'نى'),
+ (0xFC8F, 'M', u'ني'),
+ (0xFC90, 'M', u'ىٰ'),
+ (0xFC91, 'M', u'ير'),
+ (0xFC92, 'M', u'يز'),
+ (0xFC93, 'M', u'يم'),
+ (0xFC94, 'M', u'ين'),
+ (0xFC95, 'M', u'يى'),
+ (0xFC96, 'M', u'يي'),
+ (0xFC97, 'M', u'ئج'),
+ (0xFC98, 'M', u'ئح'),
+ (0xFC99, 'M', u'ئخ'),
+ (0xFC9A, 'M', u'ئم'),
+ (0xFC9B, 'M', u'ئه'),
+ (0xFC9C, 'M', u'بج'),
+ (0xFC9D, 'M', u'بح'),
+ (0xFC9E, 'M', u'بخ'),
+ (0xFC9F, 'M', u'بم'),
+ (0xFCA0, 'M', u'به'),
+ (0xFCA1, 'M', u'تج'),
+ (0xFCA2, 'M', u'تح'),
+ (0xFCA3, 'M', u'تخ'),
+ (0xFCA4, 'M', u'تم'),
+ (0xFCA5, 'M', u'ته'),
+ (0xFCA6, 'M', u'ثم'),
+ (0xFCA7, 'M', u'جح'),
+ (0xFCA8, 'M', u'جم'),
+ (0xFCA9, 'M', u'حج'),
+ (0xFCAA, 'M', u'حم'),
+ (0xFCAB, 'M', u'خج'),
+ (0xFCAC, 'M', u'خم'),
+ (0xFCAD, 'M', u'سج'),
+ (0xFCAE, 'M', u'سح'),
+ (0xFCAF, 'M', u'سخ'),
+ (0xFCB0, 'M', u'سم'),
+ (0xFCB1, 'M', u'صح'),
+ (0xFCB2, 'M', u'صخ'),
+ (0xFCB3, 'M', u'صم'),
+ (0xFCB4, 'M', u'ضج'),
+ (0xFCB5, 'M', u'ضح'),
+ (0xFCB6, 'M', u'ضخ'),
+ (0xFCB7, 'M', u'ضم'),
+ (0xFCB8, 'M', u'طح'),
+ (0xFCB9, 'M', u'ظم'),
+ (0xFCBA, 'M', u'عج'),
+ (0xFCBB, 'M', u'عم'),
+ (0xFCBC, 'M', u'غج'),
+ (0xFCBD, 'M', u'غم'),
+ (0xFCBE, 'M', u'فج'),
+ (0xFCBF, 'M', u'فح'),
+ (0xFCC0, 'M', u'فخ'),
+ (0xFCC1, 'M', u'فم'),
+ (0xFCC2, 'M', u'قح'),
+ (0xFCC3, 'M', u'قم'),
+ (0xFCC4, 'M', u'كج'),
+ (0xFCC5, 'M', u'كح'),
+ (0xFCC6, 'M', u'كخ'),
+ (0xFCC7, 'M', u'كل'),
+ (0xFCC8, 'M', u'كم'),
+ (0xFCC9, 'M', u'لج'),
+ (0xFCCA, 'M', u'لح'),
+ (0xFCCB, 'M', u'لخ'),
+ (0xFCCC, 'M', u'لم'),
+ (0xFCCD, 'M', u'له'),
+ (0xFCCE, 'M', u'مج'),
+ (0xFCCF, 'M', u'مح'),
+ (0xFCD0, 'M', u'مخ'),
+ (0xFCD1, 'M', u'مم'),
+ (0xFCD2, 'M', u'نج'),
+ (0xFCD3, 'M', u'نح'),
+ (0xFCD4, 'M', u'نخ'),
+ (0xFCD5, 'M', u'نم'),
+ (0xFCD6, 'M', u'نه'),
+ (0xFCD7, 'M', u'هج'),
+ (0xFCD8, 'M', u'هم'),
+ (0xFCD9, 'M', u'هٰ'),
+ (0xFCDA, 'M', u'يج'),
+ (0xFCDB, 'M', u'يح'),
+ (0xFCDC, 'M', u'يخ'),
+ (0xFCDD, 'M', u'يم'),
+ (0xFCDE, 'M', u'يه'),
+ (0xFCDF, 'M', u'ئم'),
+ (0xFCE0, 'M', u'ئه'),
+ (0xFCE1, 'M', u'بم'),
+ (0xFCE2, 'M', u'به'),
+ (0xFCE3, 'M', u'تم'),
+ (0xFCE4, 'M', u'ته'),
+ (0xFCE5, 'M', u'ثم'),
+ (0xFCE6, 'M', u'ثه'),
+ (0xFCE7, 'M', u'سم'),
+ (0xFCE8, 'M', u'سه'),
+ (0xFCE9, 'M', u'شم'),
+ (0xFCEA, 'M', u'شه'),
+ (0xFCEB, 'M', u'كل'),
+ (0xFCEC, 'M', u'كم'),
+ (0xFCED, 'M', u'لم'),
+ (0xFCEE, 'M', u'نم'),
+ ]
+
+def _seg_47():
+ return [
+ (0xFCEF, 'M', u'نه'),
+ (0xFCF0, 'M', u'يم'),
+ (0xFCF1, 'M', u'يه'),
+ (0xFCF2, 'M', u'ـَّ'),
+ (0xFCF3, 'M', u'ـُّ'),
+ (0xFCF4, 'M', u'ـِّ'),
+ (0xFCF5, 'M', u'طى'),
+ (0xFCF6, 'M', u'طي'),
+ (0xFCF7, 'M', u'عى'),
+ (0xFCF8, 'M', u'عي'),
+ (0xFCF9, 'M', u'غى'),
+ (0xFCFA, 'M', u'غي'),
+ (0xFCFB, 'M', u'سى'),
+ (0xFCFC, 'M', u'سي'),
+ (0xFCFD, 'M', u'شى'),
+ (0xFCFE, 'M', u'شي'),
+ (0xFCFF, 'M', u'حى'),
+ (0xFD00, 'M', u'حي'),
+ (0xFD01, 'M', u'جى'),
+ (0xFD02, 'M', u'جي'),
+ (0xFD03, 'M', u'خى'),
+ (0xFD04, 'M', u'خي'),
+ (0xFD05, 'M', u'صى'),
+ (0xFD06, 'M', u'صي'),
+ (0xFD07, 'M', u'ضى'),
+ (0xFD08, 'M', u'ضي'),
+ (0xFD09, 'M', u'شج'),
+ (0xFD0A, 'M', u'شح'),
+ (0xFD0B, 'M', u'شخ'),
+ (0xFD0C, 'M', u'شم'),
+ (0xFD0D, 'M', u'شر'),
+ (0xFD0E, 'M', u'سر'),
+ (0xFD0F, 'M', u'صر'),
+ (0xFD10, 'M', u'ضر'),
+ (0xFD11, 'M', u'طى'),
+ (0xFD12, 'M', u'طي'),
+ (0xFD13, 'M', u'عى'),
+ (0xFD14, 'M', u'عي'),
+ (0xFD15, 'M', u'غى'),
+ (0xFD16, 'M', u'غي'),
+ (0xFD17, 'M', u'سى'),
+ (0xFD18, 'M', u'سي'),
+ (0xFD19, 'M', u'شى'),
+ (0xFD1A, 'M', u'شي'),
+ (0xFD1B, 'M', u'حى'),
+ (0xFD1C, 'M', u'حي'),
+ (0xFD1D, 'M', u'جى'),
+ (0xFD1E, 'M', u'جي'),
+ (0xFD1F, 'M', u'خى'),
+ (0xFD20, 'M', u'خي'),
+ (0xFD21, 'M', u'صى'),
+ (0xFD22, 'M', u'صي'),
+ (0xFD23, 'M', u'ضى'),
+ (0xFD24, 'M', u'ضي'),
+ (0xFD25, 'M', u'شج'),
+ (0xFD26, 'M', u'شح'),
+ (0xFD27, 'M', u'شخ'),
+ (0xFD28, 'M', u'شم'),
+ (0xFD29, 'M', u'شر'),
+ (0xFD2A, 'M', u'سر'),
+ (0xFD2B, 'M', u'صر'),
+ (0xFD2C, 'M', u'ضر'),
+ (0xFD2D, 'M', u'شج'),
+ (0xFD2E, 'M', u'شح'),
+ (0xFD2F, 'M', u'شخ'),
+ (0xFD30, 'M', u'شم'),
+ (0xFD31, 'M', u'سه'),
+ (0xFD32, 'M', u'شه'),
+ (0xFD33, 'M', u'طم'),
+ (0xFD34, 'M', u'سج'),
+ (0xFD35, 'M', u'سح'),
+ (0xFD36, 'M', u'سخ'),
+ (0xFD37, 'M', u'شج'),
+ (0xFD38, 'M', u'شح'),
+ (0xFD39, 'M', u'شخ'),
+ (0xFD3A, 'M', u'طم'),
+ (0xFD3B, 'M', u'ظم'),
+ (0xFD3C, 'M', u'اً'),
+ (0xFD3E, 'V'),
+ (0xFD40, 'X'),
+ (0xFD50, 'M', u'تجم'),
+ (0xFD51, 'M', u'تحج'),
+ (0xFD53, 'M', u'تحم'),
+ (0xFD54, 'M', u'تخم'),
+ (0xFD55, 'M', u'تمج'),
+ (0xFD56, 'M', u'تمح'),
+ (0xFD57, 'M', u'تمخ'),
+ (0xFD58, 'M', u'جمح'),
+ (0xFD5A, 'M', u'حمي'),
+ (0xFD5B, 'M', u'حمى'),
+ (0xFD5C, 'M', u'سحج'),
+ (0xFD5D, 'M', u'سجح'),
+ (0xFD5E, 'M', u'سجى'),
+ (0xFD5F, 'M', u'سمح'),
+ (0xFD61, 'M', u'سمج'),
+ (0xFD62, 'M', u'سمم'),
+ (0xFD64, 'M', u'صحح'),
+ (0xFD66, 'M', u'صمم'),
+ (0xFD67, 'M', u'شحم'),
+ (0xFD69, 'M', u'شجي'),
+ ]
+
+def _seg_48():
+ return [
+ (0xFD6A, 'M', u'شمخ'),
+ (0xFD6C, 'M', u'شمم'),
+ (0xFD6E, 'M', u'ضحى'),
+ (0xFD6F, 'M', u'ضخم'),
+ (0xFD71, 'M', u'طمح'),
+ (0xFD73, 'M', u'طمم'),
+ (0xFD74, 'M', u'طمي'),
+ (0xFD75, 'M', u'عجم'),
+ (0xFD76, 'M', u'عمم'),
+ (0xFD78, 'M', u'عمى'),
+ (0xFD79, 'M', u'غمم'),
+ (0xFD7A, 'M', u'غمي'),
+ (0xFD7B, 'M', u'غمى'),
+ (0xFD7C, 'M', u'فخم'),
+ (0xFD7E, 'M', u'قمح'),
+ (0xFD7F, 'M', u'قمم'),
+ (0xFD80, 'M', u'لحم'),
+ (0xFD81, 'M', u'لحي'),
+ (0xFD82, 'M', u'لحى'),
+ (0xFD83, 'M', u'لجج'),
+ (0xFD85, 'M', u'لخم'),
+ (0xFD87, 'M', u'لمح'),
+ (0xFD89, 'M', u'محج'),
+ (0xFD8A, 'M', u'محم'),
+ (0xFD8B, 'M', u'محي'),
+ (0xFD8C, 'M', u'مجح'),
+ (0xFD8D, 'M', u'مجم'),
+ (0xFD8E, 'M', u'مخج'),
+ (0xFD8F, 'M', u'مخم'),
+ (0xFD90, 'X'),
+ (0xFD92, 'M', u'مجخ'),
+ (0xFD93, 'M', u'همج'),
+ (0xFD94, 'M', u'همم'),
+ (0xFD95, 'M', u'نحم'),
+ (0xFD96, 'M', u'نحى'),
+ (0xFD97, 'M', u'نجم'),
+ (0xFD99, 'M', u'نجى'),
+ (0xFD9A, 'M', u'نمي'),
+ (0xFD9B, 'M', u'نمى'),
+ (0xFD9C, 'M', u'يمم'),
+ (0xFD9E, 'M', u'بخي'),
+ (0xFD9F, 'M', u'تجي'),
+ (0xFDA0, 'M', u'تجى'),
+ (0xFDA1, 'M', u'تخي'),
+ (0xFDA2, 'M', u'تخى'),
+ (0xFDA3, 'M', u'تمي'),
+ (0xFDA4, 'M', u'تمى'),
+ (0xFDA5, 'M', u'جمي'),
+ (0xFDA6, 'M', u'جحى'),
+ (0xFDA7, 'M', u'جمى'),
+ (0xFDA8, 'M', u'سخى'),
+ (0xFDA9, 'M', u'صحي'),
+ (0xFDAA, 'M', u'شحي'),
+ (0xFDAB, 'M', u'ضحي'),
+ (0xFDAC, 'M', u'لجي'),
+ (0xFDAD, 'M', u'لمي'),
+ (0xFDAE, 'M', u'يحي'),
+ (0xFDAF, 'M', u'يجي'),
+ (0xFDB0, 'M', u'يمي'),
+ (0xFDB1, 'M', u'ممي'),
+ (0xFDB2, 'M', u'قمي'),
+ (0xFDB3, 'M', u'نحي'),
+ (0xFDB4, 'M', u'قمح'),
+ (0xFDB5, 'M', u'لحم'),
+ (0xFDB6, 'M', u'عمي'),
+ (0xFDB7, 'M', u'كمي'),
+ (0xFDB8, 'M', u'نجح'),
+ (0xFDB9, 'M', u'مخي'),
+ (0xFDBA, 'M', u'لجم'),
+ (0xFDBB, 'M', u'كمم'),
+ (0xFDBC, 'M', u'لجم'),
+ (0xFDBD, 'M', u'نجح'),
+ (0xFDBE, 'M', u'جحي'),
+ (0xFDBF, 'M', u'حجي'),
+ (0xFDC0, 'M', u'مجي'),
+ (0xFDC1, 'M', u'فمي'),
+ (0xFDC2, 'M', u'بحي'),
+ (0xFDC3, 'M', u'كمم'),
+ (0xFDC4, 'M', u'عجم'),
+ (0xFDC5, 'M', u'صمم'),
+ (0xFDC6, 'M', u'سخي'),
+ (0xFDC7, 'M', u'نجي'),
+ (0xFDC8, 'X'),
+ (0xFDF0, 'M', u'صلے'),
+ (0xFDF1, 'M', u'قلے'),
+ (0xFDF2, 'M', u'الله'),
+ (0xFDF3, 'M', u'اكبر'),
+ (0xFDF4, 'M', u'محمد'),
+ (0xFDF5, 'M', u'صلعم'),
+ (0xFDF6, 'M', u'رسول'),
+ (0xFDF7, 'M', u'عليه'),
+ (0xFDF8, 'M', u'وسلم'),
+ (0xFDF9, 'M', u'صلى'),
+ (0xFDFA, '3', u'صلى الله عليه وسلم'),
+ (0xFDFB, '3', u'جل جلاله'),
+ (0xFDFC, 'M', u'ریال'),
+ (0xFDFD, 'V'),
+ (0xFDFE, 'X'),
+ (0xFE00, 'I'),
+ (0xFE10, '3', u','),
+ ]
+
+def _seg_49():
+ return [
+ (0xFE11, 'M', u'、'),
+ (0xFE12, 'X'),
+ (0xFE13, '3', u':'),
+ (0xFE14, '3', u';'),
+ (0xFE15, '3', u'!'),
+ (0xFE16, '3', u'?'),
+ (0xFE17, 'M', u'〖'),
+ (0xFE18, 'M', u'〗'),
+ (0xFE19, 'X'),
+ (0xFE20, 'V'),
+ (0xFE30, 'X'),
+ (0xFE31, 'M', u'—'),
+ (0xFE32, 'M', u'–'),
+ (0xFE33, '3', u'_'),
+ (0xFE35, '3', u'('),
+ (0xFE36, '3', u')'),
+ (0xFE37, '3', u'{'),
+ (0xFE38, '3', u'}'),
+ (0xFE39, 'M', u'〔'),
+ (0xFE3A, 'M', u'〕'),
+ (0xFE3B, 'M', u'【'),
+ (0xFE3C, 'M', u'】'),
+ (0xFE3D, 'M', u'《'),
+ (0xFE3E, 'M', u'》'),
+ (0xFE3F, 'M', u'〈'),
+ (0xFE40, 'M', u'〉'),
+ (0xFE41, 'M', u'「'),
+ (0xFE42, 'M', u'」'),
+ (0xFE43, 'M', u'『'),
+ (0xFE44, 'M', u'』'),
+ (0xFE45, 'V'),
+ (0xFE47, '3', u'['),
+ (0xFE48, '3', u']'),
+ (0xFE49, '3', u' ̅'),
+ (0xFE4D, '3', u'_'),
+ (0xFE50, '3', u','),
+ (0xFE51, 'M', u'、'),
+ (0xFE52, 'X'),
+ (0xFE54, '3', u';'),
+ (0xFE55, '3', u':'),
+ (0xFE56, '3', u'?'),
+ (0xFE57, '3', u'!'),
+ (0xFE58, 'M', u'—'),
+ (0xFE59, '3', u'('),
+ (0xFE5A, '3', u')'),
+ (0xFE5B, '3', u'{'),
+ (0xFE5C, '3', u'}'),
+ (0xFE5D, 'M', u'〔'),
+ (0xFE5E, 'M', u'〕'),
+ (0xFE5F, '3', u'#'),
+ (0xFE60, '3', u'&'),
+ (0xFE61, '3', u'*'),
+ (0xFE62, '3', u'+'),
+ (0xFE63, 'M', u'-'),
+ (0xFE64, '3', u'<'),
+ (0xFE65, '3', u'>'),
+ (0xFE66, '3', u'='),
+ (0xFE67, 'X'),
+ (0xFE68, '3', u'\\'),
+ (0xFE69, '3', u'$'),
+ (0xFE6A, '3', u'%'),
+ (0xFE6B, '3', u'@'),
+ (0xFE6C, 'X'),
+ (0xFE70, '3', u' ً'),
+ (0xFE71, 'M', u'ـً'),
+ (0xFE72, '3', u' ٌ'),
+ (0xFE73, 'V'),
+ (0xFE74, '3', u' ٍ'),
+ (0xFE75, 'X'),
+ (0xFE76, '3', u' َ'),
+ (0xFE77, 'M', u'ـَ'),
+ (0xFE78, '3', u' ُ'),
+ (0xFE79, 'M', u'ـُ'),
+ (0xFE7A, '3', u' ِ'),
+ (0xFE7B, 'M', u'ـِ'),
+ (0xFE7C, '3', u' ّ'),
+ (0xFE7D, 'M', u'ـّ'),
+ (0xFE7E, '3', u' ْ'),
+ (0xFE7F, 'M', u'ـْ'),
+ (0xFE80, 'M', u'ء'),
+ (0xFE81, 'M', u'آ'),
+ (0xFE83, 'M', u'أ'),
+ (0xFE85, 'M', u'ؤ'),
+ (0xFE87, 'M', u'إ'),
+ (0xFE89, 'M', u'ئ'),
+ (0xFE8D, 'M', u'ا'),
+ (0xFE8F, 'M', u'ب'),
+ (0xFE93, 'M', u'ة'),
+ (0xFE95, 'M', u'ت'),
+ (0xFE99, 'M', u'ث'),
+ (0xFE9D, 'M', u'ج'),
+ (0xFEA1, 'M', u'ح'),
+ (0xFEA5, 'M', u'خ'),
+ (0xFEA9, 'M', u'د'),
+ (0xFEAB, 'M', u'ذ'),
+ (0xFEAD, 'M', u'ر'),
+ (0xFEAF, 'M', u'ز'),
+ (0xFEB1, 'M', u'س'),
+ (0xFEB5, 'M', u'ش'),
+ (0xFEB9, 'M', u'ص'),
+ ]
+
+def _seg_50():
+ return [
+ (0xFEBD, 'M', u'ض'),
+ (0xFEC1, 'M', u'ط'),
+ (0xFEC5, 'M', u'ظ'),
+ (0xFEC9, 'M', u'ع'),
+ (0xFECD, 'M', u'غ'),
+ (0xFED1, 'M', u'ف'),
+ (0xFED5, 'M', u'ق'),
+ (0xFED9, 'M', u'ك'),
+ (0xFEDD, 'M', u'ل'),
+ (0xFEE1, 'M', u'م'),
+ (0xFEE5, 'M', u'ن'),
+ (0xFEE9, 'M', u'ه'),
+ (0xFEED, 'M', u'و'),
+ (0xFEEF, 'M', u'ى'),
+ (0xFEF1, 'M', u'ي'),
+ (0xFEF5, 'M', u'لآ'),
+ (0xFEF7, 'M', u'لأ'),
+ (0xFEF9, 'M', u'لإ'),
+ (0xFEFB, 'M', u'لا'),
+ (0xFEFD, 'X'),
+ (0xFEFF, 'I'),
+ (0xFF00, 'X'),
+ (0xFF01, '3', u'!'),
+ (0xFF02, '3', u'"'),
+ (0xFF03, '3', u'#'),
+ (0xFF04, '3', u'$'),
+ (0xFF05, '3', u'%'),
+ (0xFF06, '3', u'&'),
+ (0xFF07, '3', u'\''),
+ (0xFF08, '3', u'('),
+ (0xFF09, '3', u')'),
+ (0xFF0A, '3', u'*'),
+ (0xFF0B, '3', u'+'),
+ (0xFF0C, '3', u','),
+ (0xFF0D, 'M', u'-'),
+ (0xFF0E, 'M', u'.'),
+ (0xFF0F, '3', u'/'),
+ (0xFF10, 'M', u'0'),
+ (0xFF11, 'M', u'1'),
+ (0xFF12, 'M', u'2'),
+ (0xFF13, 'M', u'3'),
+ (0xFF14, 'M', u'4'),
+ (0xFF15, 'M', u'5'),
+ (0xFF16, 'M', u'6'),
+ (0xFF17, 'M', u'7'),
+ (0xFF18, 'M', u'8'),
+ (0xFF19, 'M', u'9'),
+ (0xFF1A, '3', u':'),
+ (0xFF1B, '3', u';'),
+ (0xFF1C, '3', u'<'),
+ (0xFF1D, '3', u'='),
+ (0xFF1E, '3', u'>'),
+ (0xFF1F, '3', u'?'),
+ (0xFF20, '3', u'@'),
+ (0xFF21, 'M', u'a'),
+ (0xFF22, 'M', u'b'),
+ (0xFF23, 'M', u'c'),
+ (0xFF24, 'M', u'd'),
+ (0xFF25, 'M', u'e'),
+ (0xFF26, 'M', u'f'),
+ (0xFF27, 'M', u'g'),
+ (0xFF28, 'M', u'h'),
+ (0xFF29, 'M', u'i'),
+ (0xFF2A, 'M', u'j'),
+ (0xFF2B, 'M', u'k'),
+ (0xFF2C, 'M', u'l'),
+ (0xFF2D, 'M', u'm'),
+ (0xFF2E, 'M', u'n'),
+ (0xFF2F, 'M', u'o'),
+ (0xFF30, 'M', u'p'),
+ (0xFF31, 'M', u'q'),
+ (0xFF32, 'M', u'r'),
+ (0xFF33, 'M', u's'),
+ (0xFF34, 'M', u't'),
+ (0xFF35, 'M', u'u'),
+ (0xFF36, 'M', u'v'),
+ (0xFF37, 'M', u'w'),
+ (0xFF38, 'M', u'x'),
+ (0xFF39, 'M', u'y'),
+ (0xFF3A, 'M', u'z'),
+ (0xFF3B, '3', u'['),
+ (0xFF3C, '3', u'\\'),
+ (0xFF3D, '3', u']'),
+ (0xFF3E, '3', u'^'),
+ (0xFF3F, '3', u'_'),
+ (0xFF40, '3', u'`'),
+ (0xFF41, 'M', u'a'),
+ (0xFF42, 'M', u'b'),
+ (0xFF43, 'M', u'c'),
+ (0xFF44, 'M', u'd'),
+ (0xFF45, 'M', u'e'),
+ (0xFF46, 'M', u'f'),
+ (0xFF47, 'M', u'g'),
+ (0xFF48, 'M', u'h'),
+ (0xFF49, 'M', u'i'),
+ (0xFF4A, 'M', u'j'),
+ (0xFF4B, 'M', u'k'),
+ (0xFF4C, 'M', u'l'),
+ (0xFF4D, 'M', u'm'),
+ (0xFF4E, 'M', u'n'),
+ ]
+
+def _seg_51():
+ return [
+ (0xFF4F, 'M', u'o'),
+ (0xFF50, 'M', u'p'),
+ (0xFF51, 'M', u'q'),
+ (0xFF52, 'M', u'r'),
+ (0xFF53, 'M', u's'),
+ (0xFF54, 'M', u't'),
+ (0xFF55, 'M', u'u'),
+ (0xFF56, 'M', u'v'),
+ (0xFF57, 'M', u'w'),
+ (0xFF58, 'M', u'x'),
+ (0xFF59, 'M', u'y'),
+ (0xFF5A, 'M', u'z'),
+ (0xFF5B, '3', u'{'),
+ (0xFF5C, '3', u'|'),
+ (0xFF5D, '3', u'}'),
+ (0xFF5E, '3', u'~'),
+ (0xFF5F, 'M', u'⦅'),
+ (0xFF60, 'M', u'⦆'),
+ (0xFF61, 'M', u'.'),
+ (0xFF62, 'M', u'「'),
+ (0xFF63, 'M', u'」'),
+ (0xFF64, 'M', u'、'),
+ (0xFF65, 'M', u'・'),
+ (0xFF66, 'M', u'ヲ'),
+ (0xFF67, 'M', u'ァ'),
+ (0xFF68, 'M', u'ィ'),
+ (0xFF69, 'M', u'ゥ'),
+ (0xFF6A, 'M', u'ェ'),
+ (0xFF6B, 'M', u'ォ'),
+ (0xFF6C, 'M', u'ャ'),
+ (0xFF6D, 'M', u'ュ'),
+ (0xFF6E, 'M', u'ョ'),
+ (0xFF6F, 'M', u'ッ'),
+ (0xFF70, 'M', u'ー'),
+ (0xFF71, 'M', u'ア'),
+ (0xFF72, 'M', u'イ'),
+ (0xFF73, 'M', u'ウ'),
+ (0xFF74, 'M', u'エ'),
+ (0xFF75, 'M', u'オ'),
+ (0xFF76, 'M', u'カ'),
+ (0xFF77, 'M', u'キ'),
+ (0xFF78, 'M', u'ク'),
+ (0xFF79, 'M', u'ケ'),
+ (0xFF7A, 'M', u'コ'),
+ (0xFF7B, 'M', u'サ'),
+ (0xFF7C, 'M', u'シ'),
+ (0xFF7D, 'M', u'ス'),
+ (0xFF7E, 'M', u'セ'),
+ (0xFF7F, 'M', u'ソ'),
+ (0xFF80, 'M', u'タ'),
+ (0xFF81, 'M', u'チ'),
+ (0xFF82, 'M', u'ツ'),
+ (0xFF83, 'M', u'テ'),
+ (0xFF84, 'M', u'ト'),
+ (0xFF85, 'M', u'ナ'),
+ (0xFF86, 'M', u'ニ'),
+ (0xFF87, 'M', u'ヌ'),
+ (0xFF88, 'M', u'ネ'),
+ (0xFF89, 'M', u'ノ'),
+ (0xFF8A, 'M', u'ハ'),
+ (0xFF8B, 'M', u'ヒ'),
+ (0xFF8C, 'M', u'フ'),
+ (0xFF8D, 'M', u'ヘ'),
+ (0xFF8E, 'M', u'ホ'),
+ (0xFF8F, 'M', u'マ'),
+ (0xFF90, 'M', u'ミ'),
+ (0xFF91, 'M', u'ム'),
+ (0xFF92, 'M', u'メ'),
+ (0xFF93, 'M', u'モ'),
+ (0xFF94, 'M', u'ヤ'),
+ (0xFF95, 'M', u'ユ'),
+ (0xFF96, 'M', u'ヨ'),
+ (0xFF97, 'M', u'ラ'),
+ (0xFF98, 'M', u'リ'),
+ (0xFF99, 'M', u'ル'),
+ (0xFF9A, 'M', u'レ'),
+ (0xFF9B, 'M', u'ロ'),
+ (0xFF9C, 'M', u'ワ'),
+ (0xFF9D, 'M', u'ン'),
+ (0xFF9E, 'M', u'゙'),
+ (0xFF9F, 'M', u'゚'),
+ (0xFFA0, 'X'),
+ (0xFFA1, 'M', u'ᄀ'),
+ (0xFFA2, 'M', u'ᄁ'),
+ (0xFFA3, 'M', u'ᆪ'),
+ (0xFFA4, 'M', u'ᄂ'),
+ (0xFFA5, 'M', u'ᆬ'),
+ (0xFFA6, 'M', u'ᆭ'),
+ (0xFFA7, 'M', u'ᄃ'),
+ (0xFFA8, 'M', u'ᄄ'),
+ (0xFFA9, 'M', u'ᄅ'),
+ (0xFFAA, 'M', u'ᆰ'),
+ (0xFFAB, 'M', u'ᆱ'),
+ (0xFFAC, 'M', u'ᆲ'),
+ (0xFFAD, 'M', u'ᆳ'),
+ (0xFFAE, 'M', u'ᆴ'),
+ (0xFFAF, 'M', u'ᆵ'),
+ (0xFFB0, 'M', u'ᄚ'),
+ (0xFFB1, 'M', u'ᄆ'),
+ (0xFFB2, 'M', u'ᄇ'),
+ ]
+
+def _seg_52():
+ return [
+ (0xFFB3, 'M', u'ᄈ'),
+ (0xFFB4, 'M', u'ᄡ'),
+ (0xFFB5, 'M', u'ᄉ'),
+ (0xFFB6, 'M', u'ᄊ'),
+ (0xFFB7, 'M', u'ᄋ'),
+ (0xFFB8, 'M', u'ᄌ'),
+ (0xFFB9, 'M', u'ᄍ'),
+ (0xFFBA, 'M', u'ᄎ'),
+ (0xFFBB, 'M', u'ᄏ'),
+ (0xFFBC, 'M', u'ᄐ'),
+ (0xFFBD, 'M', u'ᄑ'),
+ (0xFFBE, 'M', u'ᄒ'),
+ (0xFFBF, 'X'),
+ (0xFFC2, 'M', u'ᅡ'),
+ (0xFFC3, 'M', u'ᅢ'),
+ (0xFFC4, 'M', u'ᅣ'),
+ (0xFFC5, 'M', u'ᅤ'),
+ (0xFFC6, 'M', u'ᅥ'),
+ (0xFFC7, 'M', u'ᅦ'),
+ (0xFFC8, 'X'),
+ (0xFFCA, 'M', u'ᅧ'),
+ (0xFFCB, 'M', u'ᅨ'),
+ (0xFFCC, 'M', u'ᅩ'),
+ (0xFFCD, 'M', u'ᅪ'),
+ (0xFFCE, 'M', u'ᅫ'),
+ (0xFFCF, 'M', u'ᅬ'),
+ (0xFFD0, 'X'),
+ (0xFFD2, 'M', u'ᅭ'),
+ (0xFFD3, 'M', u'ᅮ'),
+ (0xFFD4, 'M', u'ᅯ'),
+ (0xFFD5, 'M', u'ᅰ'),
+ (0xFFD6, 'M', u'ᅱ'),
+ (0xFFD7, 'M', u'ᅲ'),
+ (0xFFD8, 'X'),
+ (0xFFDA, 'M', u'ᅳ'),
+ (0xFFDB, 'M', u'ᅴ'),
+ (0xFFDC, 'M', u'ᅵ'),
+ (0xFFDD, 'X'),
+ (0xFFE0, 'M', u'¢'),
+ (0xFFE1, 'M', u'£'),
+ (0xFFE2, 'M', u'¬'),
+ (0xFFE3, '3', u' ̄'),
+ (0xFFE4, 'M', u'¦'),
+ (0xFFE5, 'M', u'¥'),
+ (0xFFE6, 'M', u'₩'),
+ (0xFFE7, 'X'),
+ (0xFFE8, 'M', u'│'),
+ (0xFFE9, 'M', u'←'),
+ (0xFFEA, 'M', u'↑'),
+ (0xFFEB, 'M', u'→'),
+ (0xFFEC, 'M', u'↓'),
+ (0xFFED, 'M', u'■'),
+ (0xFFEE, 'M', u'○'),
+ (0xFFEF, 'X'),
+ (0x10000, 'V'),
+ (0x1000C, 'X'),
+ (0x1000D, 'V'),
+ (0x10027, 'X'),
+ (0x10028, 'V'),
+ (0x1003B, 'X'),
+ (0x1003C, 'V'),
+ (0x1003E, 'X'),
+ (0x1003F, 'V'),
+ (0x1004E, 'X'),
+ (0x10050, 'V'),
+ (0x1005E, 'X'),
+ (0x10080, 'V'),
+ (0x100FB, 'X'),
+ (0x10100, 'V'),
+ (0x10103, 'X'),
+ (0x10107, 'V'),
+ (0x10134, 'X'),
+ (0x10137, 'V'),
+ (0x1018F, 'X'),
+ (0x10190, 'V'),
+ (0x1019C, 'X'),
+ (0x101A0, 'V'),
+ (0x101A1, 'X'),
+ (0x101D0, 'V'),
+ (0x101FE, 'X'),
+ (0x10280, 'V'),
+ (0x1029D, 'X'),
+ (0x102A0, 'V'),
+ (0x102D1, 'X'),
+ (0x102E0, 'V'),
+ (0x102FC, 'X'),
+ (0x10300, 'V'),
+ (0x10324, 'X'),
+ (0x1032D, 'V'),
+ (0x1034B, 'X'),
+ (0x10350, 'V'),
+ (0x1037B, 'X'),
+ (0x10380, 'V'),
+ (0x1039E, 'X'),
+ (0x1039F, 'V'),
+ (0x103C4, 'X'),
+ (0x103C8, 'V'),
+ (0x103D6, 'X'),
+ (0x10400, 'M', u'𐐨'),
+ (0x10401, 'M', u'𐐩'),
+ ]
+
+def _seg_53():
+ return [
+ (0x10402, 'M', u'𐐪'),
+ (0x10403, 'M', u'𐐫'),
+ (0x10404, 'M', u'𐐬'),
+ (0x10405, 'M', u'𐐭'),
+ (0x10406, 'M', u'𐐮'),
+ (0x10407, 'M', u'𐐯'),
+ (0x10408, 'M', u'𐐰'),
+ (0x10409, 'M', u'𐐱'),
+ (0x1040A, 'M', u'𐐲'),
+ (0x1040B, 'M', u'𐐳'),
+ (0x1040C, 'M', u'𐐴'),
+ (0x1040D, 'M', u'𐐵'),
+ (0x1040E, 'M', u'𐐶'),
+ (0x1040F, 'M', u'𐐷'),
+ (0x10410, 'M', u'𐐸'),
+ (0x10411, 'M', u'𐐹'),
+ (0x10412, 'M', u'𐐺'),
+ (0x10413, 'M', u'𐐻'),
+ (0x10414, 'M', u'𐐼'),
+ (0x10415, 'M', u'𐐽'),
+ (0x10416, 'M', u'𐐾'),
+ (0x10417, 'M', u'𐐿'),
+ (0x10418, 'M', u'𐑀'),
+ (0x10419, 'M', u'𐑁'),
+ (0x1041A, 'M', u'𐑂'),
+ (0x1041B, 'M', u'𐑃'),
+ (0x1041C, 'M', u'𐑄'),
+ (0x1041D, 'M', u'𐑅'),
+ (0x1041E, 'M', u'𐑆'),
+ (0x1041F, 'M', u'𐑇'),
+ (0x10420, 'M', u'𐑈'),
+ (0x10421, 'M', u'𐑉'),
+ (0x10422, 'M', u'𐑊'),
+ (0x10423, 'M', u'𐑋'),
+ (0x10424, 'M', u'𐑌'),
+ (0x10425, 'M', u'𐑍'),
+ (0x10426, 'M', u'𐑎'),
+ (0x10427, 'M', u'𐑏'),
+ (0x10428, 'V'),
+ (0x1049E, 'X'),
+ (0x104A0, 'V'),
+ (0x104AA, 'X'),
+ (0x104B0, 'M', u'𐓘'),
+ (0x104B1, 'M', u'𐓙'),
+ (0x104B2, 'M', u'𐓚'),
+ (0x104B3, 'M', u'𐓛'),
+ (0x104B4, 'M', u'𐓜'),
+ (0x104B5, 'M', u'𐓝'),
+ (0x104B6, 'M', u'𐓞'),
+ (0x104B7, 'M', u'𐓟'),
+ (0x104B8, 'M', u'𐓠'),
+ (0x104B9, 'M', u'𐓡'),
+ (0x104BA, 'M', u'𐓢'),
+ (0x104BB, 'M', u'𐓣'),
+ (0x104BC, 'M', u'𐓤'),
+ (0x104BD, 'M', u'𐓥'),
+ (0x104BE, 'M', u'𐓦'),
+ (0x104BF, 'M', u'𐓧'),
+ (0x104C0, 'M', u'𐓨'),
+ (0x104C1, 'M', u'𐓩'),
+ (0x104C2, 'M', u'𐓪'),
+ (0x104C3, 'M', u'𐓫'),
+ (0x104C4, 'M', u'𐓬'),
+ (0x104C5, 'M', u'𐓭'),
+ (0x104C6, 'M', u'𐓮'),
+ (0x104C7, 'M', u'𐓯'),
+ (0x104C8, 'M', u'𐓰'),
+ (0x104C9, 'M', u'𐓱'),
+ (0x104CA, 'M', u'𐓲'),
+ (0x104CB, 'M', u'𐓳'),
+ (0x104CC, 'M', u'𐓴'),
+ (0x104CD, 'M', u'𐓵'),
+ (0x104CE, 'M', u'𐓶'),
+ (0x104CF, 'M', u'𐓷'),
+ (0x104D0, 'M', u'𐓸'),
+ (0x104D1, 'M', u'𐓹'),
+ (0x104D2, 'M', u'𐓺'),
+ (0x104D3, 'M', u'𐓻'),
+ (0x104D4, 'X'),
+ (0x104D8, 'V'),
+ (0x104FC, 'X'),
+ (0x10500, 'V'),
+ (0x10528, 'X'),
+ (0x10530, 'V'),
+ (0x10564, 'X'),
+ (0x1056F, 'V'),
+ (0x10570, 'X'),
+ (0x10600, 'V'),
+ (0x10737, 'X'),
+ (0x10740, 'V'),
+ (0x10756, 'X'),
+ (0x10760, 'V'),
+ (0x10768, 'X'),
+ (0x10800, 'V'),
+ (0x10806, 'X'),
+ (0x10808, 'V'),
+ (0x10809, 'X'),
+ (0x1080A, 'V'),
+ (0x10836, 'X'),
+ (0x10837, 'V'),
+ ]
+
+def _seg_54():
+ return [
+ (0x10839, 'X'),
+ (0x1083C, 'V'),
+ (0x1083D, 'X'),
+ (0x1083F, 'V'),
+ (0x10856, 'X'),
+ (0x10857, 'V'),
+ (0x1089F, 'X'),
+ (0x108A7, 'V'),
+ (0x108B0, 'X'),
+ (0x108E0, 'V'),
+ (0x108F3, 'X'),
+ (0x108F4, 'V'),
+ (0x108F6, 'X'),
+ (0x108FB, 'V'),
+ (0x1091C, 'X'),
+ (0x1091F, 'V'),
+ (0x1093A, 'X'),
+ (0x1093F, 'V'),
+ (0x10940, 'X'),
+ (0x10980, 'V'),
+ (0x109B8, 'X'),
+ (0x109BC, 'V'),
+ (0x109D0, 'X'),
+ (0x109D2, 'V'),
+ (0x10A04, 'X'),
+ (0x10A05, 'V'),
+ (0x10A07, 'X'),
+ (0x10A0C, 'V'),
+ (0x10A14, 'X'),
+ (0x10A15, 'V'),
+ (0x10A18, 'X'),
+ (0x10A19, 'V'),
+ (0x10A36, 'X'),
+ (0x10A38, 'V'),
+ (0x10A3B, 'X'),
+ (0x10A3F, 'V'),
+ (0x10A49, 'X'),
+ (0x10A50, 'V'),
+ (0x10A59, 'X'),
+ (0x10A60, 'V'),
+ (0x10AA0, 'X'),
+ (0x10AC0, 'V'),
+ (0x10AE7, 'X'),
+ (0x10AEB, 'V'),
+ (0x10AF7, 'X'),
+ (0x10B00, 'V'),
+ (0x10B36, 'X'),
+ (0x10B39, 'V'),
+ (0x10B56, 'X'),
+ (0x10B58, 'V'),
+ (0x10B73, 'X'),
+ (0x10B78, 'V'),
+ (0x10B92, 'X'),
+ (0x10B99, 'V'),
+ (0x10B9D, 'X'),
+ (0x10BA9, 'V'),
+ (0x10BB0, 'X'),
+ (0x10C00, 'V'),
+ (0x10C49, 'X'),
+ (0x10C80, 'M', u'𐳀'),
+ (0x10C81, 'M', u'𐳁'),
+ (0x10C82, 'M', u'𐳂'),
+ (0x10C83, 'M', u'𐳃'),
+ (0x10C84, 'M', u'𐳄'),
+ (0x10C85, 'M', u'𐳅'),
+ (0x10C86, 'M', u'𐳆'),
+ (0x10C87, 'M', u'𐳇'),
+ (0x10C88, 'M', u'𐳈'),
+ (0x10C89, 'M', u'𐳉'),
+ (0x10C8A, 'M', u'𐳊'),
+ (0x10C8B, 'M', u'𐳋'),
+ (0x10C8C, 'M', u'𐳌'),
+ (0x10C8D, 'M', u'𐳍'),
+ (0x10C8E, 'M', u'𐳎'),
+ (0x10C8F, 'M', u'𐳏'),
+ (0x10C90, 'M', u'𐳐'),
+ (0x10C91, 'M', u'𐳑'),
+ (0x10C92, 'M', u'𐳒'),
+ (0x10C93, 'M', u'𐳓'),
+ (0x10C94, 'M', u'𐳔'),
+ (0x10C95, 'M', u'𐳕'),
+ (0x10C96, 'M', u'𐳖'),
+ (0x10C97, 'M', u'𐳗'),
+ (0x10C98, 'M', u'𐳘'),
+ (0x10C99, 'M', u'𐳙'),
+ (0x10C9A, 'M', u'𐳚'),
+ (0x10C9B, 'M', u'𐳛'),
+ (0x10C9C, 'M', u'𐳜'),
+ (0x10C9D, 'M', u'𐳝'),
+ (0x10C9E, 'M', u'𐳞'),
+ (0x10C9F, 'M', u'𐳟'),
+ (0x10CA0, 'M', u'𐳠'),
+ (0x10CA1, 'M', u'𐳡'),
+ (0x10CA2, 'M', u'𐳢'),
+ (0x10CA3, 'M', u'𐳣'),
+ (0x10CA4, 'M', u'𐳤'),
+ (0x10CA5, 'M', u'𐳥'),
+ (0x10CA6, 'M', u'𐳦'),
+ (0x10CA7, 'M', u'𐳧'),
+ (0x10CA8, 'M', u'𐳨'),
+ ]
+
+def _seg_55():
+ return [
+ (0x10CA9, 'M', u'𐳩'),
+ (0x10CAA, 'M', u'𐳪'),
+ (0x10CAB, 'M', u'𐳫'),
+ (0x10CAC, 'M', u'𐳬'),
+ (0x10CAD, 'M', u'𐳭'),
+ (0x10CAE, 'M', u'𐳮'),
+ (0x10CAF, 'M', u'𐳯'),
+ (0x10CB0, 'M', u'𐳰'),
+ (0x10CB1, 'M', u'𐳱'),
+ (0x10CB2, 'M', u'𐳲'),
+ (0x10CB3, 'X'),
+ (0x10CC0, 'V'),
+ (0x10CF3, 'X'),
+ (0x10CFA, 'V'),
+ (0x10D28, 'X'),
+ (0x10D30, 'V'),
+ (0x10D3A, 'X'),
+ (0x10E60, 'V'),
+ (0x10E7F, 'X'),
+ (0x10F00, 'V'),
+ (0x10F28, 'X'),
+ (0x10F30, 'V'),
+ (0x10F5A, 'X'),
+ (0x11000, 'V'),
+ (0x1104E, 'X'),
+ (0x11052, 'V'),
+ (0x11070, 'X'),
+ (0x1107F, 'V'),
+ (0x110BD, 'X'),
+ (0x110BE, 'V'),
+ (0x110C2, 'X'),
+ (0x110D0, 'V'),
+ (0x110E9, 'X'),
+ (0x110F0, 'V'),
+ (0x110FA, 'X'),
+ (0x11100, 'V'),
+ (0x11135, 'X'),
+ (0x11136, 'V'),
+ (0x11147, 'X'),
+ (0x11150, 'V'),
+ (0x11177, 'X'),
+ (0x11180, 'V'),
+ (0x111CE, 'X'),
+ (0x111D0, 'V'),
+ (0x111E0, 'X'),
+ (0x111E1, 'V'),
+ (0x111F5, 'X'),
+ (0x11200, 'V'),
+ (0x11212, 'X'),
+ (0x11213, 'V'),
+ (0x1123F, 'X'),
+ (0x11280, 'V'),
+ (0x11287, 'X'),
+ (0x11288, 'V'),
+ (0x11289, 'X'),
+ (0x1128A, 'V'),
+ (0x1128E, 'X'),
+ (0x1128F, 'V'),
+ (0x1129E, 'X'),
+ (0x1129F, 'V'),
+ (0x112AA, 'X'),
+ (0x112B0, 'V'),
+ (0x112EB, 'X'),
+ (0x112F0, 'V'),
+ (0x112FA, 'X'),
+ (0x11300, 'V'),
+ (0x11304, 'X'),
+ (0x11305, 'V'),
+ (0x1130D, 'X'),
+ (0x1130F, 'V'),
+ (0x11311, 'X'),
+ (0x11313, 'V'),
+ (0x11329, 'X'),
+ (0x1132A, 'V'),
+ (0x11331, 'X'),
+ (0x11332, 'V'),
+ (0x11334, 'X'),
+ (0x11335, 'V'),
+ (0x1133A, 'X'),
+ (0x1133B, 'V'),
+ (0x11345, 'X'),
+ (0x11347, 'V'),
+ (0x11349, 'X'),
+ (0x1134B, 'V'),
+ (0x1134E, 'X'),
+ (0x11350, 'V'),
+ (0x11351, 'X'),
+ (0x11357, 'V'),
+ (0x11358, 'X'),
+ (0x1135D, 'V'),
+ (0x11364, 'X'),
+ (0x11366, 'V'),
+ (0x1136D, 'X'),
+ (0x11370, 'V'),
+ (0x11375, 'X'),
+ (0x11400, 'V'),
+ (0x1145A, 'X'),
+ (0x1145B, 'V'),
+ (0x1145C, 'X'),
+ (0x1145D, 'V'),
+ ]
+
+def _seg_56():
+ return [
+ (0x1145F, 'X'),
+ (0x11480, 'V'),
+ (0x114C8, 'X'),
+ (0x114D0, 'V'),
+ (0x114DA, 'X'),
+ (0x11580, 'V'),
+ (0x115B6, 'X'),
+ (0x115B8, 'V'),
+ (0x115DE, 'X'),
+ (0x11600, 'V'),
+ (0x11645, 'X'),
+ (0x11650, 'V'),
+ (0x1165A, 'X'),
+ (0x11660, 'V'),
+ (0x1166D, 'X'),
+ (0x11680, 'V'),
+ (0x116B8, 'X'),
+ (0x116C0, 'V'),
+ (0x116CA, 'X'),
+ (0x11700, 'V'),
+ (0x1171B, 'X'),
+ (0x1171D, 'V'),
+ (0x1172C, 'X'),
+ (0x11730, 'V'),
+ (0x11740, 'X'),
+ (0x11800, 'V'),
+ (0x1183C, 'X'),
+ (0x118A0, 'M', u'𑣀'),
+ (0x118A1, 'M', u'𑣁'),
+ (0x118A2, 'M', u'𑣂'),
+ (0x118A3, 'M', u'𑣃'),
+ (0x118A4, 'M', u'𑣄'),
+ (0x118A5, 'M', u'𑣅'),
+ (0x118A6, 'M', u'𑣆'),
+ (0x118A7, 'M', u'𑣇'),
+ (0x118A8, 'M', u'𑣈'),
+ (0x118A9, 'M', u'𑣉'),
+ (0x118AA, 'M', u'𑣊'),
+ (0x118AB, 'M', u'𑣋'),
+ (0x118AC, 'M', u'𑣌'),
+ (0x118AD, 'M', u'𑣍'),
+ (0x118AE, 'M', u'𑣎'),
+ (0x118AF, 'M', u'𑣏'),
+ (0x118B0, 'M', u'𑣐'),
+ (0x118B1, 'M', u'𑣑'),
+ (0x118B2, 'M', u'𑣒'),
+ (0x118B3, 'M', u'𑣓'),
+ (0x118B4, 'M', u'𑣔'),
+ (0x118B5, 'M', u'𑣕'),
+ (0x118B6, 'M', u'𑣖'),
+ (0x118B7, 'M', u'𑣗'),
+ (0x118B8, 'M', u'𑣘'),
+ (0x118B9, 'M', u'𑣙'),
+ (0x118BA, 'M', u'𑣚'),
+ (0x118BB, 'M', u'𑣛'),
+ (0x118BC, 'M', u'𑣜'),
+ (0x118BD, 'M', u'𑣝'),
+ (0x118BE, 'M', u'𑣞'),
+ (0x118BF, 'M', u'𑣟'),
+ (0x118C0, 'V'),
+ (0x118F3, 'X'),
+ (0x118FF, 'V'),
+ (0x11900, 'X'),
+ (0x11A00, 'V'),
+ (0x11A48, 'X'),
+ (0x11A50, 'V'),
+ (0x11A84, 'X'),
+ (0x11A86, 'V'),
+ (0x11AA3, 'X'),
+ (0x11AC0, 'V'),
+ (0x11AF9, 'X'),
+ (0x11C00, 'V'),
+ (0x11C09, 'X'),
+ (0x11C0A, 'V'),
+ (0x11C37, 'X'),
+ (0x11C38, 'V'),
+ (0x11C46, 'X'),
+ (0x11C50, 'V'),
+ (0x11C6D, 'X'),
+ (0x11C70, 'V'),
+ (0x11C90, 'X'),
+ (0x11C92, 'V'),
+ (0x11CA8, 'X'),
+ (0x11CA9, 'V'),
+ (0x11CB7, 'X'),
+ (0x11D00, 'V'),
+ (0x11D07, 'X'),
+ (0x11D08, 'V'),
+ (0x11D0A, 'X'),
+ (0x11D0B, 'V'),
+ (0x11D37, 'X'),
+ (0x11D3A, 'V'),
+ (0x11D3B, 'X'),
+ (0x11D3C, 'V'),
+ (0x11D3E, 'X'),
+ (0x11D3F, 'V'),
+ (0x11D48, 'X'),
+ (0x11D50, 'V'),
+ (0x11D5A, 'X'),
+ (0x11D60, 'V'),
+ ]
+
+def _seg_57():
+ return [
+ (0x11D66, 'X'),
+ (0x11D67, 'V'),
+ (0x11D69, 'X'),
+ (0x11D6A, 'V'),
+ (0x11D8F, 'X'),
+ (0x11D90, 'V'),
+ (0x11D92, 'X'),
+ (0x11D93, 'V'),
+ (0x11D99, 'X'),
+ (0x11DA0, 'V'),
+ (0x11DAA, 'X'),
+ (0x11EE0, 'V'),
+ (0x11EF9, 'X'),
+ (0x12000, 'V'),
+ (0x1239A, 'X'),
+ (0x12400, 'V'),
+ (0x1246F, 'X'),
+ (0x12470, 'V'),
+ (0x12475, 'X'),
+ (0x12480, 'V'),
+ (0x12544, 'X'),
+ (0x13000, 'V'),
+ (0x1342F, 'X'),
+ (0x14400, 'V'),
+ (0x14647, 'X'),
+ (0x16800, 'V'),
+ (0x16A39, 'X'),
+ (0x16A40, 'V'),
+ (0x16A5F, 'X'),
+ (0x16A60, 'V'),
+ (0x16A6A, 'X'),
+ (0x16A6E, 'V'),
+ (0x16A70, 'X'),
+ (0x16AD0, 'V'),
+ (0x16AEE, 'X'),
+ (0x16AF0, 'V'),
+ (0x16AF6, 'X'),
+ (0x16B00, 'V'),
+ (0x16B46, 'X'),
+ (0x16B50, 'V'),
+ (0x16B5A, 'X'),
+ (0x16B5B, 'V'),
+ (0x16B62, 'X'),
+ (0x16B63, 'V'),
+ (0x16B78, 'X'),
+ (0x16B7D, 'V'),
+ (0x16B90, 'X'),
+ (0x16E60, 'V'),
+ (0x16E9B, 'X'),
+ (0x16F00, 'V'),
+ (0x16F45, 'X'),
+ (0x16F50, 'V'),
+ (0x16F7F, 'X'),
+ (0x16F8F, 'V'),
+ (0x16FA0, 'X'),
+ (0x16FE0, 'V'),
+ (0x16FE2, 'X'),
+ (0x17000, 'V'),
+ (0x187F2, 'X'),
+ (0x18800, 'V'),
+ (0x18AF3, 'X'),
+ (0x1B000, 'V'),
+ (0x1B11F, 'X'),
+ (0x1B170, 'V'),
+ (0x1B2FC, 'X'),
+ (0x1BC00, 'V'),
+ (0x1BC6B, 'X'),
+ (0x1BC70, 'V'),
+ (0x1BC7D, 'X'),
+ (0x1BC80, 'V'),
+ (0x1BC89, 'X'),
+ (0x1BC90, 'V'),
+ (0x1BC9A, 'X'),
+ (0x1BC9C, 'V'),
+ (0x1BCA0, 'I'),
+ (0x1BCA4, 'X'),
+ (0x1D000, 'V'),
+ (0x1D0F6, 'X'),
+ (0x1D100, 'V'),
+ (0x1D127, 'X'),
+ (0x1D129, 'V'),
+ (0x1D15E, 'M', u'𝅗𝅥'),
+ (0x1D15F, 'M', u'𝅘𝅥'),
+ (0x1D160, 'M', u'𝅘𝅥𝅮'),
+ (0x1D161, 'M', u'𝅘𝅥𝅯'),
+ (0x1D162, 'M', u'𝅘𝅥𝅰'),
+ (0x1D163, 'M', u'𝅘𝅥𝅱'),
+ (0x1D164, 'M', u'𝅘𝅥𝅲'),
+ (0x1D165, 'V'),
+ (0x1D173, 'X'),
+ (0x1D17B, 'V'),
+ (0x1D1BB, 'M', u'𝆹𝅥'),
+ (0x1D1BC, 'M', u'𝆺𝅥'),
+ (0x1D1BD, 'M', u'𝆹𝅥𝅮'),
+ (0x1D1BE, 'M', u'𝆺𝅥𝅮'),
+ (0x1D1BF, 'M', u'𝆹𝅥𝅯'),
+ (0x1D1C0, 'M', u'𝆺𝅥𝅯'),
+ (0x1D1C1, 'V'),
+ (0x1D1E9, 'X'),
+ (0x1D200, 'V'),
+ ]
+
+def _seg_58():
+ return [
+ (0x1D246, 'X'),
+ (0x1D2E0, 'V'),
+ (0x1D2F4, 'X'),
+ (0x1D300, 'V'),
+ (0x1D357, 'X'),
+ (0x1D360, 'V'),
+ (0x1D379, 'X'),
+ (0x1D400, 'M', u'a'),
+ (0x1D401, 'M', u'b'),
+ (0x1D402, 'M', u'c'),
+ (0x1D403, 'M', u'd'),
+ (0x1D404, 'M', u'e'),
+ (0x1D405, 'M', u'f'),
+ (0x1D406, 'M', u'g'),
+ (0x1D407, 'M', u'h'),
+ (0x1D408, 'M', u'i'),
+ (0x1D409, 'M', u'j'),
+ (0x1D40A, 'M', u'k'),
+ (0x1D40B, 'M', u'l'),
+ (0x1D40C, 'M', u'm'),
+ (0x1D40D, 'M', u'n'),
+ (0x1D40E, 'M', u'o'),
+ (0x1D40F, 'M', u'p'),
+ (0x1D410, 'M', u'q'),
+ (0x1D411, 'M', u'r'),
+ (0x1D412, 'M', u's'),
+ (0x1D413, 'M', u't'),
+ (0x1D414, 'M', u'u'),
+ (0x1D415, 'M', u'v'),
+ (0x1D416, 'M', u'w'),
+ (0x1D417, 'M', u'x'),
+ (0x1D418, 'M', u'y'),
+ (0x1D419, 'M', u'z'),
+ (0x1D41A, 'M', u'a'),
+ (0x1D41B, 'M', u'b'),
+ (0x1D41C, 'M', u'c'),
+ (0x1D41D, 'M', u'd'),
+ (0x1D41E, 'M', u'e'),
+ (0x1D41F, 'M', u'f'),
+ (0x1D420, 'M', u'g'),
+ (0x1D421, 'M', u'h'),
+ (0x1D422, 'M', u'i'),
+ (0x1D423, 'M', u'j'),
+ (0x1D424, 'M', u'k'),
+ (0x1D425, 'M', u'l'),
+ (0x1D426, 'M', u'm'),
+ (0x1D427, 'M', u'n'),
+ (0x1D428, 'M', u'o'),
+ (0x1D429, 'M', u'p'),
+ (0x1D42A, 'M', u'q'),
+ (0x1D42B, 'M', u'r'),
+ (0x1D42C, 'M', u's'),
+ (0x1D42D, 'M', u't'),
+ (0x1D42E, 'M', u'u'),
+ (0x1D42F, 'M', u'v'),
+ (0x1D430, 'M', u'w'),
+ (0x1D431, 'M', u'x'),
+ (0x1D432, 'M', u'y'),
+ (0x1D433, 'M', u'z'),
+ (0x1D434, 'M', u'a'),
+ (0x1D435, 'M', u'b'),
+ (0x1D436, 'M', u'c'),
+ (0x1D437, 'M', u'd'),
+ (0x1D438, 'M', u'e'),
+ (0x1D439, 'M', u'f'),
+ (0x1D43A, 'M', u'g'),
+ (0x1D43B, 'M', u'h'),
+ (0x1D43C, 'M', u'i'),
+ (0x1D43D, 'M', u'j'),
+ (0x1D43E, 'M', u'k'),
+ (0x1D43F, 'M', u'l'),
+ (0x1D440, 'M', u'm'),
+ (0x1D441, 'M', u'n'),
+ (0x1D442, 'M', u'o'),
+ (0x1D443, 'M', u'p'),
+ (0x1D444, 'M', u'q'),
+ (0x1D445, 'M', u'r'),
+ (0x1D446, 'M', u's'),
+ (0x1D447, 'M', u't'),
+ (0x1D448, 'M', u'u'),
+ (0x1D449, 'M', u'v'),
+ (0x1D44A, 'M', u'w'),
+ (0x1D44B, 'M', u'x'),
+ (0x1D44C, 'M', u'y'),
+ (0x1D44D, 'M', u'z'),
+ (0x1D44E, 'M', u'a'),
+ (0x1D44F, 'M', u'b'),
+ (0x1D450, 'M', u'c'),
+ (0x1D451, 'M', u'd'),
+ (0x1D452, 'M', u'e'),
+ (0x1D453, 'M', u'f'),
+ (0x1D454, 'M', u'g'),
+ (0x1D455, 'X'),
+ (0x1D456, 'M', u'i'),
+ (0x1D457, 'M', u'j'),
+ (0x1D458, 'M', u'k'),
+ (0x1D459, 'M', u'l'),
+ (0x1D45A, 'M', u'm'),
+ (0x1D45B, 'M', u'n'),
+ (0x1D45C, 'M', u'o'),
+ ]
+
+def _seg_59():
+ return [
+ (0x1D45D, 'M', u'p'),
+ (0x1D45E, 'M', u'q'),
+ (0x1D45F, 'M', u'r'),
+ (0x1D460, 'M', u's'),
+ (0x1D461, 'M', u't'),
+ (0x1D462, 'M', u'u'),
+ (0x1D463, 'M', u'v'),
+ (0x1D464, 'M', u'w'),
+ (0x1D465, 'M', u'x'),
+ (0x1D466, 'M', u'y'),
+ (0x1D467, 'M', u'z'),
+ (0x1D468, 'M', u'a'),
+ (0x1D469, 'M', u'b'),
+ (0x1D46A, 'M', u'c'),
+ (0x1D46B, 'M', u'd'),
+ (0x1D46C, 'M', u'e'),
+ (0x1D46D, 'M', u'f'),
+ (0x1D46E, 'M', u'g'),
+ (0x1D46F, 'M', u'h'),
+ (0x1D470, 'M', u'i'),
+ (0x1D471, 'M', u'j'),
+ (0x1D472, 'M', u'k'),
+ (0x1D473, 'M', u'l'),
+ (0x1D474, 'M', u'm'),
+ (0x1D475, 'M', u'n'),
+ (0x1D476, 'M', u'o'),
+ (0x1D477, 'M', u'p'),
+ (0x1D478, 'M', u'q'),
+ (0x1D479, 'M', u'r'),
+ (0x1D47A, 'M', u's'),
+ (0x1D47B, 'M', u't'),
+ (0x1D47C, 'M', u'u'),
+ (0x1D47D, 'M', u'v'),
+ (0x1D47E, 'M', u'w'),
+ (0x1D47F, 'M', u'x'),
+ (0x1D480, 'M', u'y'),
+ (0x1D481, 'M', u'z'),
+ (0x1D482, 'M', u'a'),
+ (0x1D483, 'M', u'b'),
+ (0x1D484, 'M', u'c'),
+ (0x1D485, 'M', u'd'),
+ (0x1D486, 'M', u'e'),
+ (0x1D487, 'M', u'f'),
+ (0x1D488, 'M', u'g'),
+ (0x1D489, 'M', u'h'),
+ (0x1D48A, 'M', u'i'),
+ (0x1D48B, 'M', u'j'),
+ (0x1D48C, 'M', u'k'),
+ (0x1D48D, 'M', u'l'),
+ (0x1D48E, 'M', u'm'),
+ (0x1D48F, 'M', u'n'),
+ (0x1D490, 'M', u'o'),
+ (0x1D491, 'M', u'p'),
+ (0x1D492, 'M', u'q'),
+ (0x1D493, 'M', u'r'),
+ (0x1D494, 'M', u's'),
+ (0x1D495, 'M', u't'),
+ (0x1D496, 'M', u'u'),
+ (0x1D497, 'M', u'v'),
+ (0x1D498, 'M', u'w'),
+ (0x1D499, 'M', u'x'),
+ (0x1D49A, 'M', u'y'),
+ (0x1D49B, 'M', u'z'),
+ (0x1D49C, 'M', u'a'),
+ (0x1D49D, 'X'),
+ (0x1D49E, 'M', u'c'),
+ (0x1D49F, 'M', u'd'),
+ (0x1D4A0, 'X'),
+ (0x1D4A2, 'M', u'g'),
+ (0x1D4A3, 'X'),
+ (0x1D4A5, 'M', u'j'),
+ (0x1D4A6, 'M', u'k'),
+ (0x1D4A7, 'X'),
+ (0x1D4A9, 'M', u'n'),
+ (0x1D4AA, 'M', u'o'),
+ (0x1D4AB, 'M', u'p'),
+ (0x1D4AC, 'M', u'q'),
+ (0x1D4AD, 'X'),
+ (0x1D4AE, 'M', u's'),
+ (0x1D4AF, 'M', u't'),
+ (0x1D4B0, 'M', u'u'),
+ (0x1D4B1, 'M', u'v'),
+ (0x1D4B2, 'M', u'w'),
+ (0x1D4B3, 'M', u'x'),
+ (0x1D4B4, 'M', u'y'),
+ (0x1D4B5, 'M', u'z'),
+ (0x1D4B6, 'M', u'a'),
+ (0x1D4B7, 'M', u'b'),
+ (0x1D4B8, 'M', u'c'),
+ (0x1D4B9, 'M', u'd'),
+ (0x1D4BA, 'X'),
+ (0x1D4BB, 'M', u'f'),
+ (0x1D4BC, 'X'),
+ (0x1D4BD, 'M', u'h'),
+ (0x1D4BE, 'M', u'i'),
+ (0x1D4BF, 'M', u'j'),
+ (0x1D4C0, 'M', u'k'),
+ (0x1D4C1, 'M', u'l'),
+ (0x1D4C2, 'M', u'm'),
+ (0x1D4C3, 'M', u'n'),
+ ]
+
+def _seg_60():
+ return [
+ (0x1D4C4, 'X'),
+ (0x1D4C5, 'M', u'p'),
+ (0x1D4C6, 'M', u'q'),
+ (0x1D4C7, 'M', u'r'),
+ (0x1D4C8, 'M', u's'),
+ (0x1D4C9, 'M', u't'),
+ (0x1D4CA, 'M', u'u'),
+ (0x1D4CB, 'M', u'v'),
+ (0x1D4CC, 'M', u'w'),
+ (0x1D4CD, 'M', u'x'),
+ (0x1D4CE, 'M', u'y'),
+ (0x1D4CF, 'M', u'z'),
+ (0x1D4D0, 'M', u'a'),
+ (0x1D4D1, 'M', u'b'),
+ (0x1D4D2, 'M', u'c'),
+ (0x1D4D3, 'M', u'd'),
+ (0x1D4D4, 'M', u'e'),
+ (0x1D4D5, 'M', u'f'),
+ (0x1D4D6, 'M', u'g'),
+ (0x1D4D7, 'M', u'h'),
+ (0x1D4D8, 'M', u'i'),
+ (0x1D4D9, 'M', u'j'),
+ (0x1D4DA, 'M', u'k'),
+ (0x1D4DB, 'M', u'l'),
+ (0x1D4DC, 'M', u'm'),
+ (0x1D4DD, 'M', u'n'),
+ (0x1D4DE, 'M', u'o'),
+ (0x1D4DF, 'M', u'p'),
+ (0x1D4E0, 'M', u'q'),
+ (0x1D4E1, 'M', u'r'),
+ (0x1D4E2, 'M', u's'),
+ (0x1D4E3, 'M', u't'),
+ (0x1D4E4, 'M', u'u'),
+ (0x1D4E5, 'M', u'v'),
+ (0x1D4E6, 'M', u'w'),
+ (0x1D4E7, 'M', u'x'),
+ (0x1D4E8, 'M', u'y'),
+ (0x1D4E9, 'M', u'z'),
+ (0x1D4EA, 'M', u'a'),
+ (0x1D4EB, 'M', u'b'),
+ (0x1D4EC, 'M', u'c'),
+ (0x1D4ED, 'M', u'd'),
+ (0x1D4EE, 'M', u'e'),
+ (0x1D4EF, 'M', u'f'),
+ (0x1D4F0, 'M', u'g'),
+ (0x1D4F1, 'M', u'h'),
+ (0x1D4F2, 'M', u'i'),
+ (0x1D4F3, 'M', u'j'),
+ (0x1D4F4, 'M', u'k'),
+ (0x1D4F5, 'M', u'l'),
+ (0x1D4F6, 'M', u'm'),
+ (0x1D4F7, 'M', u'n'),
+ (0x1D4F8, 'M', u'o'),
+ (0x1D4F9, 'M', u'p'),
+ (0x1D4FA, 'M', u'q'),
+ (0x1D4FB, 'M', u'r'),
+ (0x1D4FC, 'M', u's'),
+ (0x1D4FD, 'M', u't'),
+ (0x1D4FE, 'M', u'u'),
+ (0x1D4FF, 'M', u'v'),
+ (0x1D500, 'M', u'w'),
+ (0x1D501, 'M', u'x'),
+ (0x1D502, 'M', u'y'),
+ (0x1D503, 'M', u'z'),
+ (0x1D504, 'M', u'a'),
+ (0x1D505, 'M', u'b'),
+ (0x1D506, 'X'),
+ (0x1D507, 'M', u'd'),
+ (0x1D508, 'M', u'e'),
+ (0x1D509, 'M', u'f'),
+ (0x1D50A, 'M', u'g'),
+ (0x1D50B, 'X'),
+ (0x1D50D, 'M', u'j'),
+ (0x1D50E, 'M', u'k'),
+ (0x1D50F, 'M', u'l'),
+ (0x1D510, 'M', u'm'),
+ (0x1D511, 'M', u'n'),
+ (0x1D512, 'M', u'o'),
+ (0x1D513, 'M', u'p'),
+ (0x1D514, 'M', u'q'),
+ (0x1D515, 'X'),
+ (0x1D516, 'M', u's'),
+ (0x1D517, 'M', u't'),
+ (0x1D518, 'M', u'u'),
+ (0x1D519, 'M', u'v'),
+ (0x1D51A, 'M', u'w'),
+ (0x1D51B, 'M', u'x'),
+ (0x1D51C, 'M', u'y'),
+ (0x1D51D, 'X'),
+ (0x1D51E, 'M', u'a'),
+ (0x1D51F, 'M', u'b'),
+ (0x1D520, 'M', u'c'),
+ (0x1D521, 'M', u'd'),
+ (0x1D522, 'M', u'e'),
+ (0x1D523, 'M', u'f'),
+ (0x1D524, 'M', u'g'),
+ (0x1D525, 'M', u'h'),
+ (0x1D526, 'M', u'i'),
+ (0x1D527, 'M', u'j'),
+ (0x1D528, 'M', u'k'),
+ ]
+
+def _seg_61():
+ return [
+ (0x1D529, 'M', u'l'),
+ (0x1D52A, 'M', u'm'),
+ (0x1D52B, 'M', u'n'),
+ (0x1D52C, 'M', u'o'),
+ (0x1D52D, 'M', u'p'),
+ (0x1D52E, 'M', u'q'),
+ (0x1D52F, 'M', u'r'),
+ (0x1D530, 'M', u's'),
+ (0x1D531, 'M', u't'),
+ (0x1D532, 'M', u'u'),
+ (0x1D533, 'M', u'v'),
+ (0x1D534, 'M', u'w'),
+ (0x1D535, 'M', u'x'),
+ (0x1D536, 'M', u'y'),
+ (0x1D537, 'M', u'z'),
+ (0x1D538, 'M', u'a'),
+ (0x1D539, 'M', u'b'),
+ (0x1D53A, 'X'),
+ (0x1D53B, 'M', u'd'),
+ (0x1D53C, 'M', u'e'),
+ (0x1D53D, 'M', u'f'),
+ (0x1D53E, 'M', u'g'),
+ (0x1D53F, 'X'),
+ (0x1D540, 'M', u'i'),
+ (0x1D541, 'M', u'j'),
+ (0x1D542, 'M', u'k'),
+ (0x1D543, 'M', u'l'),
+ (0x1D544, 'M', u'm'),
+ (0x1D545, 'X'),
+ (0x1D546, 'M', u'o'),
+ (0x1D547, 'X'),
+ (0x1D54A, 'M', u's'),
+ (0x1D54B, 'M', u't'),
+ (0x1D54C, 'M', u'u'),
+ (0x1D54D, 'M', u'v'),
+ (0x1D54E, 'M', u'w'),
+ (0x1D54F, 'M', u'x'),
+ (0x1D550, 'M', u'y'),
+ (0x1D551, 'X'),
+ (0x1D552, 'M', u'a'),
+ (0x1D553, 'M', u'b'),
+ (0x1D554, 'M', u'c'),
+ (0x1D555, 'M', u'd'),
+ (0x1D556, 'M', u'e'),
+ (0x1D557, 'M', u'f'),
+ (0x1D558, 'M', u'g'),
+ (0x1D559, 'M', u'h'),
+ (0x1D55A, 'M', u'i'),
+ (0x1D55B, 'M', u'j'),
+ (0x1D55C, 'M', u'k'),
+ (0x1D55D, 'M', u'l'),
+ (0x1D55E, 'M', u'm'),
+ (0x1D55F, 'M', u'n'),
+ (0x1D560, 'M', u'o'),
+ (0x1D561, 'M', u'p'),
+ (0x1D562, 'M', u'q'),
+ (0x1D563, 'M', u'r'),
+ (0x1D564, 'M', u's'),
+ (0x1D565, 'M', u't'),
+ (0x1D566, 'M', u'u'),
+ (0x1D567, 'M', u'v'),
+ (0x1D568, 'M', u'w'),
+ (0x1D569, 'M', u'x'),
+ (0x1D56A, 'M', u'y'),
+ (0x1D56B, 'M', u'z'),
+ (0x1D56C, 'M', u'a'),
+ (0x1D56D, 'M', u'b'),
+ (0x1D56E, 'M', u'c'),
+ (0x1D56F, 'M', u'd'),
+ (0x1D570, 'M', u'e'),
+ (0x1D571, 'M', u'f'),
+ (0x1D572, 'M', u'g'),
+ (0x1D573, 'M', u'h'),
+ (0x1D574, 'M', u'i'),
+ (0x1D575, 'M', u'j'),
+ (0x1D576, 'M', u'k'),
+ (0x1D577, 'M', u'l'),
+ (0x1D578, 'M', u'm'),
+ (0x1D579, 'M', u'n'),
+ (0x1D57A, 'M', u'o'),
+ (0x1D57B, 'M', u'p'),
+ (0x1D57C, 'M', u'q'),
+ (0x1D57D, 'M', u'r'),
+ (0x1D57E, 'M', u's'),
+ (0x1D57F, 'M', u't'),
+ (0x1D580, 'M', u'u'),
+ (0x1D581, 'M', u'v'),
+ (0x1D582, 'M', u'w'),
+ (0x1D583, 'M', u'x'),
+ (0x1D584, 'M', u'y'),
+ (0x1D585, 'M', u'z'),
+ (0x1D586, 'M', u'a'),
+ (0x1D587, 'M', u'b'),
+ (0x1D588, 'M', u'c'),
+ (0x1D589, 'M', u'd'),
+ (0x1D58A, 'M', u'e'),
+ (0x1D58B, 'M', u'f'),
+ (0x1D58C, 'M', u'g'),
+ (0x1D58D, 'M', u'h'),
+ (0x1D58E, 'M', u'i'),
+ ]
+
+def _seg_62():
+ return [
+ (0x1D58F, 'M', u'j'),
+ (0x1D590, 'M', u'k'),
+ (0x1D591, 'M', u'l'),
+ (0x1D592, 'M', u'm'),
+ (0x1D593, 'M', u'n'),
+ (0x1D594, 'M', u'o'),
+ (0x1D595, 'M', u'p'),
+ (0x1D596, 'M', u'q'),
+ (0x1D597, 'M', u'r'),
+ (0x1D598, 'M', u's'),
+ (0x1D599, 'M', u't'),
+ (0x1D59A, 'M', u'u'),
+ (0x1D59B, 'M', u'v'),
+ (0x1D59C, 'M', u'w'),
+ (0x1D59D, 'M', u'x'),
+ (0x1D59E, 'M', u'y'),
+ (0x1D59F, 'M', u'z'),
+ (0x1D5A0, 'M', u'a'),
+ (0x1D5A1, 'M', u'b'),
+ (0x1D5A2, 'M', u'c'),
+ (0x1D5A3, 'M', u'd'),
+ (0x1D5A4, 'M', u'e'),
+ (0x1D5A5, 'M', u'f'),
+ (0x1D5A6, 'M', u'g'),
+ (0x1D5A7, 'M', u'h'),
+ (0x1D5A8, 'M', u'i'),
+ (0x1D5A9, 'M', u'j'),
+ (0x1D5AA, 'M', u'k'),
+ (0x1D5AB, 'M', u'l'),
+ (0x1D5AC, 'M', u'm'),
+ (0x1D5AD, 'M', u'n'),
+ (0x1D5AE, 'M', u'o'),
+ (0x1D5AF, 'M', u'p'),
+ (0x1D5B0, 'M', u'q'),
+ (0x1D5B1, 'M', u'r'),
+ (0x1D5B2, 'M', u's'),
+ (0x1D5B3, 'M', u't'),
+ (0x1D5B4, 'M', u'u'),
+ (0x1D5B5, 'M', u'v'),
+ (0x1D5B6, 'M', u'w'),
+ (0x1D5B7, 'M', u'x'),
+ (0x1D5B8, 'M', u'y'),
+ (0x1D5B9, 'M', u'z'),
+ (0x1D5BA, 'M', u'a'),
+ (0x1D5BB, 'M', u'b'),
+ (0x1D5BC, 'M', u'c'),
+ (0x1D5BD, 'M', u'd'),
+ (0x1D5BE, 'M', u'e'),
+ (0x1D5BF, 'M', u'f'),
+ (0x1D5C0, 'M', u'g'),
+ (0x1D5C1, 'M', u'h'),
+ (0x1D5C2, 'M', u'i'),
+ (0x1D5C3, 'M', u'j'),
+ (0x1D5C4, 'M', u'k'),
+ (0x1D5C5, 'M', u'l'),
+ (0x1D5C6, 'M', u'm'),
+ (0x1D5C7, 'M', u'n'),
+ (0x1D5C8, 'M', u'o'),
+ (0x1D5C9, 'M', u'p'),
+ (0x1D5CA, 'M', u'q'),
+ (0x1D5CB, 'M', u'r'),
+ (0x1D5CC, 'M', u's'),
+ (0x1D5CD, 'M', u't'),
+ (0x1D5CE, 'M', u'u'),
+ (0x1D5CF, 'M', u'v'),
+ (0x1D5D0, 'M', u'w'),
+ (0x1D5D1, 'M', u'x'),
+ (0x1D5D2, 'M', u'y'),
+ (0x1D5D3, 'M', u'z'),
+ (0x1D5D4, 'M', u'a'),
+ (0x1D5D5, 'M', u'b'),
+ (0x1D5D6, 'M', u'c'),
+ (0x1D5D7, 'M', u'd'),
+ (0x1D5D8, 'M', u'e'),
+ (0x1D5D9, 'M', u'f'),
+ (0x1D5DA, 'M', u'g'),
+ (0x1D5DB, 'M', u'h'),
+ (0x1D5DC, 'M', u'i'),
+ (0x1D5DD, 'M', u'j'),
+ (0x1D5DE, 'M', u'k'),
+ (0x1D5DF, 'M', u'l'),
+ (0x1D5E0, 'M', u'm'),
+ (0x1D5E1, 'M', u'n'),
+ (0x1D5E2, 'M', u'o'),
+ (0x1D5E3, 'M', u'p'),
+ (0x1D5E4, 'M', u'q'),
+ (0x1D5E5, 'M', u'r'),
+ (0x1D5E6, 'M', u's'),
+ (0x1D5E7, 'M', u't'),
+ (0x1D5E8, 'M', u'u'),
+ (0x1D5E9, 'M', u'v'),
+ (0x1D5EA, 'M', u'w'),
+ (0x1D5EB, 'M', u'x'),
+ (0x1D5EC, 'M', u'y'),
+ (0x1D5ED, 'M', u'z'),
+ (0x1D5EE, 'M', u'a'),
+ (0x1D5EF, 'M', u'b'),
+ (0x1D5F0, 'M', u'c'),
+ (0x1D5F1, 'M', u'd'),
+ (0x1D5F2, 'M', u'e'),
+ ]
+
+def _seg_63():
+ return [
+ (0x1D5F3, 'M', u'f'),
+ (0x1D5F4, 'M', u'g'),
+ (0x1D5F5, 'M', u'h'),
+ (0x1D5F6, 'M', u'i'),
+ (0x1D5F7, 'M', u'j'),
+ (0x1D5F8, 'M', u'k'),
+ (0x1D5F9, 'M', u'l'),
+ (0x1D5FA, 'M', u'm'),
+ (0x1D5FB, 'M', u'n'),
+ (0x1D5FC, 'M', u'o'),
+ (0x1D5FD, 'M', u'p'),
+ (0x1D5FE, 'M', u'q'),
+ (0x1D5FF, 'M', u'r'),
+ (0x1D600, 'M', u's'),
+ (0x1D601, 'M', u't'),
+ (0x1D602, 'M', u'u'),
+ (0x1D603, 'M', u'v'),
+ (0x1D604, 'M', u'w'),
+ (0x1D605, 'M', u'x'),
+ (0x1D606, 'M', u'y'),
+ (0x1D607, 'M', u'z'),
+ (0x1D608, 'M', u'a'),
+ (0x1D609, 'M', u'b'),
+ (0x1D60A, 'M', u'c'),
+ (0x1D60B, 'M', u'd'),
+ (0x1D60C, 'M', u'e'),
+ (0x1D60D, 'M', u'f'),
+ (0x1D60E, 'M', u'g'),
+ (0x1D60F, 'M', u'h'),
+ (0x1D610, 'M', u'i'),
+ (0x1D611, 'M', u'j'),
+ (0x1D612, 'M', u'k'),
+ (0x1D613, 'M', u'l'),
+ (0x1D614, 'M', u'm'),
+ (0x1D615, 'M', u'n'),
+ (0x1D616, 'M', u'o'),
+ (0x1D617, 'M', u'p'),
+ (0x1D618, 'M', u'q'),
+ (0x1D619, 'M', u'r'),
+ (0x1D61A, 'M', u's'),
+ (0x1D61B, 'M', u't'),
+ (0x1D61C, 'M', u'u'),
+ (0x1D61D, 'M', u'v'),
+ (0x1D61E, 'M', u'w'),
+ (0x1D61F, 'M', u'x'),
+ (0x1D620, 'M', u'y'),
+ (0x1D621, 'M', u'z'),
+ (0x1D622, 'M', u'a'),
+ (0x1D623, 'M', u'b'),
+ (0x1D624, 'M', u'c'),
+ (0x1D625, 'M', u'd'),
+ (0x1D626, 'M', u'e'),
+ (0x1D627, 'M', u'f'),
+ (0x1D628, 'M', u'g'),
+ (0x1D629, 'M', u'h'),
+ (0x1D62A, 'M', u'i'),
+ (0x1D62B, 'M', u'j'),
+ (0x1D62C, 'M', u'k'),
+ (0x1D62D, 'M', u'l'),
+ (0x1D62E, 'M', u'm'),
+ (0x1D62F, 'M', u'n'),
+ (0x1D630, 'M', u'o'),
+ (0x1D631, 'M', u'p'),
+ (0x1D632, 'M', u'q'),
+ (0x1D633, 'M', u'r'),
+ (0x1D634, 'M', u's'),
+ (0x1D635, 'M', u't'),
+ (0x1D636, 'M', u'u'),
+ (0x1D637, 'M', u'v'),
+ (0x1D638, 'M', u'w'),
+ (0x1D639, 'M', u'x'),
+ (0x1D63A, 'M', u'y'),
+ (0x1D63B, 'M', u'z'),
+ (0x1D63C, 'M', u'a'),
+ (0x1D63D, 'M', u'b'),
+ (0x1D63E, 'M', u'c'),
+ (0x1D63F, 'M', u'd'),
+ (0x1D640, 'M', u'e'),
+ (0x1D641, 'M', u'f'),
+ (0x1D642, 'M', u'g'),
+ (0x1D643, 'M', u'h'),
+ (0x1D644, 'M', u'i'),
+ (0x1D645, 'M', u'j'),
+ (0x1D646, 'M', u'k'),
+ (0x1D647, 'M', u'l'),
+ (0x1D648, 'M', u'm'),
+ (0x1D649, 'M', u'n'),
+ (0x1D64A, 'M', u'o'),
+ (0x1D64B, 'M', u'p'),
+ (0x1D64C, 'M', u'q'),
+ (0x1D64D, 'M', u'r'),
+ (0x1D64E, 'M', u's'),
+ (0x1D64F, 'M', u't'),
+ (0x1D650, 'M', u'u'),
+ (0x1D651, 'M', u'v'),
+ (0x1D652, 'M', u'w'),
+ (0x1D653, 'M', u'x'),
+ (0x1D654, 'M', u'y'),
+ (0x1D655, 'M', u'z'),
+ (0x1D656, 'M', u'a'),
+ ]
+
+def _seg_64():
+ return [
+ (0x1D657, 'M', u'b'),
+ (0x1D658, 'M', u'c'),
+ (0x1D659, 'M', u'd'),
+ (0x1D65A, 'M', u'e'),
+ (0x1D65B, 'M', u'f'),
+ (0x1D65C, 'M', u'g'),
+ (0x1D65D, 'M', u'h'),
+ (0x1D65E, 'M', u'i'),
+ (0x1D65F, 'M', u'j'),
+ (0x1D660, 'M', u'k'),
+ (0x1D661, 'M', u'l'),
+ (0x1D662, 'M', u'm'),
+ (0x1D663, 'M', u'n'),
+ (0x1D664, 'M', u'o'),
+ (0x1D665, 'M', u'p'),
+ (0x1D666, 'M', u'q'),
+ (0x1D667, 'M', u'r'),
+ (0x1D668, 'M', u's'),
+ (0x1D669, 'M', u't'),
+ (0x1D66A, 'M', u'u'),
+ (0x1D66B, 'M', u'v'),
+ (0x1D66C, 'M', u'w'),
+ (0x1D66D, 'M', u'x'),
+ (0x1D66E, 'M', u'y'),
+ (0x1D66F, 'M', u'z'),
+ (0x1D670, 'M', u'a'),
+ (0x1D671, 'M', u'b'),
+ (0x1D672, 'M', u'c'),
+ (0x1D673, 'M', u'd'),
+ (0x1D674, 'M', u'e'),
+ (0x1D675, 'M', u'f'),
+ (0x1D676, 'M', u'g'),
+ (0x1D677, 'M', u'h'),
+ (0x1D678, 'M', u'i'),
+ (0x1D679, 'M', u'j'),
+ (0x1D67A, 'M', u'k'),
+ (0x1D67B, 'M', u'l'),
+ (0x1D67C, 'M', u'm'),
+ (0x1D67D, 'M', u'n'),
+ (0x1D67E, 'M', u'o'),
+ (0x1D67F, 'M', u'p'),
+ (0x1D680, 'M', u'q'),
+ (0x1D681, 'M', u'r'),
+ (0x1D682, 'M', u's'),
+ (0x1D683, 'M', u't'),
+ (0x1D684, 'M', u'u'),
+ (0x1D685, 'M', u'v'),
+ (0x1D686, 'M', u'w'),
+ (0x1D687, 'M', u'x'),
+ (0x1D688, 'M', u'y'),
+ (0x1D689, 'M', u'z'),
+ (0x1D68A, 'M', u'a'),
+ (0x1D68B, 'M', u'b'),
+ (0x1D68C, 'M', u'c'),
+ (0x1D68D, 'M', u'd'),
+ (0x1D68E, 'M', u'e'),
+ (0x1D68F, 'M', u'f'),
+ (0x1D690, 'M', u'g'),
+ (0x1D691, 'M', u'h'),
+ (0x1D692, 'M', u'i'),
+ (0x1D693, 'M', u'j'),
+ (0x1D694, 'M', u'k'),
+ (0x1D695, 'M', u'l'),
+ (0x1D696, 'M', u'm'),
+ (0x1D697, 'M', u'n'),
+ (0x1D698, 'M', u'o'),
+ (0x1D699, 'M', u'p'),
+ (0x1D69A, 'M', u'q'),
+ (0x1D69B, 'M', u'r'),
+ (0x1D69C, 'M', u's'),
+ (0x1D69D, 'M', u't'),
+ (0x1D69E, 'M', u'u'),
+ (0x1D69F, 'M', u'v'),
+ (0x1D6A0, 'M', u'w'),
+ (0x1D6A1, 'M', u'x'),
+ (0x1D6A2, 'M', u'y'),
+ (0x1D6A3, 'M', u'z'),
+ (0x1D6A4, 'M', u'ı'),
+ (0x1D6A5, 'M', u'ȷ'),
+ (0x1D6A6, 'X'),
+ (0x1D6A8, 'M', u'α'),
+ (0x1D6A9, 'M', u'β'),
+ (0x1D6AA, 'M', u'γ'),
+ (0x1D6AB, 'M', u'δ'),
+ (0x1D6AC, 'M', u'ε'),
+ (0x1D6AD, 'M', u'ζ'),
+ (0x1D6AE, 'M', u'η'),
+ (0x1D6AF, 'M', u'θ'),
+ (0x1D6B0, 'M', u'ι'),
+ (0x1D6B1, 'M', u'κ'),
+ (0x1D6B2, 'M', u'λ'),
+ (0x1D6B3, 'M', u'μ'),
+ (0x1D6B4, 'M', u'ν'),
+ (0x1D6B5, 'M', u'ξ'),
+ (0x1D6B6, 'M', u'ο'),
+ (0x1D6B7, 'M', u'π'),
+ (0x1D6B8, 'M', u'ρ'),
+ (0x1D6B9, 'M', u'θ'),
+ (0x1D6BA, 'M', u'σ'),
+ (0x1D6BB, 'M', u'τ'),
+ ]
+
+def _seg_65():
+ return [
+ (0x1D6BC, 'M', u'υ'),
+ (0x1D6BD, 'M', u'φ'),
+ (0x1D6BE, 'M', u'χ'),
+ (0x1D6BF, 'M', u'ψ'),
+ (0x1D6C0, 'M', u'ω'),
+ (0x1D6C1, 'M', u'∇'),
+ (0x1D6C2, 'M', u'α'),
+ (0x1D6C3, 'M', u'β'),
+ (0x1D6C4, 'M', u'γ'),
+ (0x1D6C5, 'M', u'δ'),
+ (0x1D6C6, 'M', u'ε'),
+ (0x1D6C7, 'M', u'ζ'),
+ (0x1D6C8, 'M', u'η'),
+ (0x1D6C9, 'M', u'θ'),
+ (0x1D6CA, 'M', u'ι'),
+ (0x1D6CB, 'M', u'κ'),
+ (0x1D6CC, 'M', u'λ'),
+ (0x1D6CD, 'M', u'μ'),
+ (0x1D6CE, 'M', u'ν'),
+ (0x1D6CF, 'M', u'ξ'),
+ (0x1D6D0, 'M', u'ο'),
+ (0x1D6D1, 'M', u'π'),
+ (0x1D6D2, 'M', u'ρ'),
+ (0x1D6D3, 'M', u'σ'),
+ (0x1D6D5, 'M', u'τ'),
+ (0x1D6D6, 'M', u'υ'),
+ (0x1D6D7, 'M', u'φ'),
+ (0x1D6D8, 'M', u'χ'),
+ (0x1D6D9, 'M', u'ψ'),
+ (0x1D6DA, 'M', u'ω'),
+ (0x1D6DB, 'M', u'∂'),
+ (0x1D6DC, 'M', u'ε'),
+ (0x1D6DD, 'M', u'θ'),
+ (0x1D6DE, 'M', u'κ'),
+ (0x1D6DF, 'M', u'φ'),
+ (0x1D6E0, 'M', u'ρ'),
+ (0x1D6E1, 'M', u'π'),
+ (0x1D6E2, 'M', u'α'),
+ (0x1D6E3, 'M', u'β'),
+ (0x1D6E4, 'M', u'γ'),
+ (0x1D6E5, 'M', u'δ'),
+ (0x1D6E6, 'M', u'ε'),
+ (0x1D6E7, 'M', u'ζ'),
+ (0x1D6E8, 'M', u'η'),
+ (0x1D6E9, 'M', u'θ'),
+ (0x1D6EA, 'M', u'ι'),
+ (0x1D6EB, 'M', u'κ'),
+ (0x1D6EC, 'M', u'λ'),
+ (0x1D6ED, 'M', u'μ'),
+ (0x1D6EE, 'M', u'ν'),
+ (0x1D6EF, 'M', u'ξ'),
+ (0x1D6F0, 'M', u'ο'),
+ (0x1D6F1, 'M', u'π'),
+ (0x1D6F2, 'M', u'ρ'),
+ (0x1D6F3, 'M', u'θ'),
+ (0x1D6F4, 'M', u'σ'),
+ (0x1D6F5, 'M', u'τ'),
+ (0x1D6F6, 'M', u'υ'),
+ (0x1D6F7, 'M', u'φ'),
+ (0x1D6F8, 'M', u'χ'),
+ (0x1D6F9, 'M', u'ψ'),
+ (0x1D6FA, 'M', u'ω'),
+ (0x1D6FB, 'M', u'∇'),
+ (0x1D6FC, 'M', u'α'),
+ (0x1D6FD, 'M', u'β'),
+ (0x1D6FE, 'M', u'γ'),
+ (0x1D6FF, 'M', u'δ'),
+ (0x1D700, 'M', u'ε'),
+ (0x1D701, 'M', u'ζ'),
+ (0x1D702, 'M', u'η'),
+ (0x1D703, 'M', u'θ'),
+ (0x1D704, 'M', u'ι'),
+ (0x1D705, 'M', u'κ'),
+ (0x1D706, 'M', u'λ'),
+ (0x1D707, 'M', u'μ'),
+ (0x1D708, 'M', u'ν'),
+ (0x1D709, 'M', u'ξ'),
+ (0x1D70A, 'M', u'ο'),
+ (0x1D70B, 'M', u'π'),
+ (0x1D70C, 'M', u'ρ'),
+ (0x1D70D, 'M', u'σ'),
+ (0x1D70F, 'M', u'τ'),
+ (0x1D710, 'M', u'υ'),
+ (0x1D711, 'M', u'φ'),
+ (0x1D712, 'M', u'χ'),
+ (0x1D713, 'M', u'ψ'),
+ (0x1D714, 'M', u'ω'),
+ (0x1D715, 'M', u'∂'),
+ (0x1D716, 'M', u'ε'),
+ (0x1D717, 'M', u'θ'),
+ (0x1D718, 'M', u'κ'),
+ (0x1D719, 'M', u'φ'),
+ (0x1D71A, 'M', u'ρ'),
+ (0x1D71B, 'M', u'π'),
+ (0x1D71C, 'M', u'α'),
+ (0x1D71D, 'M', u'β'),
+ (0x1D71E, 'M', u'γ'),
+ (0x1D71F, 'M', u'δ'),
+ (0x1D720, 'M', u'ε'),
+ (0x1D721, 'M', u'ζ'),
+ ]
+
+def _seg_66():
+ return [
+ (0x1D722, 'M', u'η'),
+ (0x1D723, 'M', u'θ'),
+ (0x1D724, 'M', u'ι'),
+ (0x1D725, 'M', u'κ'),
+ (0x1D726, 'M', u'λ'),
+ (0x1D727, 'M', u'μ'),
+ (0x1D728, 'M', u'ν'),
+ (0x1D729, 'M', u'ξ'),
+ (0x1D72A, 'M', u'ο'),
+ (0x1D72B, 'M', u'π'),
+ (0x1D72C, 'M', u'ρ'),
+ (0x1D72D, 'M', u'θ'),
+ (0x1D72E, 'M', u'σ'),
+ (0x1D72F, 'M', u'τ'),
+ (0x1D730, 'M', u'υ'),
+ (0x1D731, 'M', u'φ'),
+ (0x1D732, 'M', u'χ'),
+ (0x1D733, 'M', u'ψ'),
+ (0x1D734, 'M', u'ω'),
+ (0x1D735, 'M', u'∇'),
+ (0x1D736, 'M', u'α'),
+ (0x1D737, 'M', u'β'),
+ (0x1D738, 'M', u'γ'),
+ (0x1D739, 'M', u'δ'),
+ (0x1D73A, 'M', u'ε'),
+ (0x1D73B, 'M', u'ζ'),
+ (0x1D73C, 'M', u'η'),
+ (0x1D73D, 'M', u'θ'),
+ (0x1D73E, 'M', u'ι'),
+ (0x1D73F, 'M', u'κ'),
+ (0x1D740, 'M', u'λ'),
+ (0x1D741, 'M', u'μ'),
+ (0x1D742, 'M', u'ν'),
+ (0x1D743, 'M', u'ξ'),
+ (0x1D744, 'M', u'ο'),
+ (0x1D745, 'M', u'π'),
+ (0x1D746, 'M', u'ρ'),
+ (0x1D747, 'M', u'σ'),
+ (0x1D749, 'M', u'τ'),
+ (0x1D74A, 'M', u'υ'),
+ (0x1D74B, 'M', u'φ'),
+ (0x1D74C, 'M', u'χ'),
+ (0x1D74D, 'M', u'ψ'),
+ (0x1D74E, 'M', u'ω'),
+ (0x1D74F, 'M', u'∂'),
+ (0x1D750, 'M', u'ε'),
+ (0x1D751, 'M', u'θ'),
+ (0x1D752, 'M', u'κ'),
+ (0x1D753, 'M', u'φ'),
+ (0x1D754, 'M', u'ρ'),
+ (0x1D755, 'M', u'π'),
+ (0x1D756, 'M', u'α'),
+ (0x1D757, 'M', u'β'),
+ (0x1D758, 'M', u'γ'),
+ (0x1D759, 'M', u'δ'),
+ (0x1D75A, 'M', u'ε'),
+ (0x1D75B, 'M', u'ζ'),
+ (0x1D75C, 'M', u'η'),
+ (0x1D75D, 'M', u'θ'),
+ (0x1D75E, 'M', u'ι'),
+ (0x1D75F, 'M', u'κ'),
+ (0x1D760, 'M', u'λ'),
+ (0x1D761, 'M', u'μ'),
+ (0x1D762, 'M', u'ν'),
+ (0x1D763, 'M', u'ξ'),
+ (0x1D764, 'M', u'ο'),
+ (0x1D765, 'M', u'π'),
+ (0x1D766, 'M', u'ρ'),
+ (0x1D767, 'M', u'θ'),
+ (0x1D768, 'M', u'σ'),
+ (0x1D769, 'M', u'τ'),
+ (0x1D76A, 'M', u'υ'),
+ (0x1D76B, 'M', u'φ'),
+ (0x1D76C, 'M', u'χ'),
+ (0x1D76D, 'M', u'ψ'),
+ (0x1D76E, 'M', u'ω'),
+ (0x1D76F, 'M', u'∇'),
+ (0x1D770, 'M', u'α'),
+ (0x1D771, 'M', u'β'),
+ (0x1D772, 'M', u'γ'),
+ (0x1D773, 'M', u'δ'),
+ (0x1D774, 'M', u'ε'),
+ (0x1D775, 'M', u'ζ'),
+ (0x1D776, 'M', u'η'),
+ (0x1D777, 'M', u'θ'),
+ (0x1D778, 'M', u'ι'),
+ (0x1D779, 'M', u'κ'),
+ (0x1D77A, 'M', u'λ'),
+ (0x1D77B, 'M', u'μ'),
+ (0x1D77C, 'M', u'ν'),
+ (0x1D77D, 'M', u'ξ'),
+ (0x1D77E, 'M', u'ο'),
+ (0x1D77F, 'M', u'π'),
+ (0x1D780, 'M', u'ρ'),
+ (0x1D781, 'M', u'σ'),
+ (0x1D783, 'M', u'τ'),
+ (0x1D784, 'M', u'υ'),
+ (0x1D785, 'M', u'φ'),
+ (0x1D786, 'M', u'χ'),
+ (0x1D787, 'M', u'ψ'),
+ ]
+
+def _seg_67():
+ return [
+ (0x1D788, 'M', u'ω'),
+ (0x1D789, 'M', u'∂'),
+ (0x1D78A, 'M', u'ε'),
+ (0x1D78B, 'M', u'θ'),
+ (0x1D78C, 'M', u'κ'),
+ (0x1D78D, 'M', u'φ'),
+ (0x1D78E, 'M', u'ρ'),
+ (0x1D78F, 'M', u'π'),
+ (0x1D790, 'M', u'α'),
+ (0x1D791, 'M', u'β'),
+ (0x1D792, 'M', u'γ'),
+ (0x1D793, 'M', u'δ'),
+ (0x1D794, 'M', u'ε'),
+ (0x1D795, 'M', u'ζ'),
+ (0x1D796, 'M', u'η'),
+ (0x1D797, 'M', u'θ'),
+ (0x1D798, 'M', u'ι'),
+ (0x1D799, 'M', u'κ'),
+ (0x1D79A, 'M', u'λ'),
+ (0x1D79B, 'M', u'μ'),
+ (0x1D79C, 'M', u'ν'),
+ (0x1D79D, 'M', u'ξ'),
+ (0x1D79E, 'M', u'ο'),
+ (0x1D79F, 'M', u'π'),
+ (0x1D7A0, 'M', u'ρ'),
+ (0x1D7A1, 'M', u'θ'),
+ (0x1D7A2, 'M', u'σ'),
+ (0x1D7A3, 'M', u'τ'),
+ (0x1D7A4, 'M', u'υ'),
+ (0x1D7A5, 'M', u'φ'),
+ (0x1D7A6, 'M', u'χ'),
+ (0x1D7A7, 'M', u'ψ'),
+ (0x1D7A8, 'M', u'ω'),
+ (0x1D7A9, 'M', u'∇'),
+ (0x1D7AA, 'M', u'α'),
+ (0x1D7AB, 'M', u'β'),
+ (0x1D7AC, 'M', u'γ'),
+ (0x1D7AD, 'M', u'δ'),
+ (0x1D7AE, 'M', u'ε'),
+ (0x1D7AF, 'M', u'ζ'),
+ (0x1D7B0, 'M', u'η'),
+ (0x1D7B1, 'M', u'θ'),
+ (0x1D7B2, 'M', u'ι'),
+ (0x1D7B3, 'M', u'κ'),
+ (0x1D7B4, 'M', u'λ'),
+ (0x1D7B5, 'M', u'μ'),
+ (0x1D7B6, 'M', u'ν'),
+ (0x1D7B7, 'M', u'ξ'),
+ (0x1D7B8, 'M', u'ο'),
+ (0x1D7B9, 'M', u'π'),
+ (0x1D7BA, 'M', u'ρ'),
+ (0x1D7BB, 'M', u'σ'),
+ (0x1D7BD, 'M', u'τ'),
+ (0x1D7BE, 'M', u'υ'),
+ (0x1D7BF, 'M', u'φ'),
+ (0x1D7C0, 'M', u'χ'),
+ (0x1D7C1, 'M', u'ψ'),
+ (0x1D7C2, 'M', u'ω'),
+ (0x1D7C3, 'M', u'∂'),
+ (0x1D7C4, 'M', u'ε'),
+ (0x1D7C5, 'M', u'θ'),
+ (0x1D7C6, 'M', u'κ'),
+ (0x1D7C7, 'M', u'φ'),
+ (0x1D7C8, 'M', u'ρ'),
+ (0x1D7C9, 'M', u'π'),
+ (0x1D7CA, 'M', u'ϝ'),
+ (0x1D7CC, 'X'),
+ (0x1D7CE, 'M', u'0'),
+ (0x1D7CF, 'M', u'1'),
+ (0x1D7D0, 'M', u'2'),
+ (0x1D7D1, 'M', u'3'),
+ (0x1D7D2, 'M', u'4'),
+ (0x1D7D3, 'M', u'5'),
+ (0x1D7D4, 'M', u'6'),
+ (0x1D7D5, 'M', u'7'),
+ (0x1D7D6, 'M', u'8'),
+ (0x1D7D7, 'M', u'9'),
+ (0x1D7D8, 'M', u'0'),
+ (0x1D7D9, 'M', u'1'),
+ (0x1D7DA, 'M', u'2'),
+ (0x1D7DB, 'M', u'3'),
+ (0x1D7DC, 'M', u'4'),
+ (0x1D7DD, 'M', u'5'),
+ (0x1D7DE, 'M', u'6'),
+ (0x1D7DF, 'M', u'7'),
+ (0x1D7E0, 'M', u'8'),
+ (0x1D7E1, 'M', u'9'),
+ (0x1D7E2, 'M', u'0'),
+ (0x1D7E3, 'M', u'1'),
+ (0x1D7E4, 'M', u'2'),
+ (0x1D7E5, 'M', u'3'),
+ (0x1D7E6, 'M', u'4'),
+ (0x1D7E7, 'M', u'5'),
+ (0x1D7E8, 'M', u'6'),
+ (0x1D7E9, 'M', u'7'),
+ (0x1D7EA, 'M', u'8'),
+ (0x1D7EB, 'M', u'9'),
+ (0x1D7EC, 'M', u'0'),
+ (0x1D7ED, 'M', u'1'),
+ (0x1D7EE, 'M', u'2'),
+ ]
+
+def _seg_68():
+ return [
+ (0x1D7EF, 'M', u'3'),
+ (0x1D7F0, 'M', u'4'),
+ (0x1D7F1, 'M', u'5'),
+ (0x1D7F2, 'M', u'6'),
+ (0x1D7F3, 'M', u'7'),
+ (0x1D7F4, 'M', u'8'),
+ (0x1D7F5, 'M', u'9'),
+ (0x1D7F6, 'M', u'0'),
+ (0x1D7F7, 'M', u'1'),
+ (0x1D7F8, 'M', u'2'),
+ (0x1D7F9, 'M', u'3'),
+ (0x1D7FA, 'M', u'4'),
+ (0x1D7FB, 'M', u'5'),
+ (0x1D7FC, 'M', u'6'),
+ (0x1D7FD, 'M', u'7'),
+ (0x1D7FE, 'M', u'8'),
+ (0x1D7FF, 'M', u'9'),
+ (0x1D800, 'V'),
+ (0x1DA8C, 'X'),
+ (0x1DA9B, 'V'),
+ (0x1DAA0, 'X'),
+ (0x1DAA1, 'V'),
+ (0x1DAB0, 'X'),
+ (0x1E000, 'V'),
+ (0x1E007, 'X'),
+ (0x1E008, 'V'),
+ (0x1E019, 'X'),
+ (0x1E01B, 'V'),
+ (0x1E022, 'X'),
+ (0x1E023, 'V'),
+ (0x1E025, 'X'),
+ (0x1E026, 'V'),
+ (0x1E02B, 'X'),
+ (0x1E800, 'V'),
+ (0x1E8C5, 'X'),
+ (0x1E8C7, 'V'),
+ (0x1E8D7, 'X'),
+ (0x1E900, 'M', u'𞤢'),
+ (0x1E901, 'M', u'𞤣'),
+ (0x1E902, 'M', u'𞤤'),
+ (0x1E903, 'M', u'𞤥'),
+ (0x1E904, 'M', u'𞤦'),
+ (0x1E905, 'M', u'𞤧'),
+ (0x1E906, 'M', u'𞤨'),
+ (0x1E907, 'M', u'𞤩'),
+ (0x1E908, 'M', u'𞤪'),
+ (0x1E909, 'M', u'𞤫'),
+ (0x1E90A, 'M', u'𞤬'),
+ (0x1E90B, 'M', u'𞤭'),
+ (0x1E90C, 'M', u'𞤮'),
+ (0x1E90D, 'M', u'𞤯'),
+ (0x1E90E, 'M', u'𞤰'),
+ (0x1E90F, 'M', u'𞤱'),
+ (0x1E910, 'M', u'𞤲'),
+ (0x1E911, 'M', u'𞤳'),
+ (0x1E912, 'M', u'𞤴'),
+ (0x1E913, 'M', u'𞤵'),
+ (0x1E914, 'M', u'𞤶'),
+ (0x1E915, 'M', u'𞤷'),
+ (0x1E916, 'M', u'𞤸'),
+ (0x1E917, 'M', u'𞤹'),
+ (0x1E918, 'M', u'𞤺'),
+ (0x1E919, 'M', u'𞤻'),
+ (0x1E91A, 'M', u'𞤼'),
+ (0x1E91B, 'M', u'𞤽'),
+ (0x1E91C, 'M', u'𞤾'),
+ (0x1E91D, 'M', u'𞤿'),
+ (0x1E91E, 'M', u'𞥀'),
+ (0x1E91F, 'M', u'𞥁'),
+ (0x1E920, 'M', u'𞥂'),
+ (0x1E921, 'M', u'𞥃'),
+ (0x1E922, 'V'),
+ (0x1E94B, 'X'),
+ (0x1E950, 'V'),
+ (0x1E95A, 'X'),
+ (0x1E95E, 'V'),
+ (0x1E960, 'X'),
+ (0x1EC71, 'V'),
+ (0x1ECB5, 'X'),
+ (0x1EE00, 'M', u'ا'),
+ (0x1EE01, 'M', u'ب'),
+ (0x1EE02, 'M', u'ج'),
+ (0x1EE03, 'M', u'د'),
+ (0x1EE04, 'X'),
+ (0x1EE05, 'M', u'و'),
+ (0x1EE06, 'M', u'ز'),
+ (0x1EE07, 'M', u'ح'),
+ (0x1EE08, 'M', u'ط'),
+ (0x1EE09, 'M', u'ي'),
+ (0x1EE0A, 'M', u'ك'),
+ (0x1EE0B, 'M', u'ل'),
+ (0x1EE0C, 'M', u'م'),
+ (0x1EE0D, 'M', u'ن'),
+ (0x1EE0E, 'M', u'س'),
+ (0x1EE0F, 'M', u'ع'),
+ (0x1EE10, 'M', u'ف'),
+ (0x1EE11, 'M', u'ص'),
+ (0x1EE12, 'M', u'ق'),
+ (0x1EE13, 'M', u'ر'),
+ (0x1EE14, 'M', u'ش'),
+ ]
+
+def _seg_69():
+ return [
+ (0x1EE15, 'M', u'ت'),
+ (0x1EE16, 'M', u'ث'),
+ (0x1EE17, 'M', u'خ'),
+ (0x1EE18, 'M', u'ذ'),
+ (0x1EE19, 'M', u'ض'),
+ (0x1EE1A, 'M', u'ظ'),
+ (0x1EE1B, 'M', u'غ'),
+ (0x1EE1C, 'M', u'ٮ'),
+ (0x1EE1D, 'M', u'ں'),
+ (0x1EE1E, 'M', u'ڡ'),
+ (0x1EE1F, 'M', u'ٯ'),
+ (0x1EE20, 'X'),
+ (0x1EE21, 'M', u'ب'),
+ (0x1EE22, 'M', u'ج'),
+ (0x1EE23, 'X'),
+ (0x1EE24, 'M', u'ه'),
+ (0x1EE25, 'X'),
+ (0x1EE27, 'M', u'ح'),
+ (0x1EE28, 'X'),
+ (0x1EE29, 'M', u'ي'),
+ (0x1EE2A, 'M', u'ك'),
+ (0x1EE2B, 'M', u'ل'),
+ (0x1EE2C, 'M', u'م'),
+ (0x1EE2D, 'M', u'ن'),
+ (0x1EE2E, 'M', u'س'),
+ (0x1EE2F, 'M', u'ع'),
+ (0x1EE30, 'M', u'ف'),
+ (0x1EE31, 'M', u'ص'),
+ (0x1EE32, 'M', u'ق'),
+ (0x1EE33, 'X'),
+ (0x1EE34, 'M', u'ش'),
+ (0x1EE35, 'M', u'ت'),
+ (0x1EE36, 'M', u'ث'),
+ (0x1EE37, 'M', u'خ'),
+ (0x1EE38, 'X'),
+ (0x1EE39, 'M', u'ض'),
+ (0x1EE3A, 'X'),
+ (0x1EE3B, 'M', u'غ'),
+ (0x1EE3C, 'X'),
+ (0x1EE42, 'M', u'ج'),
+ (0x1EE43, 'X'),
+ (0x1EE47, 'M', u'ح'),
+ (0x1EE48, 'X'),
+ (0x1EE49, 'M', u'ي'),
+ (0x1EE4A, 'X'),
+ (0x1EE4B, 'M', u'ل'),
+ (0x1EE4C, 'X'),
+ (0x1EE4D, 'M', u'ن'),
+ (0x1EE4E, 'M', u'س'),
+ (0x1EE4F, 'M', u'ع'),
+ (0x1EE50, 'X'),
+ (0x1EE51, 'M', u'ص'),
+ (0x1EE52, 'M', u'ق'),
+ (0x1EE53, 'X'),
+ (0x1EE54, 'M', u'ش'),
+ (0x1EE55, 'X'),
+ (0x1EE57, 'M', u'خ'),
+ (0x1EE58, 'X'),
+ (0x1EE59, 'M', u'ض'),
+ (0x1EE5A, 'X'),
+ (0x1EE5B, 'M', u'غ'),
+ (0x1EE5C, 'X'),
+ (0x1EE5D, 'M', u'ں'),
+ (0x1EE5E, 'X'),
+ (0x1EE5F, 'M', u'ٯ'),
+ (0x1EE60, 'X'),
+ (0x1EE61, 'M', u'ب'),
+ (0x1EE62, 'M', u'ج'),
+ (0x1EE63, 'X'),
+ (0x1EE64, 'M', u'ه'),
+ (0x1EE65, 'X'),
+ (0x1EE67, 'M', u'ح'),
+ (0x1EE68, 'M', u'ط'),
+ (0x1EE69, 'M', u'ي'),
+ (0x1EE6A, 'M', u'ك'),
+ (0x1EE6B, 'X'),
+ (0x1EE6C, 'M', u'م'),
+ (0x1EE6D, 'M', u'ن'),
+ (0x1EE6E, 'M', u'س'),
+ (0x1EE6F, 'M', u'ع'),
+ (0x1EE70, 'M', u'ف'),
+ (0x1EE71, 'M', u'ص'),
+ (0x1EE72, 'M', u'ق'),
+ (0x1EE73, 'X'),
+ (0x1EE74, 'M', u'ش'),
+ (0x1EE75, 'M', u'ت'),
+ (0x1EE76, 'M', u'ث'),
+ (0x1EE77, 'M', u'خ'),
+ (0x1EE78, 'X'),
+ (0x1EE79, 'M', u'ض'),
+ (0x1EE7A, 'M', u'ظ'),
+ (0x1EE7B, 'M', u'غ'),
+ (0x1EE7C, 'M', u'ٮ'),
+ (0x1EE7D, 'X'),
+ (0x1EE7E, 'M', u'ڡ'),
+ (0x1EE7F, 'X'),
+ (0x1EE80, 'M', u'ا'),
+ (0x1EE81, 'M', u'ب'),
+ (0x1EE82, 'M', u'ج'),
+ (0x1EE83, 'M', u'د'),
+ ]
+
+def _seg_70():
+ return [
+ (0x1EE84, 'M', u'ه'),
+ (0x1EE85, 'M', u'و'),
+ (0x1EE86, 'M', u'ز'),
+ (0x1EE87, 'M', u'ح'),
+ (0x1EE88, 'M', u'ط'),
+ (0x1EE89, 'M', u'ي'),
+ (0x1EE8A, 'X'),
+ (0x1EE8B, 'M', u'ل'),
+ (0x1EE8C, 'M', u'م'),
+ (0x1EE8D, 'M', u'ن'),
+ (0x1EE8E, 'M', u'س'),
+ (0x1EE8F, 'M', u'ع'),
+ (0x1EE90, 'M', u'ف'),
+ (0x1EE91, 'M', u'ص'),
+ (0x1EE92, 'M', u'ق'),
+ (0x1EE93, 'M', u'ر'),
+ (0x1EE94, 'M', u'ش'),
+ (0x1EE95, 'M', u'ت'),
+ (0x1EE96, 'M', u'ث'),
+ (0x1EE97, 'M', u'خ'),
+ (0x1EE98, 'M', u'ذ'),
+ (0x1EE99, 'M', u'ض'),
+ (0x1EE9A, 'M', u'ظ'),
+ (0x1EE9B, 'M', u'غ'),
+ (0x1EE9C, 'X'),
+ (0x1EEA1, 'M', u'ب'),
+ (0x1EEA2, 'M', u'ج'),
+ (0x1EEA3, 'M', u'د'),
+ (0x1EEA4, 'X'),
+ (0x1EEA5, 'M', u'و'),
+ (0x1EEA6, 'M', u'ز'),
+ (0x1EEA7, 'M', u'ح'),
+ (0x1EEA8, 'M', u'ط'),
+ (0x1EEA9, 'M', u'ي'),
+ (0x1EEAA, 'X'),
+ (0x1EEAB, 'M', u'ل'),
+ (0x1EEAC, 'M', u'م'),
+ (0x1EEAD, 'M', u'ن'),
+ (0x1EEAE, 'M', u'س'),
+ (0x1EEAF, 'M', u'ع'),
+ (0x1EEB0, 'M', u'ف'),
+ (0x1EEB1, 'M', u'ص'),
+ (0x1EEB2, 'M', u'ق'),
+ (0x1EEB3, 'M', u'ر'),
+ (0x1EEB4, 'M', u'ش'),
+ (0x1EEB5, 'M', u'ت'),
+ (0x1EEB6, 'M', u'ث'),
+ (0x1EEB7, 'M', u'خ'),
+ (0x1EEB8, 'M', u'ذ'),
+ (0x1EEB9, 'M', u'ض'),
+ (0x1EEBA, 'M', u'ظ'),
+ (0x1EEBB, 'M', u'غ'),
+ (0x1EEBC, 'X'),
+ (0x1EEF0, 'V'),
+ (0x1EEF2, 'X'),
+ (0x1F000, 'V'),
+ (0x1F02C, 'X'),
+ (0x1F030, 'V'),
+ (0x1F094, 'X'),
+ (0x1F0A0, 'V'),
+ (0x1F0AF, 'X'),
+ (0x1F0B1, 'V'),
+ (0x1F0C0, 'X'),
+ (0x1F0C1, 'V'),
+ (0x1F0D0, 'X'),
+ (0x1F0D1, 'V'),
+ (0x1F0F6, 'X'),
+ (0x1F101, '3', u'0,'),
+ (0x1F102, '3', u'1,'),
+ (0x1F103, '3', u'2,'),
+ (0x1F104, '3', u'3,'),
+ (0x1F105, '3', u'4,'),
+ (0x1F106, '3', u'5,'),
+ (0x1F107, '3', u'6,'),
+ (0x1F108, '3', u'7,'),
+ (0x1F109, '3', u'8,'),
+ (0x1F10A, '3', u'9,'),
+ (0x1F10B, 'V'),
+ (0x1F10D, 'X'),
+ (0x1F110, '3', u'(a)'),
+ (0x1F111, '3', u'(b)'),
+ (0x1F112, '3', u'(c)'),
+ (0x1F113, '3', u'(d)'),
+ (0x1F114, '3', u'(e)'),
+ (0x1F115, '3', u'(f)'),
+ (0x1F116, '3', u'(g)'),
+ (0x1F117, '3', u'(h)'),
+ (0x1F118, '3', u'(i)'),
+ (0x1F119, '3', u'(j)'),
+ (0x1F11A, '3', u'(k)'),
+ (0x1F11B, '3', u'(l)'),
+ (0x1F11C, '3', u'(m)'),
+ (0x1F11D, '3', u'(n)'),
+ (0x1F11E, '3', u'(o)'),
+ (0x1F11F, '3', u'(p)'),
+ (0x1F120, '3', u'(q)'),
+ (0x1F121, '3', u'(r)'),
+ (0x1F122, '3', u'(s)'),
+ (0x1F123, '3', u'(t)'),
+ (0x1F124, '3', u'(u)'),
+ ]
+
+def _seg_71():
+ return [
+ (0x1F125, '3', u'(v)'),
+ (0x1F126, '3', u'(w)'),
+ (0x1F127, '3', u'(x)'),
+ (0x1F128, '3', u'(y)'),
+ (0x1F129, '3', u'(z)'),
+ (0x1F12A, 'M', u'〔s〕'),
+ (0x1F12B, 'M', u'c'),
+ (0x1F12C, 'M', u'r'),
+ (0x1F12D, 'M', u'cd'),
+ (0x1F12E, 'M', u'wz'),
+ (0x1F12F, 'V'),
+ (0x1F130, 'M', u'a'),
+ (0x1F131, 'M', u'b'),
+ (0x1F132, 'M', u'c'),
+ (0x1F133, 'M', u'd'),
+ (0x1F134, 'M', u'e'),
+ (0x1F135, 'M', u'f'),
+ (0x1F136, 'M', u'g'),
+ (0x1F137, 'M', u'h'),
+ (0x1F138, 'M', u'i'),
+ (0x1F139, 'M', u'j'),
+ (0x1F13A, 'M', u'k'),
+ (0x1F13B, 'M', u'l'),
+ (0x1F13C, 'M', u'm'),
+ (0x1F13D, 'M', u'n'),
+ (0x1F13E, 'M', u'o'),
+ (0x1F13F, 'M', u'p'),
+ (0x1F140, 'M', u'q'),
+ (0x1F141, 'M', u'r'),
+ (0x1F142, 'M', u's'),
+ (0x1F143, 'M', u't'),
+ (0x1F144, 'M', u'u'),
+ (0x1F145, 'M', u'v'),
+ (0x1F146, 'M', u'w'),
+ (0x1F147, 'M', u'x'),
+ (0x1F148, 'M', u'y'),
+ (0x1F149, 'M', u'z'),
+ (0x1F14A, 'M', u'hv'),
+ (0x1F14B, 'M', u'mv'),
+ (0x1F14C, 'M', u'sd'),
+ (0x1F14D, 'M', u'ss'),
+ (0x1F14E, 'M', u'ppv'),
+ (0x1F14F, 'M', u'wc'),
+ (0x1F150, 'V'),
+ (0x1F16A, 'M', u'mc'),
+ (0x1F16B, 'M', u'md'),
+ (0x1F16C, 'X'),
+ (0x1F170, 'V'),
+ (0x1F190, 'M', u'dj'),
+ (0x1F191, 'V'),
+ (0x1F1AD, 'X'),
+ (0x1F1E6, 'V'),
+ (0x1F200, 'M', u'ほか'),
+ (0x1F201, 'M', u'ココ'),
+ (0x1F202, 'M', u'サ'),
+ (0x1F203, 'X'),
+ (0x1F210, 'M', u'手'),
+ (0x1F211, 'M', u'字'),
+ (0x1F212, 'M', u'双'),
+ (0x1F213, 'M', u'デ'),
+ (0x1F214, 'M', u'二'),
+ (0x1F215, 'M', u'多'),
+ (0x1F216, 'M', u'解'),
+ (0x1F217, 'M', u'天'),
+ (0x1F218, 'M', u'交'),
+ (0x1F219, 'M', u'映'),
+ (0x1F21A, 'M', u'無'),
+ (0x1F21B, 'M', u'料'),
+ (0x1F21C, 'M', u'前'),
+ (0x1F21D, 'M', u'後'),
+ (0x1F21E, 'M', u'再'),
+ (0x1F21F, 'M', u'新'),
+ (0x1F220, 'M', u'初'),
+ (0x1F221, 'M', u'終'),
+ (0x1F222, 'M', u'生'),
+ (0x1F223, 'M', u'販'),
+ (0x1F224, 'M', u'声'),
+ (0x1F225, 'M', u'吹'),
+ (0x1F226, 'M', u'演'),
+ (0x1F227, 'M', u'投'),
+ (0x1F228, 'M', u'捕'),
+ (0x1F229, 'M', u'一'),
+ (0x1F22A, 'M', u'三'),
+ (0x1F22B, 'M', u'遊'),
+ (0x1F22C, 'M', u'左'),
+ (0x1F22D, 'M', u'中'),
+ (0x1F22E, 'M', u'右'),
+ (0x1F22F, 'M', u'指'),
+ (0x1F230, 'M', u'走'),
+ (0x1F231, 'M', u'打'),
+ (0x1F232, 'M', u'禁'),
+ (0x1F233, 'M', u'空'),
+ (0x1F234, 'M', u'合'),
+ (0x1F235, 'M', u'満'),
+ (0x1F236, 'M', u'有'),
+ (0x1F237, 'M', u'月'),
+ (0x1F238, 'M', u'申'),
+ (0x1F239, 'M', u'割'),
+ (0x1F23A, 'M', u'営'),
+ (0x1F23B, 'M', u'配'),
+ ]
+
+def _seg_72():
+ return [
+ (0x1F23C, 'X'),
+ (0x1F240, 'M', u'〔本〕'),
+ (0x1F241, 'M', u'〔三〕'),
+ (0x1F242, 'M', u'〔二〕'),
+ (0x1F243, 'M', u'〔安〕'),
+ (0x1F244, 'M', u'〔点〕'),
+ (0x1F245, 'M', u'〔打〕'),
+ (0x1F246, 'M', u'〔盗〕'),
+ (0x1F247, 'M', u'〔勝〕'),
+ (0x1F248, 'M', u'〔敗〕'),
+ (0x1F249, 'X'),
+ (0x1F250, 'M', u'得'),
+ (0x1F251, 'M', u'可'),
+ (0x1F252, 'X'),
+ (0x1F260, 'V'),
+ (0x1F266, 'X'),
+ (0x1F300, 'V'),
+ (0x1F6D5, 'X'),
+ (0x1F6E0, 'V'),
+ (0x1F6ED, 'X'),
+ (0x1F6F0, 'V'),
+ (0x1F6FA, 'X'),
+ (0x1F700, 'V'),
+ (0x1F774, 'X'),
+ (0x1F780, 'V'),
+ (0x1F7D9, 'X'),
+ (0x1F800, 'V'),
+ (0x1F80C, 'X'),
+ (0x1F810, 'V'),
+ (0x1F848, 'X'),
+ (0x1F850, 'V'),
+ (0x1F85A, 'X'),
+ (0x1F860, 'V'),
+ (0x1F888, 'X'),
+ (0x1F890, 'V'),
+ (0x1F8AE, 'X'),
+ (0x1F900, 'V'),
+ (0x1F90C, 'X'),
+ (0x1F910, 'V'),
+ (0x1F93F, 'X'),
+ (0x1F940, 'V'),
+ (0x1F971, 'X'),
+ (0x1F973, 'V'),
+ (0x1F977, 'X'),
+ (0x1F97A, 'V'),
+ (0x1F97B, 'X'),
+ (0x1F97C, 'V'),
+ (0x1F9A3, 'X'),
+ (0x1F9B0, 'V'),
+ (0x1F9BA, 'X'),
+ (0x1F9C0, 'V'),
+ (0x1F9C3, 'X'),
+ (0x1F9D0, 'V'),
+ (0x1FA00, 'X'),
+ (0x1FA60, 'V'),
+ (0x1FA6E, 'X'),
+ (0x20000, 'V'),
+ (0x2A6D7, 'X'),
+ (0x2A700, 'V'),
+ (0x2B735, 'X'),
+ (0x2B740, 'V'),
+ (0x2B81E, 'X'),
+ (0x2B820, 'V'),
+ (0x2CEA2, 'X'),
+ (0x2CEB0, 'V'),
+ (0x2EBE1, 'X'),
+ (0x2F800, 'M', u'丽'),
+ (0x2F801, 'M', u'丸'),
+ (0x2F802, 'M', u'乁'),
+ (0x2F803, 'M', u'𠄢'),
+ (0x2F804, 'M', u'你'),
+ (0x2F805, 'M', u'侮'),
+ (0x2F806, 'M', u'侻'),
+ (0x2F807, 'M', u'倂'),
+ (0x2F808, 'M', u'偺'),
+ (0x2F809, 'M', u'備'),
+ (0x2F80A, 'M', u'僧'),
+ (0x2F80B, 'M', u'像'),
+ (0x2F80C, 'M', u'㒞'),
+ (0x2F80D, 'M', u'𠘺'),
+ (0x2F80E, 'M', u'免'),
+ (0x2F80F, 'M', u'兔'),
+ (0x2F810, 'M', u'兤'),
+ (0x2F811, 'M', u'具'),
+ (0x2F812, 'M', u'𠔜'),
+ (0x2F813, 'M', u'㒹'),
+ (0x2F814, 'M', u'內'),
+ (0x2F815, 'M', u'再'),
+ (0x2F816, 'M', u'𠕋'),
+ (0x2F817, 'M', u'冗'),
+ (0x2F818, 'M', u'冤'),
+ (0x2F819, 'M', u'仌'),
+ (0x2F81A, 'M', u'冬'),
+ (0x2F81B, 'M', u'况'),
+ (0x2F81C, 'M', u'𩇟'),
+ (0x2F81D, 'M', u'凵'),
+ (0x2F81E, 'M', u'刃'),
+ (0x2F81F, 'M', u'㓟'),
+ (0x2F820, 'M', u'刻'),
+ (0x2F821, 'M', u'剆'),
+ ]
+
+def _seg_73():
+ return [
+ (0x2F822, 'M', u'割'),
+ (0x2F823, 'M', u'剷'),
+ (0x2F824, 'M', u'㔕'),
+ (0x2F825, 'M', u'勇'),
+ (0x2F826, 'M', u'勉'),
+ (0x2F827, 'M', u'勤'),
+ (0x2F828, 'M', u'勺'),
+ (0x2F829, 'M', u'包'),
+ (0x2F82A, 'M', u'匆'),
+ (0x2F82B, 'M', u'北'),
+ (0x2F82C, 'M', u'卉'),
+ (0x2F82D, 'M', u'卑'),
+ (0x2F82E, 'M', u'博'),
+ (0x2F82F, 'M', u'即'),
+ (0x2F830, 'M', u'卽'),
+ (0x2F831, 'M', u'卿'),
+ (0x2F834, 'M', u'𠨬'),
+ (0x2F835, 'M', u'灰'),
+ (0x2F836, 'M', u'及'),
+ (0x2F837, 'M', u'叟'),
+ (0x2F838, 'M', u'𠭣'),
+ (0x2F839, 'M', u'叫'),
+ (0x2F83A, 'M', u'叱'),
+ (0x2F83B, 'M', u'吆'),
+ (0x2F83C, 'M', u'咞'),
+ (0x2F83D, 'M', u'吸'),
+ (0x2F83E, 'M', u'呈'),
+ (0x2F83F, 'M', u'周'),
+ (0x2F840, 'M', u'咢'),
+ (0x2F841, 'M', u'哶'),
+ (0x2F842, 'M', u'唐'),
+ (0x2F843, 'M', u'啓'),
+ (0x2F844, 'M', u'啣'),
+ (0x2F845, 'M', u'善'),
+ (0x2F847, 'M', u'喙'),
+ (0x2F848, 'M', u'喫'),
+ (0x2F849, 'M', u'喳'),
+ (0x2F84A, 'M', u'嗂'),
+ (0x2F84B, 'M', u'圖'),
+ (0x2F84C, 'M', u'嘆'),
+ (0x2F84D, 'M', u'圗'),
+ (0x2F84E, 'M', u'噑'),
+ (0x2F84F, 'M', u'噴'),
+ (0x2F850, 'M', u'切'),
+ (0x2F851, 'M', u'壮'),
+ (0x2F852, 'M', u'城'),
+ (0x2F853, 'M', u'埴'),
+ (0x2F854, 'M', u'堍'),
+ (0x2F855, 'M', u'型'),
+ (0x2F856, 'M', u'堲'),
+ (0x2F857, 'M', u'報'),
+ (0x2F858, 'M', u'墬'),
+ (0x2F859, 'M', u'𡓤'),
+ (0x2F85A, 'M', u'売'),
+ (0x2F85B, 'M', u'壷'),
+ (0x2F85C, 'M', u'夆'),
+ (0x2F85D, 'M', u'多'),
+ (0x2F85E, 'M', u'夢'),
+ (0x2F85F, 'M', u'奢'),
+ (0x2F860, 'M', u'𡚨'),
+ (0x2F861, 'M', u'𡛪'),
+ (0x2F862, 'M', u'姬'),
+ (0x2F863, 'M', u'娛'),
+ (0x2F864, 'M', u'娧'),
+ (0x2F865, 'M', u'姘'),
+ (0x2F866, 'M', u'婦'),
+ (0x2F867, 'M', u'㛮'),
+ (0x2F868, 'X'),
+ (0x2F869, 'M', u'嬈'),
+ (0x2F86A, 'M', u'嬾'),
+ (0x2F86C, 'M', u'𡧈'),
+ (0x2F86D, 'M', u'寃'),
+ (0x2F86E, 'M', u'寘'),
+ (0x2F86F, 'M', u'寧'),
+ (0x2F870, 'M', u'寳'),
+ (0x2F871, 'M', u'𡬘'),
+ (0x2F872, 'M', u'寿'),
+ (0x2F873, 'M', u'将'),
+ (0x2F874, 'X'),
+ (0x2F875, 'M', u'尢'),
+ (0x2F876, 'M', u'㞁'),
+ (0x2F877, 'M', u'屠'),
+ (0x2F878, 'M', u'屮'),
+ (0x2F879, 'M', u'峀'),
+ (0x2F87A, 'M', u'岍'),
+ (0x2F87B, 'M', u'𡷤'),
+ (0x2F87C, 'M', u'嵃'),
+ (0x2F87D, 'M', u'𡷦'),
+ (0x2F87E, 'M', u'嵮'),
+ (0x2F87F, 'M', u'嵫'),
+ (0x2F880, 'M', u'嵼'),
+ (0x2F881, 'M', u'巡'),
+ (0x2F882, 'M', u'巢'),
+ (0x2F883, 'M', u'㠯'),
+ (0x2F884, 'M', u'巽'),
+ (0x2F885, 'M', u'帨'),
+ (0x2F886, 'M', u'帽'),
+ (0x2F887, 'M', u'幩'),
+ (0x2F888, 'M', u'㡢'),
+ (0x2F889, 'M', u'𢆃'),
+ ]
+
+def _seg_74():
+ return [
+ (0x2F88A, 'M', u'㡼'),
+ (0x2F88B, 'M', u'庰'),
+ (0x2F88C, 'M', u'庳'),
+ (0x2F88D, 'M', u'庶'),
+ (0x2F88E, 'M', u'廊'),
+ (0x2F88F, 'M', u'𪎒'),
+ (0x2F890, 'M', u'廾'),
+ (0x2F891, 'M', u'𢌱'),
+ (0x2F893, 'M', u'舁'),
+ (0x2F894, 'M', u'弢'),
+ (0x2F896, 'M', u'㣇'),
+ (0x2F897, 'M', u'𣊸'),
+ (0x2F898, 'M', u'𦇚'),
+ (0x2F899, 'M', u'形'),
+ (0x2F89A, 'M', u'彫'),
+ (0x2F89B, 'M', u'㣣'),
+ (0x2F89C, 'M', u'徚'),
+ (0x2F89D, 'M', u'忍'),
+ (0x2F89E, 'M', u'志'),
+ (0x2F89F, 'M', u'忹'),
+ (0x2F8A0, 'M', u'悁'),
+ (0x2F8A1, 'M', u'㤺'),
+ (0x2F8A2, 'M', u'㤜'),
+ (0x2F8A3, 'M', u'悔'),
+ (0x2F8A4, 'M', u'𢛔'),
+ (0x2F8A5, 'M', u'惇'),
+ (0x2F8A6, 'M', u'慈'),
+ (0x2F8A7, 'M', u'慌'),
+ (0x2F8A8, 'M', u'慎'),
+ (0x2F8A9, 'M', u'慌'),
+ (0x2F8AA, 'M', u'慺'),
+ (0x2F8AB, 'M', u'憎'),
+ (0x2F8AC, 'M', u'憲'),
+ (0x2F8AD, 'M', u'憤'),
+ (0x2F8AE, 'M', u'憯'),
+ (0x2F8AF, 'M', u'懞'),
+ (0x2F8B0, 'M', u'懲'),
+ (0x2F8B1, 'M', u'懶'),
+ (0x2F8B2, 'M', u'成'),
+ (0x2F8B3, 'M', u'戛'),
+ (0x2F8B4, 'M', u'扝'),
+ (0x2F8B5, 'M', u'抱'),
+ (0x2F8B6, 'M', u'拔'),
+ (0x2F8B7, 'M', u'捐'),
+ (0x2F8B8, 'M', u'𢬌'),
+ (0x2F8B9, 'M', u'挽'),
+ (0x2F8BA, 'M', u'拼'),
+ (0x2F8BB, 'M', u'捨'),
+ (0x2F8BC, 'M', u'掃'),
+ (0x2F8BD, 'M', u'揤'),
+ (0x2F8BE, 'M', u'𢯱'),
+ (0x2F8BF, 'M', u'搢'),
+ (0x2F8C0, 'M', u'揅'),
+ (0x2F8C1, 'M', u'掩'),
+ (0x2F8C2, 'M', u'㨮'),
+ (0x2F8C3, 'M', u'摩'),
+ (0x2F8C4, 'M', u'摾'),
+ (0x2F8C5, 'M', u'撝'),
+ (0x2F8C6, 'M', u'摷'),
+ (0x2F8C7, 'M', u'㩬'),
+ (0x2F8C8, 'M', u'敏'),
+ (0x2F8C9, 'M', u'敬'),
+ (0x2F8CA, 'M', u'𣀊'),
+ (0x2F8CB, 'M', u'旣'),
+ (0x2F8CC, 'M', u'書'),
+ (0x2F8CD, 'M', u'晉'),
+ (0x2F8CE, 'M', u'㬙'),
+ (0x2F8CF, 'M', u'暑'),
+ (0x2F8D0, 'M', u'㬈'),
+ (0x2F8D1, 'M', u'㫤'),
+ (0x2F8D2, 'M', u'冒'),
+ (0x2F8D3, 'M', u'冕'),
+ (0x2F8D4, 'M', u'最'),
+ (0x2F8D5, 'M', u'暜'),
+ (0x2F8D6, 'M', u'肭'),
+ (0x2F8D7, 'M', u'䏙'),
+ (0x2F8D8, 'M', u'朗'),
+ (0x2F8D9, 'M', u'望'),
+ (0x2F8DA, 'M', u'朡'),
+ (0x2F8DB, 'M', u'杞'),
+ (0x2F8DC, 'M', u'杓'),
+ (0x2F8DD, 'M', u'𣏃'),
+ (0x2F8DE, 'M', u'㭉'),
+ (0x2F8DF, 'M', u'柺'),
+ (0x2F8E0, 'M', u'枅'),
+ (0x2F8E1, 'M', u'桒'),
+ (0x2F8E2, 'M', u'梅'),
+ (0x2F8E3, 'M', u'𣑭'),
+ (0x2F8E4, 'M', u'梎'),
+ (0x2F8E5, 'M', u'栟'),
+ (0x2F8E6, 'M', u'椔'),
+ (0x2F8E7, 'M', u'㮝'),
+ (0x2F8E8, 'M', u'楂'),
+ (0x2F8E9, 'M', u'榣'),
+ (0x2F8EA, 'M', u'槪'),
+ (0x2F8EB, 'M', u'檨'),
+ (0x2F8EC, 'M', u'𣚣'),
+ (0x2F8ED, 'M', u'櫛'),
+ (0x2F8EE, 'M', u'㰘'),
+ (0x2F8EF, 'M', u'次'),
+ ]
+
+def _seg_75():
+ return [
+ (0x2F8F0, 'M', u'𣢧'),
+ (0x2F8F1, 'M', u'歔'),
+ (0x2F8F2, 'M', u'㱎'),
+ (0x2F8F3, 'M', u'歲'),
+ (0x2F8F4, 'M', u'殟'),
+ (0x2F8F5, 'M', u'殺'),
+ (0x2F8F6, 'M', u'殻'),
+ (0x2F8F7, 'M', u'𣪍'),
+ (0x2F8F8, 'M', u'𡴋'),
+ (0x2F8F9, 'M', u'𣫺'),
+ (0x2F8FA, 'M', u'汎'),
+ (0x2F8FB, 'M', u'𣲼'),
+ (0x2F8FC, 'M', u'沿'),
+ (0x2F8FD, 'M', u'泍'),
+ (0x2F8FE, 'M', u'汧'),
+ (0x2F8FF, 'M', u'洖'),
+ (0x2F900, 'M', u'派'),
+ (0x2F901, 'M', u'海'),
+ (0x2F902, 'M', u'流'),
+ (0x2F903, 'M', u'浩'),
+ (0x2F904, 'M', u'浸'),
+ (0x2F905, 'M', u'涅'),
+ (0x2F906, 'M', u'𣴞'),
+ (0x2F907, 'M', u'洴'),
+ (0x2F908, 'M', u'港'),
+ (0x2F909, 'M', u'湮'),
+ (0x2F90A, 'M', u'㴳'),
+ (0x2F90B, 'M', u'滋'),
+ (0x2F90C, 'M', u'滇'),
+ (0x2F90D, 'M', u'𣻑'),
+ (0x2F90E, 'M', u'淹'),
+ (0x2F90F, 'M', u'潮'),
+ (0x2F910, 'M', u'𣽞'),
+ (0x2F911, 'M', u'𣾎'),
+ (0x2F912, 'M', u'濆'),
+ (0x2F913, 'M', u'瀹'),
+ (0x2F914, 'M', u'瀞'),
+ (0x2F915, 'M', u'瀛'),
+ (0x2F916, 'M', u'㶖'),
+ (0x2F917, 'M', u'灊'),
+ (0x2F918, 'M', u'災'),
+ (0x2F919, 'M', u'灷'),
+ (0x2F91A, 'M', u'炭'),
+ (0x2F91B, 'M', u'𠔥'),
+ (0x2F91C, 'M', u'煅'),
+ (0x2F91D, 'M', u'𤉣'),
+ (0x2F91E, 'M', u'熜'),
+ (0x2F91F, 'X'),
+ (0x2F920, 'M', u'爨'),
+ (0x2F921, 'M', u'爵'),
+ (0x2F922, 'M', u'牐'),
+ (0x2F923, 'M', u'𤘈'),
+ (0x2F924, 'M', u'犀'),
+ (0x2F925, 'M', u'犕'),
+ (0x2F926, 'M', u'𤜵'),
+ (0x2F927, 'M', u'𤠔'),
+ (0x2F928, 'M', u'獺'),
+ (0x2F929, 'M', u'王'),
+ (0x2F92A, 'M', u'㺬'),
+ (0x2F92B, 'M', u'玥'),
+ (0x2F92C, 'M', u'㺸'),
+ (0x2F92E, 'M', u'瑇'),
+ (0x2F92F, 'M', u'瑜'),
+ (0x2F930, 'M', u'瑱'),
+ (0x2F931, 'M', u'璅'),
+ (0x2F932, 'M', u'瓊'),
+ (0x2F933, 'M', u'㼛'),
+ (0x2F934, 'M', u'甤'),
+ (0x2F935, 'M', u'𤰶'),
+ (0x2F936, 'M', u'甾'),
+ (0x2F937, 'M', u'𤲒'),
+ (0x2F938, 'M', u'異'),
+ (0x2F939, 'M', u'𢆟'),
+ (0x2F93A, 'M', u'瘐'),
+ (0x2F93B, 'M', u'𤾡'),
+ (0x2F93C, 'M', u'𤾸'),
+ (0x2F93D, 'M', u'𥁄'),
+ (0x2F93E, 'M', u'㿼'),
+ (0x2F93F, 'M', u'䀈'),
+ (0x2F940, 'M', u'直'),
+ (0x2F941, 'M', u'𥃳'),
+ (0x2F942, 'M', u'𥃲'),
+ (0x2F943, 'M', u'𥄙'),
+ (0x2F944, 'M', u'𥄳'),
+ (0x2F945, 'M', u'眞'),
+ (0x2F946, 'M', u'真'),
+ (0x2F948, 'M', u'睊'),
+ (0x2F949, 'M', u'䀹'),
+ (0x2F94A, 'M', u'瞋'),
+ (0x2F94B, 'M', u'䁆'),
+ (0x2F94C, 'M', u'䂖'),
+ (0x2F94D, 'M', u'𥐝'),
+ (0x2F94E, 'M', u'硎'),
+ (0x2F94F, 'M', u'碌'),
+ (0x2F950, 'M', u'磌'),
+ (0x2F951, 'M', u'䃣'),
+ (0x2F952, 'M', u'𥘦'),
+ (0x2F953, 'M', u'祖'),
+ (0x2F954, 'M', u'𥚚'),
+ (0x2F955, 'M', u'𥛅'),
+ ]
+
+def _seg_76():
+ return [
+ (0x2F956, 'M', u'福'),
+ (0x2F957, 'M', u'秫'),
+ (0x2F958, 'M', u'䄯'),
+ (0x2F959, 'M', u'穀'),
+ (0x2F95A, 'M', u'穊'),
+ (0x2F95B, 'M', u'穏'),
+ (0x2F95C, 'M', u'𥥼'),
+ (0x2F95D, 'M', u'𥪧'),
+ (0x2F95F, 'X'),
+ (0x2F960, 'M', u'䈂'),
+ (0x2F961, 'M', u'𥮫'),
+ (0x2F962, 'M', u'篆'),
+ (0x2F963, 'M', u'築'),
+ (0x2F964, 'M', u'䈧'),
+ (0x2F965, 'M', u'𥲀'),
+ (0x2F966, 'M', u'糒'),
+ (0x2F967, 'M', u'䊠'),
+ (0x2F968, 'M', u'糨'),
+ (0x2F969, 'M', u'糣'),
+ (0x2F96A, 'M', u'紀'),
+ (0x2F96B, 'M', u'𥾆'),
+ (0x2F96C, 'M', u'絣'),
+ (0x2F96D, 'M', u'䌁'),
+ (0x2F96E, 'M', u'緇'),
+ (0x2F96F, 'M', u'縂'),
+ (0x2F970, 'M', u'繅'),
+ (0x2F971, 'M', u'䌴'),
+ (0x2F972, 'M', u'𦈨'),
+ (0x2F973, 'M', u'𦉇'),
+ (0x2F974, 'M', u'䍙'),
+ (0x2F975, 'M', u'𦋙'),
+ (0x2F976, 'M', u'罺'),
+ (0x2F977, 'M', u'𦌾'),
+ (0x2F978, 'M', u'羕'),
+ (0x2F979, 'M', u'翺'),
+ (0x2F97A, 'M', u'者'),
+ (0x2F97B, 'M', u'𦓚'),
+ (0x2F97C, 'M', u'𦔣'),
+ (0x2F97D, 'M', u'聠'),
+ (0x2F97E, 'M', u'𦖨'),
+ (0x2F97F, 'M', u'聰'),
+ (0x2F980, 'M', u'𣍟'),
+ (0x2F981, 'M', u'䏕'),
+ (0x2F982, 'M', u'育'),
+ (0x2F983, 'M', u'脃'),
+ (0x2F984, 'M', u'䐋'),
+ (0x2F985, 'M', u'脾'),
+ (0x2F986, 'M', u'媵'),
+ (0x2F987, 'M', u'𦞧'),
+ (0x2F988, 'M', u'𦞵'),
+ (0x2F989, 'M', u'𣎓'),
+ (0x2F98A, 'M', u'𣎜'),
+ (0x2F98B, 'M', u'舁'),
+ (0x2F98C, 'M', u'舄'),
+ (0x2F98D, 'M', u'辞'),
+ (0x2F98E, 'M', u'䑫'),
+ (0x2F98F, 'M', u'芑'),
+ (0x2F990, 'M', u'芋'),
+ (0x2F991, 'M', u'芝'),
+ (0x2F992, 'M', u'劳'),
+ (0x2F993, 'M', u'花'),
+ (0x2F994, 'M', u'芳'),
+ (0x2F995, 'M', u'芽'),
+ (0x2F996, 'M', u'苦'),
+ (0x2F997, 'M', u'𦬼'),
+ (0x2F998, 'M', u'若'),
+ (0x2F999, 'M', u'茝'),
+ (0x2F99A, 'M', u'荣'),
+ (0x2F99B, 'M', u'莭'),
+ (0x2F99C, 'M', u'茣'),
+ (0x2F99D, 'M', u'莽'),
+ (0x2F99E, 'M', u'菧'),
+ (0x2F99F, 'M', u'著'),
+ (0x2F9A0, 'M', u'荓'),
+ (0x2F9A1, 'M', u'菊'),
+ (0x2F9A2, 'M', u'菌'),
+ (0x2F9A3, 'M', u'菜'),
+ (0x2F9A4, 'M', u'𦰶'),
+ (0x2F9A5, 'M', u'𦵫'),
+ (0x2F9A6, 'M', u'𦳕'),
+ (0x2F9A7, 'M', u'䔫'),
+ (0x2F9A8, 'M', u'蓱'),
+ (0x2F9A9, 'M', u'蓳'),
+ (0x2F9AA, 'M', u'蔖'),
+ (0x2F9AB, 'M', u'𧏊'),
+ (0x2F9AC, 'M', u'蕤'),
+ (0x2F9AD, 'M', u'𦼬'),
+ (0x2F9AE, 'M', u'䕝'),
+ (0x2F9AF, 'M', u'䕡'),
+ (0x2F9B0, 'M', u'𦾱'),
+ (0x2F9B1, 'M', u'𧃒'),
+ (0x2F9B2, 'M', u'䕫'),
+ (0x2F9B3, 'M', u'虐'),
+ (0x2F9B4, 'M', u'虜'),
+ (0x2F9B5, 'M', u'虧'),
+ (0x2F9B6, 'M', u'虩'),
+ (0x2F9B7, 'M', u'蚩'),
+ (0x2F9B8, 'M', u'蚈'),
+ (0x2F9B9, 'M', u'蜎'),
+ (0x2F9BA, 'M', u'蛢'),
+ ]
+
+def _seg_77():
+ return [
+ (0x2F9BB, 'M', u'蝹'),
+ (0x2F9BC, 'M', u'蜨'),
+ (0x2F9BD, 'M', u'蝫'),
+ (0x2F9BE, 'M', u'螆'),
+ (0x2F9BF, 'X'),
+ (0x2F9C0, 'M', u'蟡'),
+ (0x2F9C1, 'M', u'蠁'),
+ (0x2F9C2, 'M', u'䗹'),
+ (0x2F9C3, 'M', u'衠'),
+ (0x2F9C4, 'M', u'衣'),
+ (0x2F9C5, 'M', u'𧙧'),
+ (0x2F9C6, 'M', u'裗'),
+ (0x2F9C7, 'M', u'裞'),
+ (0x2F9C8, 'M', u'䘵'),
+ (0x2F9C9, 'M', u'裺'),
+ (0x2F9CA, 'M', u'㒻'),
+ (0x2F9CB, 'M', u'𧢮'),
+ (0x2F9CC, 'M', u'𧥦'),
+ (0x2F9CD, 'M', u'䚾'),
+ (0x2F9CE, 'M', u'䛇'),
+ (0x2F9CF, 'M', u'誠'),
+ (0x2F9D0, 'M', u'諭'),
+ (0x2F9D1, 'M', u'變'),
+ (0x2F9D2, 'M', u'豕'),
+ (0x2F9D3, 'M', u'𧲨'),
+ (0x2F9D4, 'M', u'貫'),
+ (0x2F9D5, 'M', u'賁'),
+ (0x2F9D6, 'M', u'贛'),
+ (0x2F9D7, 'M', u'起'),
+ (0x2F9D8, 'M', u'𧼯'),
+ (0x2F9D9, 'M', u'𠠄'),
+ (0x2F9DA, 'M', u'跋'),
+ (0x2F9DB, 'M', u'趼'),
+ (0x2F9DC, 'M', u'跰'),
+ (0x2F9DD, 'M', u'𠣞'),
+ (0x2F9DE, 'M', u'軔'),
+ (0x2F9DF, 'M', u'輸'),
+ (0x2F9E0, 'M', u'𨗒'),
+ (0x2F9E1, 'M', u'𨗭'),
+ (0x2F9E2, 'M', u'邔'),
+ (0x2F9E3, 'M', u'郱'),
+ (0x2F9E4, 'M', u'鄑'),
+ (0x2F9E5, 'M', u'𨜮'),
+ (0x2F9E6, 'M', u'鄛'),
+ (0x2F9E7, 'M', u'鈸'),
+ (0x2F9E8, 'M', u'鋗'),
+ (0x2F9E9, 'M', u'鋘'),
+ (0x2F9EA, 'M', u'鉼'),
+ (0x2F9EB, 'M', u'鏹'),
+ (0x2F9EC, 'M', u'鐕'),
+ (0x2F9ED, 'M', u'𨯺'),
+ (0x2F9EE, 'M', u'開'),
+ (0x2F9EF, 'M', u'䦕'),
+ (0x2F9F0, 'M', u'閷'),
+ (0x2F9F1, 'M', u'𨵷'),
+ (0x2F9F2, 'M', u'䧦'),
+ (0x2F9F3, 'M', u'雃'),
+ (0x2F9F4, 'M', u'嶲'),
+ (0x2F9F5, 'M', u'霣'),
+ (0x2F9F6, 'M', u'𩅅'),
+ (0x2F9F7, 'M', u'𩈚'),
+ (0x2F9F8, 'M', u'䩮'),
+ (0x2F9F9, 'M', u'䩶'),
+ (0x2F9FA, 'M', u'韠'),
+ (0x2F9FB, 'M', u'𩐊'),
+ (0x2F9FC, 'M', u'䪲'),
+ (0x2F9FD, 'M', u'𩒖'),
+ (0x2F9FE, 'M', u'頋'),
+ (0x2FA00, 'M', u'頩'),
+ (0x2FA01, 'M', u'𩖶'),
+ (0x2FA02, 'M', u'飢'),
+ (0x2FA03, 'M', u'䬳'),
+ (0x2FA04, 'M', u'餩'),
+ (0x2FA05, 'M', u'馧'),
+ (0x2FA06, 'M', u'駂'),
+ (0x2FA07, 'M', u'駾'),
+ (0x2FA08, 'M', u'䯎'),
+ (0x2FA09, 'M', u'𩬰'),
+ (0x2FA0A, 'M', u'鬒'),
+ (0x2FA0B, 'M', u'鱀'),
+ (0x2FA0C, 'M', u'鳽'),
+ (0x2FA0D, 'M', u'䳎'),
+ (0x2FA0E, 'M', u'䳭'),
+ (0x2FA0F, 'M', u'鵧'),
+ (0x2FA10, 'M', u'𪃎'),
+ (0x2FA11, 'M', u'䳸'),
+ (0x2FA12, 'M', u'𪄅'),
+ (0x2FA13, 'M', u'𪈎'),
+ (0x2FA14, 'M', u'𪊑'),
+ (0x2FA15, 'M', u'麻'),
+ (0x2FA16, 'M', u'䵖'),
+ (0x2FA17, 'M', u'黹'),
+ (0x2FA18, 'M', u'黾'),
+ (0x2FA19, 'M', u'鼅'),
+ (0x2FA1A, 'M', u'鼏'),
+ (0x2FA1B, 'M', u'鼖'),
+ (0x2FA1C, 'M', u'鼻'),
+ (0x2FA1D, 'M', u'𪘀'),
+ (0x2FA1E, 'X'),
+ (0xE0100, 'I'),
+ ]
+
+def _seg_78():
+ return [
+ (0xE01F0, 'X'),
+ ]
+
+uts46data = tuple(
+ _seg_0()
+ + _seg_1()
+ + _seg_2()
+ + _seg_3()
+ + _seg_4()
+ + _seg_5()
+ + _seg_6()
+ + _seg_7()
+ + _seg_8()
+ + _seg_9()
+ + _seg_10()
+ + _seg_11()
+ + _seg_12()
+ + _seg_13()
+ + _seg_14()
+ + _seg_15()
+ + _seg_16()
+ + _seg_17()
+ + _seg_18()
+ + _seg_19()
+ + _seg_20()
+ + _seg_21()
+ + _seg_22()
+ + _seg_23()
+ + _seg_24()
+ + _seg_25()
+ + _seg_26()
+ + _seg_27()
+ + _seg_28()
+ + _seg_29()
+ + _seg_30()
+ + _seg_31()
+ + _seg_32()
+ + _seg_33()
+ + _seg_34()
+ + _seg_35()
+ + _seg_36()
+ + _seg_37()
+ + _seg_38()
+ + _seg_39()
+ + _seg_40()
+ + _seg_41()
+ + _seg_42()
+ + _seg_43()
+ + _seg_44()
+ + _seg_45()
+ + _seg_46()
+ + _seg_47()
+ + _seg_48()
+ + _seg_49()
+ + _seg_50()
+ + _seg_51()
+ + _seg_52()
+ + _seg_53()
+ + _seg_54()
+ + _seg_55()
+ + _seg_56()
+ + _seg_57()
+ + _seg_58()
+ + _seg_59()
+ + _seg_60()
+ + _seg_61()
+ + _seg_62()
+ + _seg_63()
+ + _seg_64()
+ + _seg_65()
+ + _seg_66()
+ + _seg_67()
+ + _seg_68()
+ + _seg_69()
+ + _seg_70()
+ + _seg_71()
+ + _seg_72()
+ + _seg_73()
+ + _seg_74()
+ + _seg_75()
+ + _seg_76()
+ + _seg_77()
+ + _seg_78()
+)
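+
+# Usage sketch (illustrative): each row above is a (start_codepoint,
+# status[, mapping]) tuple sorted by start, and a row governs every code
+# point up to the next row's start. idna.core resolves a code point to its
+# row with bisect, roughly:
+#
+#     import bisect
+#     def _uts46_row(code_point):
+#         if code_point < 256:
+#             return uts46data[code_point]
+#         return uts46data[bisect.bisect_left(uts46data, (code_point, 'Z')) - 1]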
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/ipaddress.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/ipaddress.py
new file mode 100644
index 00000000..f2d07668
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/ipaddress.py
@@ -0,0 +1,2419 @@
+# Copyright 2007 Google Inc.
+# Licensed to PSF under a Contributor Agreement.
+
+"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
+
+This library is used to create/poke/manipulate IPv4 and IPv6 addresses
+and networks.
+
+"""
+
+from __future__ import unicode_literals
+
+
+import itertools
+import struct
+
+__version__ = '1.0.22'
+
+# Compatibility functions
+_compat_int_types = (int,)
+try:
+ _compat_int_types = (int, long)
+except NameError:
+ pass
+try:
+ _compat_str = unicode
+except NameError:
+ _compat_str = str
+ assert bytes != str
+if b'\0'[0] == 0: # Python 3 semantics
+ def _compat_bytes_to_byte_vals(byt):
+ return byt
+else:
+ def _compat_bytes_to_byte_vals(byt):
+ return [struct.unpack(b'!B', b)[0] for b in byt]
+try:
+ _compat_int_from_byte_vals = int.from_bytes
+except AttributeError:
+ def _compat_int_from_byte_vals(bytvals, endianess):
+ assert endianess == 'big'
+ res = 0
+ for bv in bytvals:
+ assert isinstance(bv, _compat_int_types)
+ res = (res << 8) + bv
+ return res
+
+
+def _compat_to_bytes(intval, length, endianess):
+ assert isinstance(intval, _compat_int_types)
+ assert endianess == 'big'
+ if length == 4:
+ if intval < 0 or intval >= 2 ** 32:
+ raise struct.error("integer out of range for 'I' format code")
+ return struct.pack(b'!I', intval)
+ elif length == 16:
+ if intval < 0 or intval >= 2 ** 128:
+ raise struct.error("integer out of range for 'QQ' format code")
+ return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
+ else:
+ raise NotImplementedError()
+
+
+if hasattr(int, 'bit_length'):
+    # Not int.bit_length, since that won't work in 2.7 where long exists
+ def _compat_bit_length(i):
+ return i.bit_length()
+else:
+ def _compat_bit_length(i):
+ for res in itertools.count():
+ if i >> res == 0:
+ return res
+
+
+def _compat_range(start, end, step=1):
+ assert step > 0
+ i = start
+ while i < end:
+ yield i
+ i += step
+
+
+class _TotalOrderingMixin(object):
+ __slots__ = ()
+
+ # Helper that derives the other comparison operations from
+ # __lt__ and __eq__
+ # We avoid functools.total_ordering because it doesn't handle
+ # NotImplemented correctly yet (http://bugs.python.org/issue10042)
+ def __eq__(self, other):
+ raise NotImplementedError
+
+ def __ne__(self, other):
+ equal = self.__eq__(other)
+ if equal is NotImplemented:
+ return NotImplemented
+ return not equal
+
+ def __lt__(self, other):
+ raise NotImplementedError
+
+ def __le__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented or not less:
+ return self.__eq__(other)
+ return less
+
+ def __gt__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented:
+ return NotImplemented
+ equal = self.__eq__(other)
+ if equal is NotImplemented:
+ return NotImplemented
+ return not (less or equal)
+
+ def __ge__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented:
+ return NotImplemented
+ return not less
+
+
+IPV4LENGTH = 32
+IPV6LENGTH = 128
+
+
+class AddressValueError(ValueError):
+ """A Value Error related to the address."""
+
+
+class NetmaskValueError(ValueError):
+ """A Value Error related to the netmask."""
+
+
+def ip_address(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Address or IPv6Address object.
+
+ Raises:
+ ValueError: if the *address* passed isn't either a v4 or a v6
+ address
+
+ """
+ try:
+ return IPv4Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ if isinstance(address, bytes):
+ raise AddressValueError(
+ '%r does not appear to be an IPv4 or IPv6 address. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?' % address)
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
+ address)
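+
+# Usage (illustrative): text input yields the matching class, and integers
+# below 2**32 are treated as IPv4:
+#
+#     >>> ip_address(u'192.0.2.1')
+#     IPv4Address('192.0.2.1')
+#     >>> ip_address(3221225985)
+#     IPv4Address('192.0.2.1')
+#     >>> ip_address(u'2001:db8::1')
+#     IPv6Address('2001:db8::1')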
+
+
+def ip_network(address, strict=True):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP network. Either IPv4 or
+ IPv6 networks may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Network or IPv6Network object.
+
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address. Or if the network has host bits set.
+
+ """
+ try:
+ return IPv4Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ if isinstance(address, bytes):
+ raise AddressValueError(
+ '%r does not appear to be an IPv4 or IPv6 network. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?' % address)
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
+ address)
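+
+# Usage (illustrative): with strict=True (the default) host bits must be
+# zero; strict=False masks them off instead:
+#
+#     >>> ip_network(u'192.0.2.0/24')
+#     IPv4Network('192.0.2.0/24')
+#     >>> ip_network(u'192.0.2.1/24', strict=False)
+#     IPv4Network('192.0.2.0/24')
+#     >>> ip_network(u'192.0.2.1/24')
+#     Traceback (most recent call last):
+#       ...
+#     ValueError: 192.0.2.1/24 has host bits set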
+
+
+def ip_interface(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Interface or IPv6Interface object.
+
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address.
+
+ Notes:
+ The IPv?Interface classes describe an Address on a particular
+ Network, so they're basically a combination of both the Address
+ and Network classes.
+
+ """
+ try:
+ return IPv4Interface(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Interface(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
+ address)
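+
+# Usage (illustrative): an interface couples a host address with the
+# network it lives on:
+#
+#     >>> ip_interface(u'192.0.2.5/24')
+#     IPv4Interface('192.0.2.5/24')
+#     >>> ip_interface(u'192.0.2.5/24').network
+#     IPv4Network('192.0.2.0/24')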
+
+
+def v4_int_to_packed(address):
+ """Represent an address as 4 packed bytes in network (big-endian) order.
+
+ Args:
+ address: An integer representation of an IPv4 IP address.
+
+ Returns:
+ The integer address packed as 4 bytes in network (big-endian) order.
+
+ Raises:
+ ValueError: If the integer is negative or too large to be an
+ IPv4 IP address.
+
+ """
+ try:
+ return _compat_to_bytes(address, 4, 'big')
+ except (struct.error, OverflowError):
+ raise ValueError("Address negative or too large for IPv4")
+
+
+def v6_int_to_packed(address):
+ """Represent an address as 16 packed bytes in network (big-endian) order.
+
+ Args:
+ address: An integer representation of an IPv6 IP address.
+
+ Returns:
+        The integer address packed as 16 bytes in network (big-endian) order.
+
+    Raises:
+        ValueError: If the integer is negative or too large to be an
+            IPv6 IP address.
+
+    """
+ try:
+ return _compat_to_bytes(address, 16, 'big')
+ except (struct.error, OverflowError):
+ raise ValueError("Address negative or too large for IPv6")
+
+
+def _split_optional_netmask(address):
+ """Helper to split the netmask and raise AddressValueError if needed"""
+ addr = _compat_str(address).split('/')
+ if len(addr) > 2:
+ raise AddressValueError("Only one '/' permitted in %r" % address)
+ return addr
+
+
+def _find_address_range(addresses):
+    """Find runs of consecutive addresses in a sorted, deduplicated
+    list of IPv#Address objects.
+
+ Args:
+ addresses: a list of IPv#Address objects.
+
+ Yields:
+        A tuple of the first and last address in each run of consecutive
+        addresses.
+
+ """
+ it = iter(addresses)
+ first = last = next(it)
+ for ip in it:
+ if ip._ip != last._ip + 1:
+ yield first, last
+ first = ip
+ last = ip
+ yield first, last
+
+
+def _count_righthand_zero_bits(number, bits):
+ """Count the number of zero bits on the right hand side.
+
+ Args:
+ number: an integer.
+ bits: maximum number of bits to count.
+
+ Returns:
+ The number of zero bits on the right hand side of the number.
+
+ """
+ if number == 0:
+ return bits
+ return min(bits, _compat_bit_length(~number & (number - 1)))
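+
+# Worked example (illustrative): ~number & (number - 1) turns the trailing
+# zero bits into ones and clears everything above them. For number = 0b1000,
+# number - 1 = 0b0111 and ~number & 0b0111 = 0b0111, whose bit length is 3,
+# the count of trailing zeros; for number = 0 the full width `bits` is
+# returned.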
+
+
+def summarize_address_range(first, last):
+ """Summarize a network range given the first and last IP addresses.
+
+ Example:
+ >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
+ ... IPv4Address('192.0.2.130')))
+ ... #doctest: +NORMALIZE_WHITESPACE
+ [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
+ IPv4Network('192.0.2.130/32')]
+
+ Args:
+ first: the first IPv4Address or IPv6Address in the range.
+ last: the last IPv4Address or IPv6Address in the range.
+
+ Returns:
+ An iterator of the summarized IPv(4|6) network objects.
+
+    Raises:
+ TypeError:
+ If the first and last objects are not IP addresses.
+ If the first and last objects are not the same version.
+ ValueError:
+ If the last object is not greater than the first.
+ If the version of the first address is not 4 or 6.
+
+ """
+ if (not (isinstance(first, _BaseAddress) and
+ isinstance(last, _BaseAddress))):
+ raise TypeError('first and last must be IP addresses, not networks')
+ if first.version != last.version:
+ raise TypeError("%s and %s are not of the same version" % (
+ first, last))
+ if first > last:
+ raise ValueError('last IP address must be greater than first')
+
+ if first.version == 4:
+ ip = IPv4Network
+ elif first.version == 6:
+ ip = IPv6Network
+ else:
+ raise ValueError('unknown IP version')
+
+ ip_bits = first._max_prefixlen
+ first_int = first._ip
+ last_int = last._ip
+ while first_int <= last_int:
+ nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
+ _compat_bit_length(last_int - first_int + 1) - 1)
+ net = ip((first_int, ip_bits - nbits))
+ yield net
+ first_int += 1 << nbits
+ if first_int - 1 == ip._ALL_ONES:
+ break
+
+
+def _collapse_addresses_internal(addresses):
+    """Loops through the addresses, collapsing contiguous netblocks.
+
+ Example:
+
+ ip1 = IPv4Network('192.0.2.0/26')
+ ip2 = IPv4Network('192.0.2.64/26')
+ ip3 = IPv4Network('192.0.2.128/26')
+ ip4 = IPv4Network('192.0.2.192/26')
+
+ _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
+ [IPv4Network('192.0.2.0/24')]
+
+ This shouldn't be called directly; it is called via
+ collapse_addresses([]).
+
+ Args:
+        addresses: A list of IPv4Network or IPv6Network objects.
+
+    Returns:
+        An iterator of IPv4Network or IPv6Network objects, depending on
+        what was passed.
+
+ """
+ # First merge
+ to_merge = list(addresses)
+ subnets = {}
+ while to_merge:
+ net = to_merge.pop()
+ supernet = net.supernet()
+ existing = subnets.get(supernet)
+ if existing is None:
+ subnets[supernet] = net
+ elif existing != net:
+ # Merge consecutive subnets
+ del subnets[supernet]
+ to_merge.append(supernet)
+ # Then iterate over resulting networks, skipping subsumed subnets
+ last = None
+ for net in sorted(subnets.values()):
+ if last is not None:
+ # Since they are sorted,
+ # last.network_address <= net.network_address is a given.
+ if last.broadcast_address >= net.broadcast_address:
+ continue
+ yield net
+ last = net
+
+
+def collapse_addresses(addresses):
+ """Collapse a list of IP objects.
+
+ Example:
+ collapse_addresses([IPv4Network('192.0.2.0/25'),
+ IPv4Network('192.0.2.128/25')]) ->
+ [IPv4Network('192.0.2.0/24')]
+
+ Args:
+ addresses: An iterator of IPv4Network or IPv6Network objects.
+
+ Returns:
+ An iterator of the collapsed IPv(4|6)Network objects.
+
+ Raises:
+ TypeError: If passed a list of mixed version objects.
+
+ """
+ addrs = []
+ ips = []
+ nets = []
+
+ # split IP addresses and networks
+ for ip in addresses:
+ if isinstance(ip, _BaseAddress):
+ if ips and ips[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ ip, ips[-1]))
+ ips.append(ip)
+ elif ip._prefixlen == ip._max_prefixlen:
+ if ips and ips[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ ip, ips[-1]))
+ try:
+ ips.append(ip.ip)
+ except AttributeError:
+ ips.append(ip.network_address)
+ else:
+ if nets and nets[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ ip, nets[-1]))
+ nets.append(ip)
+
+ # sort and dedup
+ ips = sorted(set(ips))
+
+ # find consecutive address ranges in the sorted sequence and summarize them
+ if ips:
+ for first, last in _find_address_range(ips):
+ addrs.extend(summarize_address_range(first, last))
+
+ return _collapse_addresses_internal(addrs + nets)
+
+
+def get_mixed_type_key(obj):
+ """Return a key suitable for sorting between networks and addresses.
+
+ Address and Network objects are not sortable by default; they're
+ fundamentally different so the expression
+
+ IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')
+
+    doesn't make any sense. There are times, however, when you may wish to
+    have ipaddress sort these for you anyway. If you need to do this, you
+    can use this function as the key= argument to sorted().
+
+ Args:
+ obj: either a Network or Address object.
+ Returns:
+ appropriate key.
+
+ """
+ if isinstance(obj, _BaseNetwork):
+ return obj._get_networks_key()
+ elif isinstance(obj, _BaseAddress):
+ return obj._get_address_key()
+ return NotImplemented
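+
+# Usage (illustrative): the tuple keys make an address sort just before a
+# network with the same network address:
+#
+#     >>> sorted([ip_network(u'192.0.2.0/24'), ip_address(u'192.0.2.0')],
+#     ...        key=get_mixed_type_key)
+#     [IPv4Address('192.0.2.0'), IPv4Network('192.0.2.0/24')]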
+
+
+class _IPAddressBase(_TotalOrderingMixin):
+
+ """The mother class."""
+
+ __slots__ = ()
+
+ @property
+ def exploded(self):
+ """Return the longhand version of the IP address as a string."""
+ return self._explode_shorthand_ip_string()
+
+ @property
+ def compressed(self):
+ """Return the shorthand version of the IP address as a string."""
+ return _compat_str(self)
+
+ @property
+ def reverse_pointer(self):
+ """The name of the reverse DNS pointer for the IP address, e.g.:
+ >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
+ '1.0.0.127.in-addr.arpa'
+ >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
+ '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
+
+ """
+ return self._reverse_pointer()
+
+ @property
+ def version(self):
+ msg = '%200s has no version specified' % (type(self),)
+ raise NotImplementedError(msg)
+
+ def _check_int_address(self, address):
+ if address < 0:
+ msg = "%d (< 0) is not permitted as an IPv%d address"
+ raise AddressValueError(msg % (address, self._version))
+ if address > self._ALL_ONES:
+ msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
+ raise AddressValueError(msg % (address, self._max_prefixlen,
+ self._version))
+
+ def _check_packed_address(self, address, expected_len):
+ address_len = len(address)
+ if address_len != expected_len:
+ msg = (
+ '%r (len %d != %d) is not permitted as an IPv%d address. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?')
+ raise AddressValueError(msg % (address, address_len,
+ expected_len, self._version))
+
+ @classmethod
+ def _ip_int_from_prefix(cls, prefixlen):
+ """Turn the prefix length into a bitwise netmask
+
+ Args:
+ prefixlen: An integer, the prefix length.
+
+ Returns:
+ An integer.
+
+ """
+ return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
+
+ @classmethod
+ def _prefix_from_ip_int(cls, ip_int):
+ """Return prefix length from the bitwise netmask.
+
+ Args:
+ ip_int: An integer, the netmask in expanded bitwise format
+
+ Returns:
+ An integer, the prefix length.
+
+ Raises:
+ ValueError: If the input intermingles zeroes & ones
+ """
+ trailing_zeroes = _count_righthand_zero_bits(ip_int,
+ cls._max_prefixlen)
+ prefixlen = cls._max_prefixlen - trailing_zeroes
+ leading_ones = ip_int >> trailing_zeroes
+ all_ones = (1 << prefixlen) - 1
+ if leading_ones != all_ones:
+ byteslen = cls._max_prefixlen // 8
+ details = _compat_to_bytes(ip_int, byteslen, 'big')
+ msg = 'Netmask pattern %r mixes zeroes & ones'
+ raise ValueError(msg % details)
+ return prefixlen
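+
+    # Worked example (illustrative): for the netmask 255.255.255.0 the
+    # integer is 0xffffff00, giving 8 trailing zeroes and a prefix length
+    # of 32 - 8 = 24; the remaining 24 leading bits are all ones, so 24 is
+    # returned. A mixed pattern such as 255.0.255.0 raises ValueError.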
+
+ @classmethod
+ def _report_invalid_netmask(cls, netmask_str):
+ msg = '%r is not a valid netmask' % netmask_str
+ raise NetmaskValueError(msg)
+
+ @classmethod
+ def _prefix_from_prefix_string(cls, prefixlen_str):
+ """Return prefix length from a numeric string
+
+ Args:
+ prefixlen_str: The string to be converted
+
+ Returns:
+ An integer, the prefix length.
+
+ Raises:
+ NetmaskValueError: If the input is not a valid netmask
+ """
+ # int allows a leading +/- as well as surrounding whitespace,
+ # so we ensure that isn't the case
+ if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
+ cls._report_invalid_netmask(prefixlen_str)
+ try:
+ prefixlen = int(prefixlen_str)
+ except ValueError:
+ cls._report_invalid_netmask(prefixlen_str)
+ if not (0 <= prefixlen <= cls._max_prefixlen):
+ cls._report_invalid_netmask(prefixlen_str)
+ return prefixlen
+
+ @classmethod
+ def _prefix_from_ip_string(cls, ip_str):
+ """Turn a netmask/hostmask string into a prefix length
+
+ Args:
+ ip_str: The netmask/hostmask to be converted
+
+ Returns:
+ An integer, the prefix length.
+
+ Raises:
+ NetmaskValueError: If the input is not a valid netmask/hostmask
+ """
+ # Parse the netmask/hostmask like an IP address.
+ try:
+ ip_int = cls._ip_int_from_string(ip_str)
+ except AddressValueError:
+ cls._report_invalid_netmask(ip_str)
+
+ # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
+ # Note that the two ambiguous cases (all-ones and all-zeroes) are
+ # treated as netmasks.
+ try:
+ return cls._prefix_from_ip_int(ip_int)
+ except ValueError:
+ pass
+
+ # Invert the bits, and try matching a /0+1+/ hostmask instead.
+ ip_int ^= cls._ALL_ONES
+ try:
+ return cls._prefix_from_ip_int(ip_int)
+ except ValueError:
+ cls._report_invalid_netmask(ip_str)
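+
+    # Worked example (illustrative): the netmask '255.255.255.0' and the
+    # hostmask '0.0.0.255' both resolve to a prefix length of 24; the
+    # hostmask only succeeds on the second attempt, after the bits are
+    # inverted.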
+
+ def __reduce__(self):
+ return self.__class__, (_compat_str(self),)
+
+
+class _BaseAddress(_IPAddressBase):
+
+ """A generic IP object.
+
+ This IP class contains the version independent methods which are
+ used by single IP addresses.
+ """
+
+ __slots__ = ()
+
+ def __int__(self):
+ return self._ip
+
+ def __eq__(self, other):
+ try:
+ return (self._ip == other._ip and
+ self._version == other._version)
+ except AttributeError:
+ return NotImplemented
+
+ def __lt__(self, other):
+ if not isinstance(other, _IPAddressBase):
+ return NotImplemented
+ if not isinstance(other, _BaseAddress):
+ raise TypeError('%s and %s are not of the same type' % (
+ self, other))
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ self, other))
+ if self._ip != other._ip:
+ return self._ip < other._ip
+ return False
+
+    # Shorthand for integer addition and subtraction. This is not
+ # meant to ever support addition/subtraction of addresses.
+ def __add__(self, other):
+ if not isinstance(other, _compat_int_types):
+ return NotImplemented
+ return self.__class__(int(self) + other)
+
+ def __sub__(self, other):
+ if not isinstance(other, _compat_int_types):
+ return NotImplemented
+ return self.__class__(int(self) - other)
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
+
+ def __str__(self):
+ return _compat_str(self._string_from_ip_int(self._ip))
+
+ def __hash__(self):
+ return hash(hex(int(self._ip)))
+
+ def _get_address_key(self):
+ return (self._version, self)
+
+ def __reduce__(self):
+ return self.__class__, (self._ip,)
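+
+# Usage (illustrative): addresses support integer offsets and conversion,
+# but never address-plus-address arithmetic:
+#
+#     >>> IPv4Address(u'192.0.2.1') + 1
+#     IPv4Address('192.0.2.2')
+#     >>> IPv4Address(u'192.0.2.0') - 1
+#     IPv4Address('192.0.1.255')
+#     >>> int(IPv4Address(u'192.0.2.1'))
+#     3221225985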
+
+
+class _BaseNetwork(_IPAddressBase):
+
+ """A generic IP network object.
+
+ This IP class contains the version independent methods which are
+ used by networks.
+
+ """
+ def __init__(self, address):
+ self._cache = {}
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
+
+ def __str__(self):
+ return '%s/%d' % (self.network_address, self.prefixlen)
+
+ def hosts(self):
+        """Generate an iterator over the usable hosts in the network.
+
+ This is like __iter__ except it doesn't return the network
+ or broadcast addresses.
+
+ """
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ for x in _compat_range(network + 1, broadcast):
+ yield self._address_class(x)
+
+ def __iter__(self):
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ for x in _compat_range(network, broadcast + 1):
+ yield self._address_class(x)
+
+ def __getitem__(self, n):
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ if n >= 0:
+ if network + n > broadcast:
+ raise IndexError('address out of range')
+ return self._address_class(network + n)
+ else:
+ n += 1
+ if broadcast + n < network:
+ raise IndexError('address out of range')
+ return self._address_class(broadcast + n)
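+
+    # Usage (illustrative): networks iterate and index over their
+    # addresses; hosts() skips the network and broadcast addresses.
+    # With n = IPv4Network(u'192.0.2.0/24'):
+    #
+    #     >>> n[0], n[-1]
+    #     (IPv4Address('192.0.2.0'), IPv4Address('192.0.2.255'))
+    #     >>> next(n.hosts())
+    #     IPv4Address('192.0.2.1')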
+
+ def __lt__(self, other):
+ if not isinstance(other, _IPAddressBase):
+ return NotImplemented
+ if not isinstance(other, _BaseNetwork):
+ raise TypeError('%s and %s are not of the same type' % (
+ self, other))
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ self, other))
+ if self.network_address != other.network_address:
+ return self.network_address < other.network_address
+ if self.netmask != other.netmask:
+ return self.netmask < other.netmask
+ return False
+
+ def __eq__(self, other):
+ try:
+ return (self._version == other._version and
+ self.network_address == other.network_address and
+ int(self.netmask) == int(other.netmask))
+ except AttributeError:
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(int(self.network_address) ^ int(self.netmask))
+
+ def __contains__(self, other):
+ # always false if one is v4 and the other is v6.
+ if self._version != other._version:
+ return False
+ # dealing with another network.
+ if isinstance(other, _BaseNetwork):
+ return False
+ # dealing with another address
+ else:
+ # address
+ return (int(self.network_address) <= int(other._ip) <=
+ int(self.broadcast_address))
+
+ def overlaps(self, other):
+        """Tell if self overlaps other, i.e. they share at least one address."""
+ return self.network_address in other or (
+ self.broadcast_address in other or (
+ other.network_address in self or (
+ other.broadcast_address in self)))
+
+ @property
+ def broadcast_address(self):
+ x = self._cache.get('broadcast_address')
+ if x is None:
+ x = self._address_class(int(self.network_address) |
+ int(self.hostmask))
+ self._cache['broadcast_address'] = x
+ return x
+
+ @property
+ def hostmask(self):
+ x = self._cache.get('hostmask')
+ if x is None:
+ x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
+ self._cache['hostmask'] = x
+ return x
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%d' % (self.network_address, self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self.network_address, self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self.network_address, self.hostmask)
+
+ @property
+ def num_addresses(self):
+        """Total number of addresses in the network."""
+ return int(self.broadcast_address) - int(self.network_address) + 1
+
+ @property
+ def _address_class(self):
+ # Returning bare address objects (rather than interfaces) allows for
+ # more consistent behaviour across the network address, broadcast
+ # address and individual host addresses.
+ msg = '%200s has no associated address class' % (type(self),)
+ raise NotImplementedError(msg)
+
+ @property
+ def prefixlen(self):
+ return self._prefixlen
+
+ def address_exclude(self, other):
+ """Remove an address from a larger block.
+
+ For example:
+
+ addr1 = ip_network('192.0.2.0/28')
+ addr2 = ip_network('192.0.2.1/32')
+ list(addr1.address_exclude(addr2)) =
+ [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
+ IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
+
+ or IPv6:
+
+ addr1 = ip_network('2001:db8::1/32')
+ addr2 = ip_network('2001:db8::1/128')
+ list(addr1.address_exclude(addr2)) =
+ [ip_network('2001:db8::1/128'),
+ ip_network('2001:db8::2/127'),
+ ip_network('2001:db8::4/126'),
+ ip_network('2001:db8::8/125'),
+ ...
+ ip_network('2001:db8:8000::/33')]
+
+ Args:
+ other: An IPv4Network or IPv6Network object of the same type.
+
+ Returns:
+ An iterator of the IPv(4|6)Network objects which is self
+ minus other.
+
+ Raises:
+ TypeError: If self and other are of differing address
+ versions, or if other is not a network object.
+ ValueError: If other is not completely contained by self.
+
+ """
+ if not self._version == other._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ self, other))
+
+ if not isinstance(other, _BaseNetwork):
+ raise TypeError("%s is not a network object" % other)
+
+ if not other.subnet_of(self):
+ raise ValueError('%s not contained in %s' % (other, self))
+ if other == self:
+ return
+
+ # Make sure we're comparing the network of other.
+ other = other.__class__('%s/%s' % (other.network_address,
+ other.prefixlen))
+
+ s1, s2 = self.subnets()
+ while s1 != other and s2 != other:
+ if other.subnet_of(s1):
+ yield s2
+ s1, s2 = s1.subnets()
+ elif other.subnet_of(s2):
+ yield s1
+ s1, s2 = s2.subnets()
+ else:
+ # If we got here, there's a bug somewhere.
+ raise AssertionError('Error performing exclusion: '
+ 's1: %s s2: %s other: %s' %
+ (s1, s2, other))
+ if s1 == other:
+ yield s2
+ elif s2 == other:
+ yield s1
+ else:
+ # If we got here, there's a bug somewhere.
+ raise AssertionError('Error performing exclusion: '
+ 's1: %s s2: %s other: %s' %
+ (s1, s2, other))
+
+ def compare_networks(self, other):
+ """Compare two IP objects.
+
+ This is only concerned about the comparison of the integer
+ representation of the network addresses. This means that the
+ host bits aren't considered at all in this method. If you want
+ to compare host bits, you can easily enough do a
+ 'HostA._ip < HostB._ip'
+
+ Args:
+ other: An IP object.
+
+ Returns:
+ If the IP versions of self and other are the same, returns:
+
+ -1 if self < other:
+ eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
+ IPv6Network('2001:db8::1000/124') <
+ IPv6Network('2001:db8::2000/124')
+ 0 if self == other
+ eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
+ IPv6Network('2001:db8::1000/124') ==
+ IPv6Network('2001:db8::1000/124')
+ 1 if self > other
+ eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
+ IPv6Network('2001:db8::2000/124') >
+ IPv6Network('2001:db8::1000/124')
+
+ Raises:
+ TypeError if the IP versions are different.
+
+ """
+ # does this need to raise a ValueError?
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same type' % (
+ self, other))
+ # self._version == other._version below here:
+ if self.network_address < other.network_address:
+ return -1
+ if self.network_address > other.network_address:
+ return 1
+ # self.network_address == other.network_address below here:
+ if self.netmask < other.netmask:
+ return -1
+ if self.netmask > other.netmask:
+ return 1
+ return 0
+
+ def _get_networks_key(self):
+ """Network-only key function.
+
+ Returns an object that identifies this address' network and
+ netmask. This function is a suitable "key" argument for sorted()
+ and list.sort().
+
+ """
+ return (self._version, self.network_address, self.netmask)
+
+ def subnets(self, prefixlen_diff=1, new_prefix=None):
+ """The subnets which join to make the current subnet.
+
+ In the case that self contains only one IP
+ (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
+        for IPv6), yield only the network itself.
+
+ Args:
+ prefixlen_diff: An integer, the amount the prefix length
+ should be increased by. This should not be set if
+ new_prefix is also set.
+ new_prefix: The desired new prefix length. This must be a
+ larger number (smaller prefix) than the existing prefix.
+ This should not be set if prefixlen_diff is also set.
+
+ Returns:
+ An iterator of IPv(4|6) objects.
+
+ Raises:
+ ValueError: The prefixlen_diff is too small or too large.
+ OR
+ prefixlen_diff and new_prefix are both set or new_prefix
+ is a smaller number than the current prefix (smaller
+ number means a larger network)
+
+ """
+ if self._prefixlen == self._max_prefixlen:
+ yield self
+ return
+
+ if new_prefix is not None:
+ if new_prefix < self._prefixlen:
+ raise ValueError('new prefix must be longer')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = new_prefix - self._prefixlen
+
+ if prefixlen_diff < 0:
+ raise ValueError('prefix length diff must be > 0')
+ new_prefixlen = self._prefixlen + prefixlen_diff
+
+ if new_prefixlen > self._max_prefixlen:
+ raise ValueError(
+ 'prefix length diff %d is invalid for netblock %s' % (
+ new_prefixlen, self))
+
+ start = int(self.network_address)
+ end = int(self.broadcast_address) + 1
+ step = (int(self.hostmask) + 1) >> prefixlen_diff
+ for new_addr in _compat_range(start, end, step):
+ current = self.__class__((new_addr, new_prefixlen))
+ yield current
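+
+    # Usage (illustrative):
+    #
+    #     >>> list(IPv4Network(u'192.0.2.0/24').subnets())
+    #     [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/25')]
+    #     >>> list(IPv4Network(u'192.0.2.0/24').subnets(new_prefix=26))[0]
+    #     IPv4Network('192.0.2.0/26')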
+
+ def supernet(self, prefixlen_diff=1, new_prefix=None):
+ """The supernet containing the current network.
+
+ Args:
+ prefixlen_diff: An integer, the amount the prefix length of
+ the network should be decreased by. For example, given a
+ /24 network and a prefixlen_diff of 3, a supernet with a
+ /21 netmask is returned.
+
+ Returns:
+ An IPv4 network object.
+
+ Raises:
+ ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
+ a negative prefix length.
+ OR
+ If prefixlen_diff and new_prefix are both set or new_prefix is a
+ larger number than the current prefix (larger number means a
+ smaller network)
+
+ """
+ if self._prefixlen == 0:
+ return self
+
+ if new_prefix is not None:
+ if new_prefix > self._prefixlen:
+ raise ValueError('new prefix must be shorter')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = self._prefixlen - new_prefix
+
+ new_prefixlen = self.prefixlen - prefixlen_diff
+ if new_prefixlen < 0:
+ raise ValueError(
+ 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
+ (self.prefixlen, prefixlen_diff))
+ return self.__class__((
+ int(self.network_address) & (int(self.netmask) << prefixlen_diff),
+ new_prefixlen))
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
+
+ """
+ return (self.network_address.is_multicast and
+ self.broadcast_address.is_multicast)
+
+ @staticmethod
+ def _is_subnet_of(a, b):
+ try:
+ # Always false if one is v4 and the other is v6.
+ if a._version != b._version:
+                raise TypeError("%s and %s are not of the same version" %
+                                (a, b))
+ return (b.network_address <= a.network_address and
+ b.broadcast_address >= a.broadcast_address)
+ except AttributeError:
+ raise TypeError("Unable to test subnet containment "
+ "between %s and %s" % (a, b))
+
+ def subnet_of(self, other):
+ """Return True if this network is a subnet of other."""
+ return self._is_subnet_of(self, other)
+
+ def supernet_of(self, other):
+ """Return True if this network is a supernet of other."""
+ return self._is_subnet_of(other, self)
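+
+    # Usage (illustrative):
+    #
+    #     >>> IPv4Network(u'192.0.2.0/28').subnet_of(IPv4Network(u'192.0.2.0/24'))
+    #     True
+    #     >>> IPv4Network(u'192.0.2.0/24').supernet_of(IPv4Network(u'192.0.2.0/28'))
+    #     True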
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
+
+ """
+ return (self.network_address.is_reserved and
+ self.broadcast_address.is_reserved)
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 4291.
+
+ """
+ return (self.network_address.is_link_local and
+ self.broadcast_address.is_link_local)
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+ """
+ return (self.network_address.is_private and
+ self.broadcast_address.is_private)
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, True if the address is not reserved per
+ iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+ """
+ return not self.is_private
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
+
+ """
+ return (self.network_address.is_unspecified and
+ self.broadcast_address.is_unspecified)
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return (self.network_address.is_loopback and
+ self.broadcast_address.is_loopback)
+
+
+class _BaseV4(object):
+
+ """Base IPv4 object.
+
+ The following methods are used by IPv4 objects in both single IP
+ addresses and networks.
+
+ """
+
+ __slots__ = ()
+ _version = 4
+ # Equivalent to 255.255.255.255 or 32 bits of 1's.
+ _ALL_ONES = (2 ** IPV4LENGTH) - 1
+ _DECIMAL_DIGITS = frozenset('0123456789')
+
+ # the valid octets for host and netmasks. only useful for IPv4.
+ _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
+
+ _max_prefixlen = IPV4LENGTH
+ # There are only a handful of valid v4 netmasks, so we cache them all
+ # when constructed (see _make_netmask()).
+ _netmask_cache = {}
+
+ def _explode_shorthand_ip_string(self):
+ return _compat_str(self)
+
+ @classmethod
+ def _make_netmask(cls, arg):
+ """Make a (netmask, prefix_len) tuple from the given argument.
+
+ Argument can be:
+ - an integer (the prefix length)
+ - a string representing the prefix length (e.g. "24")
+ - a string representing the prefix netmask (e.g. "255.255.255.0")
+ """
+ if arg not in cls._netmask_cache:
+ if isinstance(arg, _compat_int_types):
+ prefixlen = arg
+ else:
+ try:
+ # Check for a netmask in prefix length form
+ prefixlen = cls._prefix_from_prefix_string(arg)
+ except NetmaskValueError:
+ # Check for a netmask or hostmask in dotted-quad form.
+ # This may raise NetmaskValueError.
+ prefixlen = cls._prefix_from_ip_string(arg)
+ netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
+ cls._netmask_cache[arg] = netmask, prefixlen
+ return cls._netmask_cache[arg]
+
+ @classmethod
+ def _ip_int_from_string(cls, ip_str):
+ """Turn the given IP string into an integer for comparison.
+
+ Args:
+ ip_str: A string, the IP ip_str.
+
+ Returns:
+ The IP ip_str as an integer.
+
+ Raises:
+ AddressValueError: if ip_str isn't a valid IPv4 Address.
+
+ """
+ if not ip_str:
+ raise AddressValueError('Address cannot be empty')
+
+ octets = ip_str.split('.')
+ if len(octets) != 4:
+ raise AddressValueError("Expected 4 octets in %r" % ip_str)
+
+ try:
+ return _compat_int_from_byte_vals(
+ map(cls._parse_octet, octets), 'big')
+ except ValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+
+ @classmethod
+ def _parse_octet(cls, octet_str):
+ """Convert a decimal octet into an integer.
+
+ Args:
+ octet_str: A string, the number to parse.
+
+ Returns:
+ The octet as an integer.
+
+ Raises:
+ ValueError: if the octet isn't strictly a decimal from [0..255].
+
+ """
+ if not octet_str:
+ raise ValueError("Empty octet not permitted")
+ # Whitelist the characters, since int() allows a lot of bizarre stuff.
+ if not cls._DECIMAL_DIGITS.issuperset(octet_str):
+ msg = "Only decimal digits permitted in %r"
+ raise ValueError(msg % octet_str)
+ # We do the length check second, since the invalid character error
+ # is likely to be more informative for the user
+ if len(octet_str) > 3:
+ msg = "At most 3 characters permitted in %r"
+ raise ValueError(msg % octet_str)
+ # Convert to integer (we know digits are legal)
+ octet_int = int(octet_str, 10)
+ # Any octets that look like they *might* be written in octal,
+ # and which don't look exactly the same in both octal and
+ # decimal are rejected as ambiguous
+ if octet_int > 7 and octet_str[0] == '0':
+ msg = "Ambiguous (octal/decimal) value in %r not permitted"
+ raise ValueError(msg % octet_str)
+ if octet_int > 255:
+ raise ValueError("Octet %d (> 255) not permitted" % octet_int)
+ return octet_int
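+
+    # Worked examples (illustrative): '08' is rejected as ambiguous
+    # octal/decimal, '256' exceeds 255, and '1234' fails the length check;
+    # plain '0' and '42' parse normally.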
+
+ @classmethod
+ def _string_from_ip_int(cls, ip_int):
+ """Turns a 32-bit integer into dotted decimal notation.
+
+ Args:
+ ip_int: An integer, the IP address.
+
+ Returns:
+ The IP address as a string in dotted decimal notation.
+
+ """
+ return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
+ if isinstance(b, bytes)
+ else b)
+ for b in _compat_to_bytes(ip_int, 4, 'big'))
+
+ def _is_hostmask(self, ip_str):
+ """Test if the IP string is a hostmask (rather than a netmask).
+
+ Args:
+ ip_str: A string, the potential hostmask.
+
+ Returns:
+ A boolean, True if the IP string is a hostmask.
+
+ """
+ bits = ip_str.split('.')
+ try:
+ parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
+ except ValueError:
+ return False
+ if len(parts) != len(bits):
+ return False
+ if parts[0] < parts[-1]:
+ return True
+ return False
+
+ def _reverse_pointer(self):
+ """Return the reverse DNS pointer name for the IPv4 address.
+
+ This implements the method described in RFC1035 3.5.
+
+ """
+ reverse_octets = _compat_str(self).split('.')[::-1]
+ return '.'.join(reverse_octets) + '.in-addr.arpa'
+
+ @property
+ def max_prefixlen(self):
+ return self._max_prefixlen
+
+ @property
+ def version(self):
+ return self._version
+
+
+class IPv4Address(_BaseV4, _BaseAddress):
+
+ """Represent and manipulate single IPv4 Addresses."""
+
+ __slots__ = ('_ip', '__weakref__')
+
+ def __init__(self, address):
+
+ """
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv4Address('192.0.2.1') == IPv4Address(3221225985).
+ or, more generally
+ IPv4Address(int(IPv4Address('192.0.2.1'))) ==
+ IPv4Address('192.0.2.1')
+
+ Raises:
+            AddressValueError: If address isn't a valid IPv4 address.
+
+ """
+ # Efficient constructor from integer.
+ if isinstance(address, _compat_int_types):
+ self._check_int_address(address)
+ self._ip = address
+ return
+
+ # Constructing from a packed address
+ if isinstance(address, bytes):
+ self._check_packed_address(address, 4)
+ bvs = _compat_bytes_to_byte_vals(address)
+ self._ip = _compat_int_from_byte_vals(bvs, 'big')
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP string.
+ addr_str = _compat_str(address)
+ if '/' in addr_str:
+ raise AddressValueError("Unexpected '/' in %r" % address)
+ self._ip = self._ip_int_from_string(addr_str)
+
+ @property
+ def packed(self):
+ """The binary representation of this address."""
+ return v4_int_to_packed(self._ip)
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within the
+ reserved IPv4 Network range.
+
+ """
+ return self in self._constants._reserved_network
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv4-special-registry.
+
+ """
+ return any(self in net for net in self._constants._private_networks)
+
+ @property
+ def is_global(self):
+ return (
+ self not in self._constants._public_network and
+ not self.is_private)
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is multicast.
+ See RFC 3171 for details.
+
+ """
+ return self in self._constants._multicast_network
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 5735 3.
+
+ """
+ return self == self._constants._unspecified_address
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback per RFC 3330.
+
+ """
+ return self in self._constants._loopback_network
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is link-local per RFC 3927.
+
+ """
+ return self in self._constants._linklocal_network
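+
+    # Usage (illustrative): the classification properties answer the
+    # registry membership questions directly:
+    #
+    #     >>> IPv4Address(u'10.0.0.1').is_private
+    #     True
+    #     >>> IPv4Address(u'127.0.0.1').is_loopback
+    #     True
+    #     >>> IPv4Address(u'8.8.8.8').is_global
+    #     True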
+
+
+class IPv4Interface(IPv4Address):
+
+ def __init__(self, address):
+ if isinstance(address, (bytes, _compat_int_types)):
+ IPv4Address.__init__(self, address)
+ self.network = IPv4Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+
+ if isinstance(address, tuple):
+ IPv4Address.__init__(self, address[0])
+ if len(address) > 1:
+ self._prefixlen = int(address[1])
+ else:
+ self._prefixlen = self._max_prefixlen
+
+ self.network = IPv4Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+ return
+
+ addr = _split_optional_netmask(address)
+ IPv4Address.__init__(self, addr[0])
+
+ self.network = IPv4Network(address, strict=False)
+ self._prefixlen = self.network._prefixlen
+
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+
+ def __str__(self):
+ return '%s/%d' % (self._string_from_ip_int(self._ip),
+ self.network.prefixlen)
+
+ def __eq__(self, other):
+ address_equal = IPv4Address.__eq__(self, other)
+ if not address_equal or address_equal is NotImplemented:
+ return address_equal
+ try:
+ return self.network == other.network
+ except AttributeError:
+ # An interface with an associated network is NOT the
+ # same as an unassociated address. That's why the hash
+ # takes the extra info into account.
+ return False
+
+ def __lt__(self, other):
+ address_less = IPv4Address.__lt__(self, other)
+ if address_less is NotImplemented:
+ return NotImplemented
+ try:
+ return (self.network < other.network or
+ self.network == other.network and address_less)
+ except AttributeError:
+ # We *do* allow addresses and interfaces to be sorted. The
+ # unassociated address is considered less than all interfaces.
+ return False
+
+ def __hash__(self):
+ return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+ __reduce__ = _IPAddressBase.__reduce__
+
+ @property
+ def ip(self):
+ return IPv4Address(self._ip)
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.hostmask)
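+
+    # Usage (illustrative):
+    #
+    #     >>> iface = IPv4Interface(u'192.0.2.5/24')
+    #     >>> iface.ip
+    #     IPv4Address('192.0.2.5')
+    #     >>> iface.network
+    #     IPv4Network('192.0.2.0/24')
+    #     >>> iface.with_netmask
+    #     '192.0.2.5/255.255.255.0'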
+
+
+class IPv4Network(_BaseV4, _BaseNetwork):
+
+    """This class represents and manipulates 32-bit IPv4 networks and addresses.
+
+ Attributes: [examples for IPv4Network('192.0.2.0/27')]
+ .network_address: IPv4Address('192.0.2.0')
+ .hostmask: IPv4Address('0.0.0.31')
+        .broadcast_address: IPv4Address('192.0.2.31')
+ .netmask: IPv4Address('255.255.255.224')
+ .prefixlen: 27
+
+ """
+ # Class to use when creating address objects
+ _address_class = IPv4Address
+
+ def __init__(self, address, strict=True):
+
+ """Instantiate a new IPv4 network object.
+
+ Args:
+ address: A string or integer representing the IP [& network].
+ '192.0.2.0/24'
+ '192.0.2.0/255.255.255.0'
+ '192.0.0.2/0.0.0.255'
+ are all functionally the same in IPv4. Similarly,
+ '192.0.2.1'
+ '192.0.2.1/255.255.255.255'
+ '192.0.2.1/32'
+ are also functionally equivalent. That is to say, failing to
+ provide a subnetmask will create an object with a mask of /32.
+
+ If the mask (portion after the / in the argument) is given in
+ dotted quad form, it is treated as a netmask if it starts with a
+ non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
+ starts with a zero field (e.g. 0.255.255.255 == /8), with the
+ single exception of an all-zero mask which is treated as a
+ netmask == /0. If no mask is given, a default of /32 is used.
+
+ Additionally, an integer can be passed, so
+ IPv4Network('192.0.2.1') == IPv4Network(3221225985)
+ or, more generally
+ IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
+ IPv4Interface('192.0.2.1')
+
+ Raises:
+            AddressValueError: If address isn't a valid IPv4 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv4 address.
+ ValueError: If strict is True and a network address is not
+ supplied.
+
+ """
+ _BaseNetwork.__init__(self, address)
+
+ # Constructing from a packed address or integer
+ if isinstance(address, (_compat_int_types, bytes)):
+ self.network_address = IPv4Address(address)
+ self.netmask, self._prefixlen = self._make_netmask(
+ self._max_prefixlen)
+ # fixme: address/network test here.
+ return
+
+ if isinstance(address, tuple):
+ if len(address) > 1:
+ arg = address[1]
+ else:
+ # We weren't given an address[1]
+ arg = self._max_prefixlen
+ self.network_address = IPv4Address(address[0])
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+ packed = int(self.network_address)
+ if packed & int(self.netmask) != packed:
+ if strict:
+ raise ValueError('%s has host bits set' % self)
+ else:
+ self.network_address = IPv4Address(packed &
+ int(self.netmask))
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = _split_optional_netmask(address)
+ self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
+
+ if len(addr) == 2:
+ arg = addr[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+
+ if strict:
+ if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
+ self.network_address):
+ raise ValueError('%s has host bits set' % self)
+ self.network_address = IPv4Address(int(self.network_address) &
+ int(self.netmask))
+
+ if self._prefixlen == (self._max_prefixlen - 1):
+ self.hosts = self.__iter__
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, True if the address is not reserved per
+ iana-ipv4-special-registry.
+
+ """
+ return (not (self.network_address in IPv4Network('100.64.0.0/10') and
+ self.broadcast_address in IPv4Network('100.64.0.0/10')) and
+ not self.is_private)
+
+
+class _IPv4Constants(object):
+
+ _linklocal_network = IPv4Network('169.254.0.0/16')
+
+ _loopback_network = IPv4Network('127.0.0.0/8')
+
+ _multicast_network = IPv4Network('224.0.0.0/4')
+
+ _public_network = IPv4Network('100.64.0.0/10')
+
+ _private_networks = [
+ IPv4Network('0.0.0.0/8'),
+ IPv4Network('10.0.0.0/8'),
+ IPv4Network('127.0.0.0/8'),
+ IPv4Network('169.254.0.0/16'),
+ IPv4Network('172.16.0.0/12'),
+ IPv4Network('192.0.0.0/29'),
+ IPv4Network('192.0.0.170/31'),
+ IPv4Network('192.0.2.0/24'),
+ IPv4Network('192.168.0.0/16'),
+ IPv4Network('198.18.0.0/15'),
+ IPv4Network('198.51.100.0/24'),
+ IPv4Network('203.0.113.0/24'),
+ IPv4Network('240.0.0.0/4'),
+ IPv4Network('255.255.255.255/32'),
+ ]
+
+ _reserved_network = IPv4Network('240.0.0.0/4')
+
+ _unspecified_address = IPv4Address('0.0.0.0')
+
+
+IPv4Address._constants = _IPv4Constants
+
+
+class _BaseV6(object):
+
+ """Base IPv6 object.
+
+ The following methods are used by IPv6 objects in both single IP
+ addresses and networks.
+
+ """
+
+ __slots__ = ()
+ _version = 6
+ _ALL_ONES = (2 ** IPV6LENGTH) - 1
+ _HEXTET_COUNT = 8
+ _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
+ _max_prefixlen = IPV6LENGTH
+
+ # There are only a bunch of valid v6 netmasks, so we cache them all
+ # when constructed (see _make_netmask()).
+ _netmask_cache = {}
+
+ @classmethod
+ def _make_netmask(cls, arg):
+ """Make a (netmask, prefix_len) tuple from the given argument.
+
+ Argument can be:
+ - an integer (the prefix length)
+ - a string representing the prefix length (e.g. "24")
+ - a string representing the prefix netmask (e.g. "255.255.255.0")
+ """
+ if arg not in cls._netmask_cache:
+ if isinstance(arg, _compat_int_types):
+ prefixlen = arg
+ else:
+ prefixlen = cls._prefix_from_prefix_string(arg)
+ netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
+ cls._netmask_cache[arg] = netmask, prefixlen
+ return cls._netmask_cache[arg]
+
+ @classmethod
+ def _ip_int_from_string(cls, ip_str):
+ """Turn an IPv6 ip_str into an integer.
+
+ Args:
+ ip_str: A string, the IPv6 ip_str.
+
+ Returns:
+ An int, the IPv6 address
+
+ Raises:
+ AddressValueError: if ip_str isn't a valid IPv6 Address.
+
+ """
+ if not ip_str:
+ raise AddressValueError('Address cannot be empty')
+
+ parts = ip_str.split(':')
+
+ # An IPv6 address needs at least 2 colons (3 parts).
+ _min_parts = 3
+ if len(parts) < _min_parts:
+ msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
+ raise AddressValueError(msg)
+
+ # If the address has an IPv4-style suffix, convert it to hexadecimal.
+ if '.' in parts[-1]:
+ try:
+ ipv4_int = IPv4Address(parts.pop())._ip
+ except AddressValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+ parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
+ parts.append('%x' % (ipv4_int & 0xFFFF))
+
+ # An IPv6 address can't have more than 8 colons (9 parts).
+ # The extra colon comes from using the "::" notation for a single
+ # leading or trailing zero part.
+ _max_parts = cls._HEXTET_COUNT + 1
+ if len(parts) > _max_parts:
+ msg = "At most %d colons permitted in %r" % (
+ _max_parts - 1, ip_str)
+ raise AddressValueError(msg)
+
+ # Disregarding the endpoints, find '::' with nothing in between.
+ # This indicates that a run of zeroes has been skipped.
+ skip_index = None
+ for i in _compat_range(1, len(parts) - 1):
+ if not parts[i]:
+ if skip_index is not None:
+ # Can't have more than one '::'
+ msg = "At most one '::' permitted in %r" % ip_str
+ raise AddressValueError(msg)
+ skip_index = i
+
+ # parts_hi is the number of parts to copy from above/before the '::'
+ # parts_lo is the number of parts to copy from below/after the '::'
+ if skip_index is not None:
+ # If we found a '::', then check if it also covers the endpoints.
+ parts_hi = skip_index
+ parts_lo = len(parts) - skip_index - 1
+ if not parts[0]:
+ parts_hi -= 1
+ if parts_hi:
+ msg = "Leading ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # ^: requires ^::
+ if not parts[-1]:
+ parts_lo -= 1
+ if parts_lo:
+ msg = "Trailing ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # :$ requires ::$
+ parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
+ if parts_skipped < 1:
+ msg = "Expected at most %d other parts with '::' in %r"
+ raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
+ else:
+ # Otherwise, allocate the entire address to parts_hi. The
+ # endpoints could still be empty, but _parse_hextet() will check
+ # for that.
+ if len(parts) != cls._HEXTET_COUNT:
+ msg = "Exactly %d parts expected without '::' in %r"
+ raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
+ if not parts[0]:
+ msg = "Leading ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # ^: requires ^::
+ if not parts[-1]:
+ msg = "Trailing ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # :$ requires ::$
+ parts_hi = len(parts)
+ parts_lo = 0
+ parts_skipped = 0
+
+ try:
+ # Now, parse the hextets into a 128-bit integer.
+ ip_int = 0
+ for i in range(parts_hi):
+ ip_int <<= 16
+ ip_int |= cls._parse_hextet(parts[i])
+ ip_int <<= 16 * parts_skipped
+ for i in range(-parts_lo, 0):
+ ip_int <<= 16
+ ip_int |= cls._parse_hextet(parts[i])
+ return ip_int
+ except ValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+
+ @classmethod
+ def _parse_hextet(cls, hextet_str):
+ """Convert an IPv6 hextet string into an integer.
+
+ Args:
+ hextet_str: A string, the number to parse.
+
+ Returns:
+ The hextet as an integer.
+
+ Raises:
+ ValueError: if the input isn't strictly a hex number from
+ [0..FFFF].
+
+ """
+ # Whitelist the characters, since int() allows a lot of bizarre stuff.
+ if not cls._HEX_DIGITS.issuperset(hextet_str):
+ raise ValueError("Only hex digits permitted in %r" % hextet_str)
+ # We do the length check second, since the invalid character error
+ # is likely to be more informative for the user
+ if len(hextet_str) > 4:
+ msg = "At most 4 characters permitted in %r"
+ raise ValueError(msg % hextet_str)
+ # Length check means we can skip checking the integer value
+ return int(hextet_str, 16)
+
+ @classmethod
+ def _compress_hextets(cls, hextets):
+ """Compresses a list of hextets.
+
+ Compresses a list of strings, replacing the longest continuous
+ sequence of "0" in the list with "" and adding empty strings at
+ the beginning or at the end of the string such that subsequently
+ calling ":".join(hextets) will produce the compressed version of
+ the IPv6 address.
+
+ Args:
+ hextets: A list of strings, the hextets to compress.
+
+ Returns:
+ A list of strings.
+
+ """
+ best_doublecolon_start = -1
+ best_doublecolon_len = 0
+ doublecolon_start = -1
+ doublecolon_len = 0
+ for index, hextet in enumerate(hextets):
+ if hextet == '0':
+ doublecolon_len += 1
+ if doublecolon_start == -1:
+ # Start of a sequence of zeros.
+ doublecolon_start = index
+ if doublecolon_len > best_doublecolon_len:
+ # This is the longest sequence of zeros so far.
+ best_doublecolon_len = doublecolon_len
+ best_doublecolon_start = doublecolon_start
+ else:
+ doublecolon_len = 0
+ doublecolon_start = -1
+
+ if best_doublecolon_len > 1:
+ best_doublecolon_end = (best_doublecolon_start +
+ best_doublecolon_len)
+ # For zeros at the end of the address.
+ if best_doublecolon_end == len(hextets):
+ hextets += ['']
+ hextets[best_doublecolon_start:best_doublecolon_end] = ['']
+ # For zeros at the beginning of the address.
+ if best_doublecolon_start == 0:
+ hextets = [''] + hextets
+
+ return hextets
+
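A quick illustration of the compression rule: the longest run of '0' hextets
collapses to a single empty string, and a leading or trailing run gets an
extra '' so that the join produces '::'::

    # ['2001', 'db8', '0', '0', '0', '0', '0', '1'] compresses to:
    assert ':'.join(['2001', 'db8', '', '1']) == '2001:db8::1'
    # An all-zero address ends up as ['', '', ''], so that:
    assert ':'.join(['', '', '']) == '::'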
+ @classmethod
+ def _string_from_ip_int(cls, ip_int=None):
+ """Turns a 128-bit integer into hexadecimal notation.
+
+ Args:
+ ip_int: An integer, the IP address.
+
+ Returns:
+ A string, the hexadecimal representation of the address.
+
+ Raises:
+ ValueError: The address is bigger than 128 bits of all ones.
+
+ """
+ if ip_int is None:
+ ip_int = int(cls._ip)
+
+ if ip_int > cls._ALL_ONES:
+ raise ValueError('IPv6 address is too large')
+
+ hex_str = '%032x' % ip_int
+ hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
+
+ hextets = cls._compress_hextets(hextets)
+ return ':'.join(hextets)
+
+ def _explode_shorthand_ip_string(self):
+ """Expand a shortened IPv6 address.
+
+ Args:
+ ip_str: A string, the IPv6 address.
+
+ Returns:
+ A string, the expanded IPv6 address.
+
+ """
+ if isinstance(self, IPv6Network):
+ ip_str = _compat_str(self.network_address)
+ elif isinstance(self, IPv6Interface):
+ ip_str = _compat_str(self.ip)
+ else:
+ ip_str = _compat_str(self)
+
+ ip_int = self._ip_int_from_string(ip_str)
+ hex_str = '%032x' % ip_int
+ parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
+ if isinstance(self, (_BaseNetwork, IPv6Interface)):
+ return '%s/%d' % (':'.join(parts), self._prefixlen)
+ return ':'.join(parts)
+
+ def _reverse_pointer(self):
+ """Return the reverse DNS pointer name for the IPv6 address.
+
+ This implements the method described in RFC3596 2.5.
+
+ """
+ reverse_chars = self.exploded[::-1].replace(':', '')
+ return '.'.join(reverse_chars) + '.ip6.arpa'
+
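For example, via the reverse_pointer property defined earlier in this module
(every nibble of the exploded form, reversed and dot-joined)::

    addr = IPv6Address('2001:db8::1')
    assert addr.reverse_pointer == (
        '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.'
        '0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa')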
+ @property
+ def max_prefixlen(self):
+ return self._max_prefixlen
+
+ @property
+ def version(self):
+ return self._version
+
+
+class IPv6Address(_BaseV6, _BaseAddress):
+
+ """Represent and manipulate single IPv6 Addresses."""
+
+ __slots__ = ('_ip', '__weakref__')
+
+ def __init__(self, address):
+ """Instantiate a new IPv6 address object.
+
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv6Address('2001:db8::') ==
+ IPv6Address(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Address(int(IPv6Address('2001:db8::'))) ==
+ IPv6Address('2001:db8::')
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+
+ """
+ # Efficient constructor from integer.
+ if isinstance(address, _compat_int_types):
+ self._check_int_address(address)
+ self._ip = address
+ return
+
+ # Constructing from a packed address
+ if isinstance(address, bytes):
+ self._check_packed_address(address, 16)
+ bvs = _compat_bytes_to_byte_vals(address)
+ self._ip = _compat_int_from_byte_vals(bvs, 'big')
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP string.
+ addr_str = _compat_str(address)
+ if '/' in addr_str:
+ raise AddressValueError("Unexpected '/' in %r" % address)
+ self._ip = self._ip_int_from_string(addr_str)
+
+ @property
+ def packed(self):
+ """The binary representation of this address."""
+ return v6_int_to_packed(self._ip)
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
+
+ """
+ return self in self._constants._multicast_network
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
+
+ """
+ return any(self in x for x in self._constants._reserved_networks)
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 4291.
+
+ """
+ return self in self._constants._linklocal_network
+
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
+
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+ """
+ return self in self._constants._sitelocal_network
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return any(self in net for net in self._constants._private_networks)
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+            A boolean, True if the address is not reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return not self.is_private
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
+
+ """
+ return self._ip == 0
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return self._ip == 1
+
+ @property
+ def ipv4_mapped(self):
+ """Return the IPv4 mapped address.
+
+ Returns:
+ If the IPv6 address is a v4 mapped address, return the
+ IPv4 mapped address. Return None otherwise.
+
+ """
+ if (self._ip >> 32) != 0xFFFF:
+ return None
+ return IPv4Address(self._ip & 0xFFFFFFFF)
+
+ @property
+ def teredo(self):
+ """Tuple of embedded teredo IPs.
+
+ Returns:
+ Tuple of the (server, client) IPs or None if the address
+ doesn't appear to be a teredo address (doesn't start with
+ 2001::/32)
+
+ """
+ if (self._ip >> 96) != 0x20010000:
+ return None
+ return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
+ IPv4Address(~self._ip & 0xFFFFFFFF))
+
+ @property
+ def sixtofour(self):
+ """Return the IPv4 6to4 embedded address.
+
+ Returns:
+ The IPv4 6to4-embedded address if present or None if the
+ address doesn't appear to contain a 6to4 embedded address.
+
+ """
+ if (self._ip >> 112) != 0x2002:
+ return None
+ return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
+
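Illustrative use of the three embedded-IPv4 properties above, using the
IPv6Address/IPv4Address classes from this module and addresses from the
192.0.2.0/24 documentation range::

    addr = IPv6Address('::ffff:c000:201')                # v4-mapped
    assert addr.ipv4_mapped == IPv4Address('192.0.2.1')

    addr = IPv6Address('2002:c000:201::')                # 6to4 (2002::/16)
    assert addr.sixtofour == IPv4Address('192.0.2.1')

    addr = IPv6Address('2001:0:c000:201:0:0:3fff:fdfe')  # teredo (2001::/32)
    server, client = addr.teredo            # client bits are stored inverted
    assert server == client == IPv4Address('192.0.2.1')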
+
+class IPv6Interface(IPv6Address):
+
+ def __init__(self, address):
+ if isinstance(address, (bytes, _compat_int_types)):
+ IPv6Address.__init__(self, address)
+ self.network = IPv6Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+ if isinstance(address, tuple):
+ IPv6Address.__init__(self, address[0])
+ if len(address) > 1:
+ self._prefixlen = int(address[1])
+ else:
+ self._prefixlen = self._max_prefixlen
+ self.network = IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+ return
+
+ addr = _split_optional_netmask(address)
+ IPv6Address.__init__(self, addr[0])
+ self.network = IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self._prefixlen = self.network._prefixlen
+ self.hostmask = self.network.hostmask
+
+ def __str__(self):
+ return '%s/%d' % (self._string_from_ip_int(self._ip),
+ self.network.prefixlen)
+
+ def __eq__(self, other):
+ address_equal = IPv6Address.__eq__(self, other)
+ if not address_equal or address_equal is NotImplemented:
+ return address_equal
+ try:
+ return self.network == other.network
+ except AttributeError:
+ # An interface with an associated network is NOT the
+ # same as an unassociated address. That's why the hash
+ # takes the extra info into account.
+ return False
+
+ def __lt__(self, other):
+ address_less = IPv6Address.__lt__(self, other)
+ if address_less is NotImplemented:
+ return NotImplemented
+ try:
+ return (self.network < other.network or
+ self.network == other.network and address_less)
+ except AttributeError:
+ # We *do* allow addresses and interfaces to be sorted. The
+ # unassociated address is considered less than all interfaces.
+ return False
+
+ def __hash__(self):
+ return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+ __reduce__ = _IPAddressBase.__reduce__
+
+ @property
+ def ip(self):
+ return IPv6Address(self._ip)
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.hostmask)
+
+ @property
+ def is_unspecified(self):
+ return self._ip == 0 and self.network.is_unspecified
+
+ @property
+ def is_loopback(self):
+ return self._ip == 1 and self.network.is_loopback
+
+
+class IPv6Network(_BaseV6, _BaseNetwork):
+
+ """This class represents and manipulates 128-bit IPv6 networks.
+
+    Attributes: [examples for IPv6Network('2001:db8::1000/124')]
+ .network_address: IPv6Address('2001:db8::1000')
+ .hostmask: IPv6Address('::f')
+ .broadcast_address: IPv6Address('2001:db8::100f')
+ .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
+ .prefixlen: 124
+
+ """
+
+ # Class to use when creating address objects
+ _address_class = IPv6Address
+
+ def __init__(self, address, strict=True):
+ """Instantiate a new IPv6 Network object.
+
+ Args:
+ address: A string or integer representing the IPv6 network or the
+ IP and prefix/netmask.
+ '2001:db8::/128'
+ '2001:db8:0000:0000:0000:0000:0000:0000/128'
+ '2001:db8::'
+ are all functionally the same in IPv6. That is to say,
+ failing to provide a subnetmask will create an object with
+ a mask of /128.
+
+ Additionally, an integer can be passed, so
+ IPv6Network('2001:db8::') ==
+ IPv6Network(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Network(int(IPv6Network('2001:db8::'))) ==
+ IPv6Network('2001:db8::')
+
+            strict: A boolean. If true, ensure that we have been passed
+              a true network address, e.g., 2001:db8::1000/124, and not an
+              IP address on a network, e.g., 2001:db8::1/124.
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv6 address.
+ ValueError: If strict was True and a network address was not
+ supplied.
+
+ """
+ _BaseNetwork.__init__(self, address)
+
+ # Efficient constructor from integer or packed address
+ if isinstance(address, (bytes, _compat_int_types)):
+ self.network_address = IPv6Address(address)
+ self.netmask, self._prefixlen = self._make_netmask(
+ self._max_prefixlen)
+ return
+
+ if isinstance(address, tuple):
+ if len(address) > 1:
+ arg = address[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+ self.network_address = IPv6Address(address[0])
+ packed = int(self.network_address)
+ if packed & int(self.netmask) != packed:
+ if strict:
+ raise ValueError('%s has host bits set' % self)
+ else:
+ self.network_address = IPv6Address(packed &
+ int(self.netmask))
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = _split_optional_netmask(address)
+
+ self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
+
+ if len(addr) == 2:
+ arg = addr[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+
+ if strict:
+ if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
+ self.network_address):
+ raise ValueError('%s has host bits set' % self)
+ self.network_address = IPv6Address(int(self.network_address) &
+ int(self.netmask))
+
+ if self._prefixlen == (self._max_prefixlen - 1):
+ self.hosts = self.__iter__
+
+ def hosts(self):
+ """Generate Iterator over usable hosts in a network.
+
+ This is like __iter__ except it doesn't return the
+ Subnet-Router anycast address.
+
+ """
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ for x in _compat_range(network + 1, broadcast + 1):
+ yield self._address_class(x)
+
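For example, hosts() skips only the network address itself (the
Subnet-Router anycast address)::

    net = IPv6Network('2001:db8::/126')
    assert list(net.hosts()) == [IPv6Address('2001:db8::1'),
                                 IPv6Address('2001:db8::2'),
                                 IPv6Address('2001:db8::3')]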
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
+
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+ """
+ return (self.network_address.is_site_local and
+ self.broadcast_address.is_site_local)
+
+
+class _IPv6Constants(object):
+
+ _linklocal_network = IPv6Network('fe80::/10')
+
+ _multicast_network = IPv6Network('ff00::/8')
+
+ _private_networks = [
+ IPv6Network('::1/128'),
+ IPv6Network('::/128'),
+ IPv6Network('::ffff:0:0/96'),
+ IPv6Network('100::/64'),
+ IPv6Network('2001::/23'),
+ IPv6Network('2001:2::/48'),
+ IPv6Network('2001:db8::/32'),
+ IPv6Network('2001:10::/28'),
+ IPv6Network('fc00::/7'),
+ IPv6Network('fe80::/10'),
+ ]
+
+ _reserved_networks = [
+ IPv6Network('::/8'), IPv6Network('100::/8'),
+ IPv6Network('200::/7'), IPv6Network('400::/6'),
+ IPv6Network('800::/5'), IPv6Network('1000::/4'),
+ IPv6Network('4000::/3'), IPv6Network('6000::/3'),
+ IPv6Network('8000::/3'), IPv6Network('A000::/3'),
+ IPv6Network('C000::/3'), IPv6Network('E000::/4'),
+ IPv6Network('F000::/5'), IPv6Network('F800::/6'),
+ IPv6Network('FE00::/9'),
+ ]
+
+ _sitelocal_network = IPv6Network('fec0::/10')
+
+
+IPv6Address._constants = _IPv6Constants
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__init__.py
new file mode 100644
index 00000000..a6f44a55
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__init__.py
@@ -0,0 +1,347 @@
+# -*- coding: utf-8 -*-
+
+"""
+lockfile.py - Platform-independent advisory file locks.
+
+Requires Python 2.5 unless you apply 2.4.diff
+Locking is done on a per-thread basis instead of a per-process basis.
+
+Usage:
+
+>>> lock = LockFile('somefile')
+>>> try:
+... lock.acquire()
+... except AlreadyLocked:
+... print 'somefile', 'is locked already.'
+... except LockFailed:
+... print 'somefile', 'can\\'t be locked.'
+... else:
+... print 'got lock'
+got lock
+>>> print lock.is_locked()
+True
+>>> lock.release()
+
+>>> lock = LockFile('somefile')
+>>> print lock.is_locked()
+False
+>>> with lock:
+... print lock.is_locked()
+True
+>>> print lock.is_locked()
+False
+
+>>> lock = LockFile('somefile')
+>>> # It is okay to lock twice from the same thread...
+>>> with lock:
+... lock.acquire()
+...
+>>> # Though no counter is kept, so you can't unlock multiple times...
+>>> print lock.is_locked()
+False
+
+Exceptions:
+
+ Error - base class for other exceptions
+ LockError - base class for all locking exceptions
+ AlreadyLocked - Another thread or process already holds the lock
+ LockFailed - Lock failed for some other reason
+ UnlockError - base class for all unlocking exceptions
+ AlreadyUnlocked - File was not locked.
+ NotMyLock - File was locked but not by the current thread/process
+"""
+
+from __future__ import absolute_import
+
+import functools
+import os
+import socket
+import threading
+import warnings
+
+# Work with PEP8 and non-PEP8 versions of threading module.
+if not hasattr(threading, "current_thread"):
+ threading.current_thread = threading.currentThread
+if not hasattr(threading.Thread, "get_name"):
+ threading.Thread.get_name = threading.Thread.getName
+
+__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked',
+ 'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock',
+ 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock',
+ 'LockBase', 'locked']
+
+
+class Error(Exception):
+ """
+ Base class for other exceptions.
+
+ >>> try:
+ ... raise Error
+ ... except Exception:
+ ... pass
+ """
+ pass
+
+
+class LockError(Error):
+ """
+ Base class for error arising from attempts to acquire the lock.
+
+ >>> try:
+ ... raise LockError
+ ... except Error:
+ ... pass
+ """
+ pass
+
+
+class LockTimeout(LockError):
+ """Raised when lock creation fails within a user-defined period of time.
+
+ >>> try:
+ ... raise LockTimeout
+ ... except LockError:
+ ... pass
+ """
+ pass
+
+
+class AlreadyLocked(LockError):
+ """Some other thread/process is locking the file.
+
+ >>> try:
+ ... raise AlreadyLocked
+ ... except LockError:
+ ... pass
+ """
+ pass
+
+
+class LockFailed(LockError):
+ """Lock file creation failed for some other reason.
+
+ >>> try:
+ ... raise LockFailed
+ ... except LockError:
+ ... pass
+ """
+ pass
+
+
+class UnlockError(Error):
+ """
+ Base class for errors arising from attempts to release the lock.
+
+ >>> try:
+ ... raise UnlockError
+ ... except Error:
+ ... pass
+ """
+ pass
+
+
+class NotLocked(UnlockError):
+ """Raised when an attempt is made to unlock an unlocked file.
+
+ >>> try:
+ ... raise NotLocked
+ ... except UnlockError:
+ ... pass
+ """
+ pass
+
+
+class NotMyLock(UnlockError):
+ """Raised when an attempt is made to unlock a file someone else locked.
+
+ >>> try:
+ ... raise NotMyLock
+ ... except UnlockError:
+ ... pass
+ """
+ pass
+
+
+class _SharedBase(object):
+ def __init__(self, path):
+ self.path = path
+
+ def acquire(self, timeout=None):
+ """
+ Acquire the lock.
+
+ * If timeout is omitted (or None), wait forever trying to lock the
+ file.
+
+ * If timeout > 0, try to acquire the lock for that many seconds. If
+ the lock period expires and the file is still locked, raise
+ LockTimeout.
+
+ * If timeout <= 0, raise AlreadyLocked immediately if the file is
+ already locked.
+ """
+        raise NotImplementedError("implement in subclass")
+
+ def release(self):
+ """
+ Release the lock.
+
+ If the file is not locked, raise NotLocked.
+ """
+        raise NotImplementedError("implement in subclass")
+
+ def __enter__(self):
+ """
+ Context manager support.
+ """
+ self.acquire()
+ return self
+
+ def __exit__(self, *_exc):
+ """
+ Context manager support.
+ """
+ self.release()
+
+ def __repr__(self):
+ return "<%s: %r>" % (self.__class__.__name__, self.path)
+
+
+class LockBase(_SharedBase):
+ """Base class for platform-specific lock classes."""
+ def __init__(self, path, threaded=True, timeout=None):
+ """
+ >>> lock = LockBase('somefile')
+ >>> lock = LockBase('somefile', threaded=False)
+ """
+ super(LockBase, self).__init__(path)
+ self.lock_file = os.path.abspath(path) + ".lock"
+ self.hostname = socket.gethostname()
+ self.pid = os.getpid()
+ if threaded:
+ t = threading.current_thread()
+ # Thread objects in Python 2.4 and earlier do not have ident
+            # attrs. Work around that.
+ ident = getattr(t, "ident", hash(t))
+ self.tname = "-%x" % (ident & 0xffffffff)
+ else:
+ self.tname = ""
+ dirname = os.path.dirname(self.lock_file)
+
+        # The unique name is mostly about the current process, but it must
+        # also contain the path -- otherwise, two adjacent locked
+        # files conflict (one file gets locked, creating lock-file and
+        # unique file, the other one gets locked, creating lock-file
+        # and overwriting the already existing lock-file, then one
+        # gets unlocked, deleting both lock-file and unique file, and
+        # finally the last lock errors out upon releasing).
+ self.unique_name = os.path.join(dirname,
+ "%s%s.%s%s" % (self.hostname,
+ self.tname,
+ self.pid,
+ hash(self.path)))
+ self.timeout = timeout
+
+ def is_locked(self):
+ """
+ Tell whether or not the file is locked.
+ """
+        raise NotImplementedError("implement in subclass")
+
+ def i_am_locking(self):
+ """
+ Return True if this object is locking the file.
+ """
+        raise NotImplementedError("implement in subclass")
+
+ def break_lock(self):
+ """
+ Remove a lock. Useful if a locking thread failed to unlock.
+ """
+        raise NotImplementedError("implement in subclass")
+
+ def __repr__(self):
+ return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,
+ self.path)
+
+
+def _fl_helper(cls, mod, *args, **kwds):
+ warnings.warn("Import from %s module instead of lockfile package" % mod,
+ DeprecationWarning, stacklevel=2)
+    # This is a bit funky, but it's only for a while. The way the unit tests
+ # are constructed this function winds up as an unbound method, so it
+ # actually takes three args, not two. We want to toss out self.
+ if not isinstance(args[0], str):
+ # We are testing, avoid the first arg
+ args = args[1:]
+ if len(args) == 1 and not kwds:
+ kwds["threaded"] = True
+ return cls(*args, **kwds)
+
+
+def LinkFileLock(*args, **kwds):
+ """Factory function provided for backwards compatibility.
+
+ Do not use in new code. Instead, import LinkLockFile from the
+ lockfile.linklockfile module.
+ """
+ from . import linklockfile
+ return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile",
+ *args, **kwds)
+
+
+def MkdirFileLock(*args, **kwds):
+ """Factory function provided for backwards compatibility.
+
+ Do not use in new code. Instead, import MkdirLockFile from the
+ lockfile.mkdirlockfile module.
+ """
+ from . import mkdirlockfile
+ return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile",
+ *args, **kwds)
+
+
+def SQLiteFileLock(*args, **kwds):
+ """Factory function provided for backwards compatibility.
+
+ Do not use in new code. Instead, import SQLiteLockFile from the
+    lockfile.sqlitelockfile module.
+ """
+ from . import sqlitelockfile
+ return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile",
+ *args, **kwds)
+
+
+def locked(path, timeout=None):
+ """Decorator which enables locks for decorated function.
+
+ Arguments:
+ - path: path for lockfile.
+ - timeout (optional): Timeout for acquiring lock.
+
+ Usage:
+ @locked('/var/run/myname', timeout=0)
+ def myname(...):
+ ...
+ """
+ def decor(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ lock = FileLock(path, timeout=timeout)
+ lock.acquire()
+ try:
+ return func(*args, **kwargs)
+ finally:
+ lock.release()
+ return wrapper
+ return decor
+
+
+if hasattr(os, "link"):
+ from . import linklockfile as _llf
+ LockFile = _llf.LinkLockFile
+else:
+ from . import mkdirlockfile as _mlf
+ LockFile = _mlf.MkdirLockFile
+
+FileLock = LockFile
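A minimal Python 3 usage sketch of the timeout semantics documented on
_SharedBase.acquire(), importing the package under its upstream name (the
'/tmp/demo' path is arbitrary)::

    from lockfile import LockFile, LockTimeout, AlreadyLocked

    lock = LockFile('/tmp/demo')       # the lock file is '/tmp/demo.lock'
    try:
        lock.acquire(timeout=5)        # timeout > 0: LockTimeout after 5s
    except LockTimeout:
        print('gave up waiting')
    except AlreadyLocked:              # raised instead when timeout <= 0
        print('already held')
    else:
        try:
            pass                       # work while holding the lock
        finally:
            lock.release()

    with LockFile('/tmp/demo'):        # timeout=None: block until acquired
        pass                           # released automatically on exit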
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..22a11a87
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/linklockfile.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/linklockfile.cpython-37.pyc
new file mode 100644
index 00000000..eb655c83
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/linklockfile.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-37.pyc
new file mode 100644
index 00000000..b3efe287
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-37.pyc
new file mode 100644
index 00000000..4447ee32
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-37.pyc
new file mode 100644
index 00000000..c5f3955a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-37.pyc
new file mode 100644
index 00000000..e32643a3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/linklockfile.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/linklockfile.py
new file mode 100644
index 00000000..2ca9be04
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/linklockfile.py
@@ -0,0 +1,73 @@
+from __future__ import absolute_import
+
+import time
+import os
+
+from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
+ AlreadyLocked)
+
+
+class LinkLockFile(LockBase):
+ """Lock access to a file using atomic property of link(2).
+
+ >>> lock = LinkLockFile('somefile')
+ >>> lock = LinkLockFile('somefile', threaded=False)
+ """
+
+ def acquire(self, timeout=None):
+ try:
+ open(self.unique_name, "wb").close()
+ except IOError:
+ raise LockFailed("failed to create %s" % self.unique_name)
+
+ timeout = timeout if timeout is not None else self.timeout
+ end_time = time.time()
+ if timeout is not None and timeout > 0:
+ end_time += timeout
+
+ while True:
+ # Try and create a hard link to it.
+ try:
+ os.link(self.unique_name, self.lock_file)
+ except OSError:
+ # Link creation failed. Maybe we've double-locked?
+ nlinks = os.stat(self.unique_name).st_nlink
+ if nlinks == 2:
+ # The original link plus the one I created == 2. We're
+ # good to go.
+ return
+ else:
+ # Otherwise the lock creation failed.
+ if timeout is not None and time.time() > end_time:
+ os.unlink(self.unique_name)
+ if timeout > 0:
+ raise LockTimeout("Timeout waiting to acquire"
+ " lock for %s" %
+ self.path)
+ else:
+ raise AlreadyLocked("%s is already locked" %
+ self.path)
+ time.sleep(timeout is not None and timeout / 10 or 0.1)
+ else:
+ # Link creation succeeded. We're good to go.
+ return
+
+ def release(self):
+ if not self.is_locked():
+ raise NotLocked("%s is not locked" % self.path)
+ elif not os.path.exists(self.unique_name):
+ raise NotMyLock("%s is locked, but not by me" % self.path)
+ os.unlink(self.unique_name)
+ os.unlink(self.lock_file)
+
+ def is_locked(self):
+ return os.path.exists(self.lock_file)
+
+ def i_am_locking(self):
+ return (self.is_locked() and
+ os.path.exists(self.unique_name) and
+ os.stat(self.unique_name).st_nlink == 2)
+
+ def break_lock(self):
+ if os.path.exists(self.lock_file):
+ os.unlink(self.lock_file)
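LinkLockFile works because link(2) is atomic: the second name either appears
or the call fails, and st_nlink == 2 proves the lock name points at our
unique file. A minimal sketch of that argument, assuming a POSIX filesystem::

    import os
    import tempfile

    workdir = tempfile.mkdtemp()
    unique = os.path.join(workdir, 'me')
    lockname = os.path.join(workdir, 'shared.lock')

    open(unique, 'wb').close()
    os.link(unique, lockname)              # atomic: succeeds or raises OSError
    assert os.stat(unique).st_nlink == 2   # both names share our inode

    try:
        os.link(unique, lockname)          # a competitor fails here (EEXIST)
    except OSError:
        pass

    os.unlink(lockname)                    # release
    os.unlink(unique)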
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py
new file mode 100644
index 00000000..05a8c96c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py
@@ -0,0 +1,84 @@
+from __future__ import absolute_import, division
+
+import time
+import os
+import sys
+import errno
+
+from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
+ AlreadyLocked)
+
+
+class MkdirLockFile(LockBase):
+ """Lock file by creating a directory."""
+ def __init__(self, path, threaded=True, timeout=None):
+ """
+ >>> lock = MkdirLockFile('somefile')
+ >>> lock = MkdirLockFile('somefile', threaded=False)
+ """
+ LockBase.__init__(self, path, threaded, timeout)
+ # Lock file itself is a directory. Place the unique file name into
+ # it.
+ self.unique_name = os.path.join(self.lock_file,
+ "%s.%s%s" % (self.hostname,
+ self.tname,
+ self.pid))
+
+ def acquire(self, timeout=None):
+ timeout = timeout if timeout is not None else self.timeout
+ end_time = time.time()
+ if timeout is not None and timeout > 0:
+ end_time += timeout
+
+ if timeout is None:
+ wait = 0.1
+ else:
+ wait = max(0, timeout / 10)
+
+ while True:
+ try:
+ os.mkdir(self.lock_file)
+ except OSError:
+ err = sys.exc_info()[1]
+ if err.errno == errno.EEXIST:
+ # Already locked.
+ if os.path.exists(self.unique_name):
+ # Already locked by me.
+ return
+ if timeout is not None and time.time() > end_time:
+ if timeout > 0:
+ raise LockTimeout("Timeout waiting to acquire"
+ " lock for %s" %
+ self.path)
+ else:
+ # Someone else has the lock.
+ raise AlreadyLocked("%s is already locked" %
+ self.path)
+ time.sleep(wait)
+ else:
+ # Couldn't create the lock for some other reason
+ raise LockFailed("failed to create %s" % self.lock_file)
+ else:
+ open(self.unique_name, "wb").close()
+ return
+
+ def release(self):
+ if not self.is_locked():
+ raise NotLocked("%s is not locked" % self.path)
+ elif not os.path.exists(self.unique_name):
+ raise NotMyLock("%s is locked, but not by me" % self.path)
+ os.unlink(self.unique_name)
+ os.rmdir(self.lock_file)
+
+ def is_locked(self):
+ return os.path.exists(self.lock_file)
+
+ def i_am_locking(self):
+ return (self.is_locked() and
+ os.path.exists(self.unique_name))
+
+ def break_lock(self):
+ if os.path.exists(self.lock_file):
+ for name in os.listdir(self.lock_file):
+ os.unlink(os.path.join(self.lock_file, name))
+ os.rmdir(self.lock_file)
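MkdirLockFile rests on the same atomicity argument, with mkdir(2): exactly
one caller can create the directory, and errno distinguishes contention
(EEXIST) from genuine failure. A minimal sketch::

    import errno
    import os
    import tempfile

    lockdir = os.path.join(tempfile.gettempdir(), 'demo.lock')
    try:
        os.mkdir(lockdir)          # atomic: only one process succeeds
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            print('someone else holds the lock')
        else:
            raise                  # permissions, read-only fs, ...
    else:
        try:
            pass                   # critical section
        finally:
            os.rmdir(lockdir)      # release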
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/pidlockfile.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/pidlockfile.py
new file mode 100644
index 00000000..069e85b1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/pidlockfile.py
@@ -0,0 +1,190 @@
+# -*- coding: utf-8 -*-
+
+# pidlockfile.py
+#
+# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au>
+#
+# This is free software: you may copy, modify, and/or distribute this work
+# under the terms of the Python Software Foundation License, version 2 or
+# later as published by the Python Software Foundation.
+# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
+
+""" Lockfile behaviour implemented via Unix PID files.
+ """
+
+from __future__ import absolute_import
+
+import errno
+import os
+import time
+
+from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock,
+ LockTimeout)
+
+
+class PIDLockFile(LockBase):
+ """ Lockfile implemented as a Unix PID file.
+
+ The lock file is a normal file named by the attribute `path`.
+ A lock's PID file contains a single line of text, containing
+ the process ID (PID) of the process that acquired the lock.
+
+ >>> lock = PIDLockFile('somefile')
+ >>> lock = PIDLockFile('somefile')
+ """
+
+ def __init__(self, path, threaded=False, timeout=None):
+ # pid lockfiles don't support threaded operation, so always force
+ # False as the threaded arg.
+ LockBase.__init__(self, path, False, timeout)
+ self.unique_name = self.path
+
+ def read_pid(self):
+ """ Get the PID from the lock file.
+ """
+ return read_pid_from_pidfile(self.path)
+
+ def is_locked(self):
+ """ Test if the lock is currently held.
+
+ The lock is held if the PID file for this lock exists.
+
+ """
+ return os.path.exists(self.path)
+
+ def i_am_locking(self):
+ """ Test if the lock is held by the current process.
+
+ Returns ``True`` if the current process ID matches the
+ number stored in the PID file.
+ """
+ return self.is_locked() and os.getpid() == self.read_pid()
+
+ def acquire(self, timeout=None):
+ """ Acquire the lock.
+
+ Creates the PID file for this lock, or raises an error if
+ the lock could not be acquired.
+ """
+
+ timeout = timeout if timeout is not None else self.timeout
+ end_time = time.time()
+ if timeout is not None and timeout > 0:
+ end_time += timeout
+
+ while True:
+ try:
+ write_pid_to_pidfile(self.path)
+ except OSError as exc:
+ if exc.errno == errno.EEXIST:
+ # The lock creation failed. Maybe sleep a bit.
+                    if timeout is not None and time.time() > end_time:
+ if timeout is not None and timeout > 0:
+ raise LockTimeout("Timeout waiting to acquire"
+ " lock for %s" %
+ self.path)
+ else:
+ raise AlreadyLocked("%s is already locked" %
+ self.path)
+ time.sleep(timeout is not None and timeout / 10 or 0.1)
+ else:
+ raise LockFailed("failed to create %s" % self.path)
+ else:
+ return
+
+ def release(self):
+ """ Release the lock.
+
+ Removes the PID file to release the lock, or raises an
+ error if the current process does not hold the lock.
+
+ """
+ if not self.is_locked():
+ raise NotLocked("%s is not locked" % self.path)
+ if not self.i_am_locking():
+ raise NotMyLock("%s is locked, but not by me" % self.path)
+ remove_existing_pidfile(self.path)
+
+ def break_lock(self):
+ """ Break an existing lock.
+
+ Removes the PID file if it already exists, otherwise does
+ nothing.
+
+ """
+ remove_existing_pidfile(self.path)
+
+
+def read_pid_from_pidfile(pidfile_path):
+ """ Read the PID recorded in the named PID file.
+
+ Read and return the numeric PID recorded as text in the named
+ PID file. If the PID file cannot be read, or if the content is
+ not a valid PID, return ``None``.
+
+ """
+ pid = None
+ try:
+ pidfile = open(pidfile_path, 'r')
+ except IOError:
+ pass
+ else:
+ # According to the FHS 2.3 section on PID files in /var/run:
+ #
+ # The file must consist of the process identifier in
+ # ASCII-encoded decimal, followed by a newline character.
+ #
+ # Programs that read PID files should be somewhat flexible
+ # in what they accept; i.e., they should ignore extra
+ # whitespace, leading zeroes, absence of the trailing
+ # newline, or additional lines in the PID file.
+
+ line = pidfile.readline().strip()
+ try:
+ pid = int(line)
+ except ValueError:
+ pass
+ pidfile.close()
+
+ return pid
+
+
+def write_pid_to_pidfile(pidfile_path):
+ """ Write the PID in the named PID file.
+
+ Get the numeric process ID (“PID”) of the current process
+ and write it to the named file as a line of text.
+
+ """
+ open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+ open_mode = 0o644
+ pidfile_fd = os.open(pidfile_path, open_flags, open_mode)
+ pidfile = os.fdopen(pidfile_fd, 'w')
+
+ # According to the FHS 2.3 section on PID files in /var/run:
+ #
+ # The file must consist of the process identifier in
+ # ASCII-encoded decimal, followed by a newline character. For
+ # example, if crond was process number 25, /var/run/crond.pid
+ # would contain three characters: two, five, and newline.
+
+ pid = os.getpid()
+ pidfile.write("%s\n" % pid)
+ pidfile.close()
+
+
+def remove_existing_pidfile(pidfile_path):
+ """ Remove the named PID file if it exists.
+
+ Removing a PID file that doesn't already exist puts us in the
+ desired state, so we ignore the condition if the file does not
+ exist.
+
+ """
+ try:
+ os.remove(pidfile_path)
+ except OSError as exc:
+ if exc.errno == errno.ENOENT:
+ pass
+ else:
+ raise
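A short usage sketch for PIDLockFile, importing it under the package's
upstream name (the path is arbitrary). Unlike the other backends, the lock
file is the path itself, and read_pid() lets other processes discover the
holder, which is the point of a PID file::

    import os
    from lockfile.pidlockfile import PIDLockFile

    lock = PIDLockFile('/tmp/mydaemon.pid', timeout=0)  # fail fast if held
    lock.acquire()                        # writes our PID into the file
    try:
        assert lock.read_pid() == os.getpid()
        assert lock.i_am_locking()
    finally:
        lock.release()                    # removes the PID file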
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py
new file mode 100644
index 00000000..f997e244
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py
@@ -0,0 +1,156 @@
+from __future__ import absolute_import, division
+
+import time
+import os
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked
+
+
+class SQLiteLockFile(LockBase):
+ "Demonstrate SQL-based locking."
+
+ testdb = None
+
+ def __init__(self, path, threaded=True, timeout=None):
+ """
+ >>> lock = SQLiteLockFile('somefile')
+ >>> lock = SQLiteLockFile('somefile', threaded=False)
+ """
+ LockBase.__init__(self, path, threaded, timeout)
+ self.lock_file = unicode(self.lock_file)
+ self.unique_name = unicode(self.unique_name)
+
+ if SQLiteLockFile.testdb is None:
+ import tempfile
+ _fd, testdb = tempfile.mkstemp()
+ os.close(_fd)
+ os.unlink(testdb)
+ del _fd, tempfile
+ SQLiteLockFile.testdb = testdb
+
+ import sqlite3
+ self.connection = sqlite3.connect(SQLiteLockFile.testdb)
+
+ c = self.connection.cursor()
+ try:
+ c.execute("create table locks"
+ "("
+ " lock_file varchar(32),"
+ " unique_name varchar(32)"
+ ")")
+ except sqlite3.OperationalError:
+ pass
+ else:
+ self.connection.commit()
+ import atexit
+ atexit.register(os.unlink, SQLiteLockFile.testdb)
+
+ def acquire(self, timeout=None):
+ timeout = timeout if timeout is not None else self.timeout
+ end_time = time.time()
+ if timeout is not None and timeout > 0:
+ end_time += timeout
+
+ if timeout is None:
+ wait = 0.1
+ elif timeout <= 0:
+ wait = 0
+ else:
+ wait = timeout / 10
+
+ cursor = self.connection.cursor()
+
+ while True:
+ if not self.is_locked():
+ # Not locked. Try to lock it.
+ cursor.execute("insert into locks"
+ " (lock_file, unique_name)"
+ " values"
+ " (?, ?)",
+ (self.lock_file, self.unique_name))
+ self.connection.commit()
+
+ # Check to see if we are the only lock holder.
+ cursor.execute("select * from locks"
+ " where unique_name = ?",
+ (self.unique_name,))
+ rows = cursor.fetchall()
+ if len(rows) > 1:
+ # Nope. Someone else got there. Remove our lock.
+ cursor.execute("delete from locks"
+ " where unique_name = ?",
+ (self.unique_name,))
+ self.connection.commit()
+ else:
+ # Yup. We're done, so go home.
+ return
+ else:
+ # Check to see if we are the only lock holder.
+ cursor.execute("select * from locks"
+ " where unique_name = ?",
+ (self.unique_name,))
+ rows = cursor.fetchall()
+ if len(rows) == 1:
+ # We're the locker, so go home.
+ return
+
+ # Maybe we should wait a bit longer.
+ if timeout is not None and time.time() > end_time:
+ if timeout > 0:
+ # No more waiting.
+ raise LockTimeout("Timeout waiting to acquire"
+ " lock for %s" %
+ self.path)
+ else:
+                    # Someone else has the lock and we are impatient.
+ raise AlreadyLocked("%s is already locked" % self.path)
+
+ # Well, okay. We'll give it a bit longer.
+ time.sleep(wait)
+
+ def release(self):
+ if not self.is_locked():
+ raise NotLocked("%s is not locked" % self.path)
+ if not self.i_am_locking():
+ raise NotMyLock("%s is locked, but not by me (by %s)" %
+ (self.unique_name, self._who_is_locking()))
+ cursor = self.connection.cursor()
+ cursor.execute("delete from locks"
+ " where unique_name = ?",
+ (self.unique_name,))
+ self.connection.commit()
+
+ def _who_is_locking(self):
+ cursor = self.connection.cursor()
+ cursor.execute("select unique_name from locks"
+ " where lock_file = ?",
+ (self.lock_file,))
+ return cursor.fetchone()[0]
+
+ def is_locked(self):
+ cursor = self.connection.cursor()
+ cursor.execute("select * from locks"
+ " where lock_file = ?",
+ (self.lock_file,))
+ rows = cursor.fetchall()
+        return bool(rows)
+
+ def i_am_locking(self):
+ cursor = self.connection.cursor()
+ cursor.execute("select * from locks"
+ " where lock_file = ?"
+ " and unique_name = ?",
+ (self.lock_file, self.unique_name))
+        return bool(cursor.fetchall())
+
+ def break_lock(self):
+ cursor = self.connection.cursor()
+ cursor.execute("delete from locks"
+ " where lock_file = ?",
+ (self.lock_file,))
+ self.connection.commit()
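As its docstring says, this class is a demonstration: every instance in a
process shares one throwaway database (the testdb class attribute), so it
serializes threads within a process, but two separate processes would each
create their own testdb and never see each other's rows. A simplified sketch
of the optimistic insert-then-check protocol it uses, against a plain
sqlite3 connection with the same table layout::

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('create table locks'
                 ' (lock_file varchar(32), unique_name varchar(32))')

    def try_lock(lock_file, unique_name):
        # Insert optimistically, then check whether we are the only holder.
        conn.execute('insert into locks values (?, ?)',
                     (lock_file, unique_name))
        conn.commit()
        rows = conn.execute('select * from locks where lock_file = ?',
                            (lock_file,)).fetchall()
        if len(rows) > 1:
            # Lost the race: withdraw our row and report failure.
            conn.execute('delete from locks where unique_name = ?',
                         (unique_name,))
            conn.commit()
            return False
        return True

    assert try_lock('shared', 'me')
    assert not try_lock('shared', 'you')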
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py
new file mode 100644
index 00000000..23b41f58
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py
@@ -0,0 +1,70 @@
+from __future__ import absolute_import
+
+import os
+import time
+
+from . import (LockBase, NotLocked, NotMyLock, LockTimeout,
+ AlreadyLocked)
+
+
+class SymlinkLockFile(LockBase):
+ """Lock access to a file using symlink(2)."""
+
+ def __init__(self, path, threaded=True, timeout=None):
+ # super(SymlinkLockFile).__init(...)
+ LockBase.__init__(self, path, threaded, timeout)
+        # Keep only the basename: the symlink stores its target string
+        # verbatim, and i_am_locking() compares that string directly.
+        self.unique_name = os.path.split(self.unique_name)[1]
+
+ def acquire(self, timeout=None):
+ # Hopefully unnecessary for symlink.
+ # try:
+ # open(self.unique_name, "wb").close()
+ # except IOError:
+ # raise LockFailed("failed to create %s" % self.unique_name)
+ timeout = timeout if timeout is not None else self.timeout
+ end_time = time.time()
+ if timeout is not None and timeout > 0:
+ end_time += timeout
+
+ while True:
+ # Try and create a symbolic link to it.
+ try:
+ os.symlink(self.unique_name, self.lock_file)
+ except OSError:
+ # Link creation failed. Maybe we've double-locked?
+ if self.i_am_locking():
+                    # Linked to our unique name. Proceed.
+ return
+ else:
+ # Otherwise the lock creation failed.
+ if timeout is not None and time.time() > end_time:
+ if timeout > 0:
+ raise LockTimeout("Timeout waiting to acquire"
+ " lock for %s" %
+ self.path)
+ else:
+ raise AlreadyLocked("%s is already locked" %
+ self.path)
+ time.sleep(timeout / 10 if timeout is not None else 0.1)
+ else:
+ # Link creation succeeded. We're good to go.
+ return
+
+ def release(self):
+ if not self.is_locked():
+ raise NotLocked("%s is not locked" % self.path)
+ elif not self.i_am_locking():
+ raise NotMyLock("%s is locked, but not by me" % self.path)
+ os.unlink(self.lock_file)
+
+ def is_locked(self):
+ return os.path.islink(self.lock_file)
+
+ def i_am_locking(self):
+ return (os.path.islink(self.lock_file)
+ and os.readlink(self.lock_file) == self.unique_name)
+
+ def break_lock(self):
+ if os.path.islink(self.lock_file): # exists && link
+ os.unlink(self.lock_file)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__init__.py
new file mode 100644
index 00000000..b3265075
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__init__.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+from pip._vendor.msgpack._version import version
+from pip._vendor.msgpack.exceptions import *
+
+from collections import namedtuple
+
+
+class ExtType(namedtuple('ExtType', 'code data')):
+ """ExtType represents ext type in msgpack."""
+ def __new__(cls, code, data):
+ if not isinstance(code, int):
+ raise TypeError("code must be int")
+ if not isinstance(data, bytes):
+ raise TypeError("data must be bytes")
+ if not 0 <= code <= 127:
+ raise ValueError("code must be 0~127")
+ return super(ExtType, cls).__new__(cls, code, data)
+
+
+import os
+if os.environ.get('MSGPACK_PUREPYTHON'):
+ from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker
+else:
+ try:
+ from pip._vendor.msgpack._cmsgpack import Packer, unpackb, Unpacker
+ except ImportError:
+ from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker
+
+
+def pack(o, stream, **kwargs):
+ """
+ Pack object `o` and write it to `stream`
+
+ See :class:`Packer` for options.
+ """
+ packer = Packer(**kwargs)
+ stream.write(packer.pack(o))
+
+
+def packb(o, **kwargs):
+ """
+ Pack object `o` and return packed bytes
+
+ See :class:`Packer` for options.
+ """
+ return Packer(**kwargs).pack(o)
+
+
+def unpack(stream, **kwargs):
+ """
+ Unpack an object from `stream`.
+
+ Raises `ExtraData` when `stream` contains extra bytes.
+ See :class:`Unpacker` for options.
+ """
+ data = stream.read()
+ return unpackb(data, **kwargs)
+
+
+# alias for compatibility to simplejson/marshal/pickle.
+load = unpack
+loads = unpackb
+
+dump = pack
+dumps = packb
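A minimal round-trip sketch using the aliases above. raw=False is the
forward-compatible setting recommended by the Unpacker documentation later
in this diff; use_bin_type is its counterpart on the packing side::

    from pip._vendor.msgpack import packb, unpackb, ExtType

    blob = packb({'name': 'pip', 'version': [0, 6, 1]}, use_bin_type=True)
    assert unpackb(blob, raw=False) == {'name': 'pip', 'version': [0, 6, 1]}

    # Application-defined types travel as (code, data) pairs:
    blob = packb(ExtType(42, b'\x01\x02'))
    assert unpackb(blob) == ExtType(42, b'\x01\x02')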
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..6005e61c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc
new file mode 100644
index 00000000..08709af5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 00000000..f90fa944
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc
new file mode 100644
index 00000000..d4611aea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/_version.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/_version.py
new file mode 100644
index 00000000..926c5e7b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/_version.py
@@ -0,0 +1 @@
+version = (0, 6, 1)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/exceptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/exceptions.py
new file mode 100644
index 00000000..d6d2615c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/exceptions.py
@@ -0,0 +1,48 @@
+class UnpackException(Exception):
+ """Base class for some exceptions raised while unpacking.
+
+    NOTE: unpack may raise exceptions other than subclasses of
+    UnpackException. If you want to catch all errors, catch
+ Exception instead.
+ """
+
+
+class BufferFull(UnpackException):
+ pass
+
+
+class OutOfData(UnpackException):
+ pass
+
+
+class FormatError(ValueError, UnpackException):
+ """Invalid msgpack format"""
+
+
+class StackError(ValueError, UnpackException):
+ """Too nested"""
+
+
+# Deprecated. Use ValueError instead
+UnpackValueError = ValueError
+
+
+class ExtraData(UnpackValueError):
+ """ExtraData is raised when there is trailing data.
+
+    This exception is raised only by one-shot (not streaming)
+    unpacking.
+ """
+
+ def __init__(self, unpacked, extra):
+ self.unpacked = unpacked
+ self.extra = extra
+
+ def __str__(self):
+ return "unpack(b) received extra data."
+
+
+# Deprecated. Use Exception instead to catch all exception during packing.
+PackException = Exception
+PackValueError = ValueError
+PackOverflowError = OverflowError
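ExtraData carries both the decoded object and the leftover bytes, so
one-shot callers can recover. A small sketch using packb/unpackb from this
vendored package::

    from pip._vendor.msgpack import packb, unpackb
    from pip._vendor.msgpack.exceptions import ExtraData

    blob = packb(1) + packb(2)       # two objects back to back
    try:
        unpackb(blob)                # one-shot unpack sees trailing bytes
    except ExtraData as exc:
        assert exc.unpacked == 1             # the first decoded object
        assert unpackb(exc.extra) == 2       # leftovers hold the second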
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/fallback.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/fallback.py
new file mode 100644
index 00000000..5b731ddd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/msgpack/fallback.py
@@ -0,0 +1,1027 @@
+"""Fallback pure Python implementation of msgpack"""
+
+import sys
+import struct
+import warnings
+
+
+if sys.version_info[0] == 2:
+ PY2 = True
+ int_types = (int, long)
+ def dict_iteritems(d):
+ return d.iteritems()
+else:
+ PY2 = False
+ int_types = int
+ unicode = str
+ xrange = range
+ def dict_iteritems(d):
+ return d.items()
+
+if sys.version_info < (3, 5):
+ # Ugly hack...
+ RecursionError = RuntimeError
+
+ def _is_recursionerror(e):
+ return len(e.args) == 1 and isinstance(e.args[0], str) and \
+ e.args[0].startswith('maximum recursion depth exceeded')
+else:
+ def _is_recursionerror(e):
+ return True
+
+if hasattr(sys, 'pypy_version_info'):
+ # cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own
+ # StringBuilder is fastest.
+ from __pypy__ import newlist_hint
+ try:
+ from __pypy__.builders import BytesBuilder as StringBuilder
+ except ImportError:
+ from __pypy__.builders import StringBuilder
+ USING_STRINGBUILDER = True
+ class StringIO(object):
+ def __init__(self, s=b''):
+ if s:
+ self.builder = StringBuilder(len(s))
+ self.builder.append(s)
+ else:
+ self.builder = StringBuilder()
+ def write(self, s):
+ if isinstance(s, memoryview):
+ s = s.tobytes()
+ elif isinstance(s, bytearray):
+ s = bytes(s)
+ self.builder.append(s)
+ def getvalue(self):
+ return self.builder.build()
+else:
+ USING_STRINGBUILDER = False
+ from io import BytesIO as StringIO
+ newlist_hint = lambda size: []
+
+
+from pip._vendor.msgpack.exceptions import (
+ BufferFull,
+ OutOfData,
+ ExtraData,
+ FormatError,
+ StackError,
+)
+
+from pip._vendor.msgpack import ExtType
+
+
+EX_SKIP = 0
+EX_CONSTRUCT = 1
+EX_READ_ARRAY_HEADER = 2
+EX_READ_MAP_HEADER = 3
+
+TYPE_IMMEDIATE = 0
+TYPE_ARRAY = 1
+TYPE_MAP = 2
+TYPE_RAW = 3
+TYPE_BIN = 4
+TYPE_EXT = 5
+
+DEFAULT_RECURSE_LIMIT = 511
+
+
+def _check_type_strict(obj, t, type=type, tuple=tuple):
+ if type(t) is tuple:
+ return type(obj) in t
+ else:
+ return type(obj) is t
+
+
+def _get_data_from_buffer(obj):
+ try:
+ view = memoryview(obj)
+ except TypeError:
+ # try to use legacy buffer protocol if 2.7, otherwise re-raise
+ if PY2:
+ view = memoryview(buffer(obj))
+ warnings.warn("using old buffer interface to unpack %s; "
+ "this leads to unpacking errors if slicing is used and "
+ "will be removed in a future version" % type(obj),
+ RuntimeWarning, stacklevel=3)
+ else:
+ raise
+ if view.itemsize != 1:
+ raise ValueError("cannot unpack from multi-byte object")
+ return view
+
+
+def unpack(stream, **kwargs):
+ warnings.warn(
+ "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.",
+ DeprecationWarning, stacklevel=2)
+ data = stream.read()
+ return unpackb(data, **kwargs)
+
+
+def unpackb(packed, **kwargs):
+ """
+ Unpack an object from `packed`.
+
+ Raises ``ExtraData`` when *packed* contains extra bytes.
+ Raises ``ValueError`` when *packed* is incomplete.
+ Raises ``FormatError`` when *packed* is not valid msgpack.
+    Raises ``StackError`` when *packed* contains too deeply nested data.
+ Other exceptions can be raised during unpacking.
+
+ See :class:`Unpacker` for options.
+ """
+ unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs)
+ unpacker.feed(packed)
+ try:
+ ret = unpacker._unpack()
+ except OutOfData:
+ raise ValueError("Unpack failed: incomplete input")
+ except RecursionError as e:
+ if _is_recursionerror(e):
+ raise StackError
+ raise
+ if unpacker._got_extradata():
+ raise ExtraData(ret, unpacker._get_extradata())
+ return ret
+
+
+if sys.version_info < (2, 7, 6):
+ def _unpack_from(f, b, o=0):
+ """Explicit typcast for legacy struct.unpack_from"""
+ return struct.unpack_from(f, bytes(b), o)
+else:
+ _unpack_from = struct.unpack_from
+
+
+class Unpacker(object):
+ """Streaming unpacker.
+
+ arguments:
+
+ :param file_like:
+ File-like object having `.read(n)` method.
+        If specified, the unpacker reads serialized data from it and :meth:`feed()` is not usable.
+
+ :param int read_size:
+ Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`)
+
+ :param bool use_list:
+ If true, unpack msgpack array to Python list.
+ Otherwise, unpack to Python tuple. (default: True)
+
+ :param bool raw:
+ If true, unpack msgpack raw to Python bytes (default).
+ Otherwise, unpack to Python str (or unicode on Python 2) by decoding
+ with UTF-8 encoding (recommended).
+        Currently the default is true, but it will be changed to false in
+        the near future, so you should specify it explicitly to keep
+        backward compatibility.
+
+ *encoding* option which is deprecated overrides this option.
+
+ :param bool strict_map_key:
+ If true, only str or bytes are accepted for map (dict) keys.
+        It is False by default for backward compatibility, but it will
+        become True in msgpack 1.0.
+
+ :param callable object_hook:
+ When specified, it should be callable.
+ Unpacker calls it with a dict argument after unpacking msgpack map.
+ (See also simplejson)
+
+ :param callable object_pairs_hook:
+ When specified, it should be callable.
+ Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
+ (See also simplejson)
+
+ :param str encoding:
+ Encoding used for decoding msgpack raw.
+ If it is None (default), msgpack raw is deserialized to Python bytes.
+
+ :param str unicode_errors:
+ (deprecated) Used for decoding msgpack raw with *encoding*.
+ (default: `'strict'`)
+
+ :param int max_buffer_size:
+        Limits the size of data waiting to be unpacked. 0 means the system's INT_MAX (default).
+ Raises `BufferFull` exception when it is insufficient.
+ You should set this parameter when unpacking data from untrusted source.
+
+ :param int max_str_len:
+ Deprecated, use *max_buffer_size* instead.
+ Limits max length of str. (default: max_buffer_size or 1024*1024)
+
+ :param int max_bin_len:
+ Deprecated, use *max_buffer_size* instead.
+ Limits max length of bin. (default: max_buffer_size or 1024*1024)
+
+ :param int max_array_len:
+ Limits max length of array.
+ (default: max_buffer_size or 128*1024)
+
+ :param int max_map_len:
+ Limits max length of map.
+ (default: max_buffer_size//2 or 32*1024)
+
+ :param int max_ext_len:
+ Deprecated, use *max_buffer_size* instead.
+ Limits max size of ext type. (default: max_buffer_size or 1024*1024)
+
+ Example of streaming deserialize from file-like object::
+
+ unpacker = Unpacker(file_like, raw=False, max_buffer_size=10*1024*1024)
+ for o in unpacker:
+ process(o)
+
+ Example of streaming deserialize from socket::
+
+ unpacker = Unpacker(raw=False, max_buffer_size=10*1024*1024)
+ while True:
+ buf = sock.recv(1024**2)
+ if not buf:
+ break
+ unpacker.feed(buf)
+ for o in unpacker:
+ process(o)
+
+ Raises ``ExtraData`` when *packed* contains extra bytes.
+ Raises ``OutOfData`` when *packed* is incomplete.
+ Raises ``FormatError`` when *packed* is not valid msgpack.
+    Raises ``StackError`` when *packed* contains too deeply nested data.
+ Other exceptions can be raised during unpacking.
+ """
+
+ def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, strict_map_key=False,
+ object_hook=None, object_pairs_hook=None, list_hook=None,
+ encoding=None, unicode_errors=None, max_buffer_size=0,
+ ext_hook=ExtType,
+ max_str_len=-1,
+ max_bin_len=-1,
+ max_array_len=-1,
+ max_map_len=-1,
+ max_ext_len=-1):
+ if encoding is not None:
+ warnings.warn(
+ "encoding is deprecated, Use raw=False instead.",
+ DeprecationWarning, stacklevel=2)
+
+ if unicode_errors is None:
+ unicode_errors = 'strict'
+
+ if file_like is None:
+ self._feeding = True
+ else:
+ if not callable(file_like.read):
+ raise TypeError("`file_like.read` must be callable")
+ self.file_like = file_like
+ self._feeding = False
+
+ #: array of bytes fed.
+ self._buffer = bytearray()
+        #: Position in the buffer we are currently reading from.
+ self._buff_i = 0
+
+ # When Unpacker is used as an iterable, between the calls to next(),
+        # the buffer is not "consumed" completely, for efficiency's sake.
+ # Instead, it is done sloppily. To make sure we raise BufferFull at
+ # the correct moments, we have to keep track of how sloppy we were.
+ # Furthermore, when the buffer is incomplete (that is: in the case
+ # we raise an OutOfData) we need to rollback the buffer to the correct
+ # state, which _buf_checkpoint records.
+ self._buf_checkpoint = 0
+
+ if max_str_len == -1:
+ max_str_len = max_buffer_size or 1024*1024
+ if max_bin_len == -1:
+ max_bin_len = max_buffer_size or 1024*1024
+ if max_array_len == -1:
+ max_array_len = max_buffer_size or 128*1024
+ if max_map_len == -1:
+ max_map_len = max_buffer_size//2 or 32*1024
+ if max_ext_len == -1:
+ max_ext_len = max_buffer_size or 1024*1024
+
+ self._max_buffer_size = max_buffer_size or 2**31-1
+ if read_size > self._max_buffer_size:
+ raise ValueError("read_size must be smaller than max_buffer_size")
+ self._read_size = read_size or min(self._max_buffer_size, 16*1024)
+ self._raw = bool(raw)
+ self._strict_map_key = bool(strict_map_key)
+ self._encoding = encoding
+ self._unicode_errors = unicode_errors
+ self._use_list = use_list
+ self._list_hook = list_hook
+ self._object_hook = object_hook
+ self._object_pairs_hook = object_pairs_hook
+ self._ext_hook = ext_hook
+ self._max_str_len = max_str_len
+ self._max_bin_len = max_bin_len
+ self._max_array_len = max_array_len
+ self._max_map_len = max_map_len
+ self._max_ext_len = max_ext_len
+ self._stream_offset = 0
+
+ if list_hook is not None and not callable(list_hook):
+ raise TypeError('`list_hook` is not callable')
+ if object_hook is not None and not callable(object_hook):
+ raise TypeError('`object_hook` is not callable')
+ if object_pairs_hook is not None and not callable(object_pairs_hook):
+ raise TypeError('`object_pairs_hook` is not callable')
+ if object_hook is not None and object_pairs_hook is not None:
+ raise TypeError("object_pairs_hook and object_hook are mutually "
+ "exclusive")
+ if not callable(ext_hook):
+ raise TypeError("`ext_hook` is not callable")
+
+ def feed(self, next_bytes):
+ assert self._feeding
+ view = _get_data_from_buffer(next_bytes)
+ if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size):
+ raise BufferFull
+
+ # Strip the buffer before the checkpoint before appending new data.
+ if self._buf_checkpoint > 0:
+ del self._buffer[:self._buf_checkpoint]
+ self._buff_i -= self._buf_checkpoint
+ self._buf_checkpoint = 0
+
+ # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython
+ self._buffer.extend(view)
+
+ def _consume(self):
+ """ Gets rid of the used parts of the buffer. """
+ self._stream_offset += self._buff_i - self._buf_checkpoint
+ self._buf_checkpoint = self._buff_i
+
+ def _got_extradata(self):
+ return self._buff_i < len(self._buffer)
+
+ def _get_extradata(self):
+ return self._buffer[self._buff_i:]
+
+ def read_bytes(self, n):
+ return self._read(n)
+
+ def _read(self, n):
+ # (int) -> bytearray
+ self._reserve(n)
+ i = self._buff_i
+ self._buff_i = i+n
+ return self._buffer[i:i+n]
+
+ def _reserve(self, n):
+ remain_bytes = len(self._buffer) - self._buff_i - n
+
+ # Fast path: buffer has n bytes already
+ if remain_bytes >= 0:
+ return
+
+ if self._feeding:
+ self._buff_i = self._buf_checkpoint
+ raise OutOfData
+
+ # Strip buffer before checkpoint before reading file.
+ if self._buf_checkpoint > 0:
+ del self._buffer[:self._buf_checkpoint]
+ self._buff_i -= self._buf_checkpoint
+ self._buf_checkpoint = 0
+
+ # Read from file
+ remain_bytes = -remain_bytes
+ while remain_bytes > 0:
+ to_read_bytes = max(self._read_size, remain_bytes)
+ read_data = self.file_like.read(to_read_bytes)
+ if not read_data:
+ break
+ assert isinstance(read_data, bytes)
+ self._buffer += read_data
+ remain_bytes -= len(read_data)
+
+ if len(self._buffer) < n + self._buff_i:
+ self._buff_i = 0 # rollback
+ raise OutOfData
+
+ def _read_header(self, execute=EX_CONSTRUCT):
+ typ = TYPE_IMMEDIATE
+ n = 0
+ obj = None
+ self._reserve(1)
+ b = self._buffer[self._buff_i]
+ self._buff_i += 1
+ if b & 0b10000000 == 0:
+ obj = b
+ elif b & 0b11100000 == 0b11100000:
+ obj = -1 - (b ^ 0xff)
+ elif b & 0b11100000 == 0b10100000:
+ n = b & 0b00011111
+ typ = TYPE_RAW
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b & 0b11110000 == 0b10010000:
+ n = b & 0b00001111
+ typ = TYPE_ARRAY
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ elif b & 0b11110000 == 0b10000000:
+ n = b & 0b00001111
+ typ = TYPE_MAP
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ elif b == 0xc0:
+ obj = None
+ elif b == 0xc2:
+ obj = False
+ elif b == 0xc3:
+ obj = True
+ elif b == 0xc4:
+ typ = TYPE_BIN
+ self._reserve(1)
+ n = self._buffer[self._buff_i]
+ self._buff_i += 1
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ obj = self._read(n)
+ elif b == 0xc5:
+ typ = TYPE_BIN
+ self._reserve(2)
+ n = _unpack_from(">H", self._buffer, self._buff_i)[0]
+ self._buff_i += 2
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ obj = self._read(n)
+ elif b == 0xc6:
+ typ = TYPE_BIN
+ self._reserve(4)
+ n = _unpack_from(">I", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ if n > self._max_bin_len:
+ raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+ obj = self._read(n)
+ elif b == 0xc7: # ext 8
+ typ = TYPE_EXT
+ self._reserve(2)
+ L, n = _unpack_from('Bb', self._buffer, self._buff_i)
+ self._buff_i += 2
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ obj = self._read(L)
+ elif b == 0xc8: # ext 16
+ typ = TYPE_EXT
+ self._reserve(3)
+ L, n = _unpack_from('>Hb', self._buffer, self._buff_i)
+ self._buff_i += 3
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ obj = self._read(L)
+ elif b == 0xc9: # ext 32
+ typ = TYPE_EXT
+ self._reserve(5)
+ L, n = _unpack_from('>Ib', self._buffer, self._buff_i)
+ self._buff_i += 5
+ if L > self._max_ext_len:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+ obj = self._read(L)
+ elif b == 0xca:
+ self._reserve(4)
+ obj = _unpack_from(">f", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ elif b == 0xcb:
+ self._reserve(8)
+ obj = _unpack_from(">d", self._buffer, self._buff_i)[0]
+ self._buff_i += 8
+ elif b == 0xcc:
+ self._reserve(1)
+ obj = self._buffer[self._buff_i]
+ self._buff_i += 1
+ elif b == 0xcd:
+ self._reserve(2)
+ obj = _unpack_from(">H", self._buffer, self._buff_i)[0]
+ self._buff_i += 2
+ elif b == 0xce:
+ self._reserve(4)
+ obj = _unpack_from(">I", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ elif b == 0xcf:
+ self._reserve(8)
+ obj = _unpack_from(">Q", self._buffer, self._buff_i)[0]
+ self._buff_i += 8
+ elif b == 0xd0:
+ self._reserve(1)
+ obj = _unpack_from("b", self._buffer, self._buff_i)[0]
+ self._buff_i += 1
+ elif b == 0xd1:
+ self._reserve(2)
+ obj = _unpack_from(">h", self._buffer, self._buff_i)[0]
+ self._buff_i += 2
+ elif b == 0xd2:
+ self._reserve(4)
+ obj = _unpack_from(">i", self._buffer, self._buff_i)[0]
+ self._buff_i += 4
+ elif b == 0xd3:
+ self._reserve(8)
+ obj = _unpack_from(">q", self._buffer, self._buff_i)[0]
+ self._buff_i += 8
+ elif b == 0xd4: # fixext 1
+ typ = TYPE_EXT
+ if self._max_ext_len < 1:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
+ self._reserve(2)
+ n, obj = _unpack_from("b1s", self._buffer, self._buff_i)
+ self._buff_i += 2
+ elif b == 0xd5: # fixext 2
+ typ = TYPE_EXT
+ if self._max_ext_len < 2:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
+ self._reserve(3)
+ n, obj = _unpack_from("b2s", self._buffer, self._buff_i)
+ self._buff_i += 3
+ elif b == 0xd6: # fixext 4
+ typ = TYPE_EXT
+ if self._max_ext_len < 4:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
+ self._reserve(5)
+ n, obj = _unpack_from("b4s", self._buffer, self._buff_i)
+ self._buff_i += 5
+ elif b == 0xd7: # fixext 8
+ typ = TYPE_EXT
+ if self._max_ext_len < 8:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
+ self._reserve(9)
+ n, obj = _unpack_from("b8s", self._buffer, self._buff_i)
+ self._buff_i += 9
+ elif b == 0xd8: # fixext 16
+ typ = TYPE_EXT
+ if self._max_ext_len < 16:
+ raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
+ self._reserve(17)
+ n, obj = _unpack_from("b16s", self._buffer, self._buff_i)
+ self._buff_i += 17
+ elif b == 0xd9:
+ typ = TYPE_RAW
+ self._reserve(1)
+ n = self._buffer[self._buff_i]
+ self._buff_i += 1
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b == 0xda:
+ typ = TYPE_RAW
+ self._reserve(2)
+ n, = _unpack_from(">H", self._buffer, self._buff_i)
+ self._buff_i += 2
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b == 0xdb:
+ typ = TYPE_RAW
+ self._reserve(4)
+ n, = _unpack_from(">I", self._buffer, self._buff_i)
+ self._buff_i += 4
+ if n > self._max_str_len:
+ raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
+ obj = self._read(n)
+ elif b == 0xdc:
+ typ = TYPE_ARRAY
+ self._reserve(2)
+ n, = _unpack_from(">H", self._buffer, self._buff_i)
+ self._buff_i += 2
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ elif b == 0xdd:
+ typ = TYPE_ARRAY
+ self._reserve(4)
+ n, = _unpack_from(">I", self._buffer, self._buff_i)
+ self._buff_i += 4
+ if n > self._max_array_len:
+ raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
+ elif b == 0xde:
+ self._reserve(2)
+ n, = _unpack_from(">H", self._buffer, self._buff_i)
+ self._buff_i += 2
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ typ = TYPE_MAP
+ elif b == 0xdf:
+ self._reserve(4)
+ n, = _unpack_from(">I", self._buffer, self._buff_i)
+ self._buff_i += 4
+ if n > self._max_map_len:
+ raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
+ typ = TYPE_MAP
+ else:
+ raise FormatError("Unknown header: 0x%x" % b)
+ return typ, n, obj
+
+ def _unpack(self, execute=EX_CONSTRUCT):
+ typ, n, obj = self._read_header(execute)
+
+ if execute == EX_READ_ARRAY_HEADER:
+ if typ != TYPE_ARRAY:
+ raise ValueError("Expected array")
+ return n
+ if execute == EX_READ_MAP_HEADER:
+ if typ != TYPE_MAP:
+ raise ValueError("Expected map")
+ return n
+ # TODO should we eliminate the recursion?
+ if typ == TYPE_ARRAY:
+ if execute == EX_SKIP:
+ for i in xrange(n):
+ # TODO check whether we need to call `list_hook`
+ self._unpack(EX_SKIP)
+ return
+ ret = newlist_hint(n)
+ for i in xrange(n):
+ ret.append(self._unpack(EX_CONSTRUCT))
+ if self._list_hook is not None:
+ ret = self._list_hook(ret)
+ # TODO is the interaction between `list_hook` and `use_list` ok?
+ return ret if self._use_list else tuple(ret)
+ if typ == TYPE_MAP:
+ if execute == EX_SKIP:
+ for i in xrange(n):
+ # TODO check whether we need to call hooks
+ self._unpack(EX_SKIP)
+ self._unpack(EX_SKIP)
+ return
+ if self._object_pairs_hook is not None:
+ ret = self._object_pairs_hook(
+ (self._unpack(EX_CONSTRUCT),
+ self._unpack(EX_CONSTRUCT))
+ for _ in xrange(n))
+ else:
+ ret = {}
+ for _ in xrange(n):
+ key = self._unpack(EX_CONSTRUCT)
+ if self._strict_map_key and type(key) not in (unicode, bytes):
+ raise ValueError("%s is not allowed for map key" % str(type(key)))
+ ret[key] = self._unpack(EX_CONSTRUCT)
+ if self._object_hook is not None:
+ ret = self._object_hook(ret)
+ return ret
+ if execute == EX_SKIP:
+ return
+ if typ == TYPE_RAW:
+ if self._encoding is not None:
+ obj = obj.decode(self._encoding, self._unicode_errors)
+ elif self._raw:
+ obj = bytes(obj)
+ else:
+ obj = obj.decode('utf_8')
+ return obj
+ if typ == TYPE_EXT:
+ return self._ext_hook(n, bytes(obj))
+ if typ == TYPE_BIN:
+ return bytes(obj)
+ assert typ == TYPE_IMMEDIATE
+ return obj
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ try:
+ ret = self._unpack(EX_CONSTRUCT)
+ self._consume()
+ return ret
+ except OutOfData:
+ self._consume()
+ raise StopIteration
+ except RecursionError:
+ raise StackError
+
+ next = __next__
+
+ def skip(self):
+ self._unpack(EX_SKIP)
+ self._consume()
+
+ def unpack(self):
+ try:
+ ret = self._unpack(EX_CONSTRUCT)
+ except RecursionError:
+ raise StackError
+ self._consume()
+ return ret
+
+ def read_array_header(self):
+ ret = self._unpack(EX_READ_ARRAY_HEADER)
+ self._consume()
+ return ret
+
+ def read_map_header(self):
+ ret = self._unpack(EX_READ_MAP_HEADER)
+ self._consume()
+ return ret
+
+ def tell(self):
+ return self._stream_offset
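+
+ # Usage sketch (not part of the vendored module): read_map_header /
+ # unpack let a caller walk a large map without materializing it.
+ #
+ # unpacker = Unpacker(raw=False)
+ # unpacker.feed(packed_map) # hypothetical buffer with one packed map
+ # n = unpacker.read_map_header()
+ # for _ in range(n):
+ # key = unpacker.unpack()
+ # value = unpacker.unpack()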
+
+
+class Packer(object):
+ """
+ MessagePack Packer
+
+ usage:
+
+ packer = Packer()
+ astream.write(packer.pack(a))
+ astream.write(packer.pack(b))
+
+ Packer's constructor has some keyword arguments:
+
+ :param callable default:
+ Convert a user type to a builtin type that Packer supports.
+ See also simplejson's documentation.
+
+ :param bool use_single_float:
+ Use single precision float type for float. (default: False)
+
+ :param bool autoreset:
+ Reset buffer after each pack and return its content as `bytes`. (default: True)
+ If set to false, use `bytes()` to get the content and `.reset()` to clear the buffer.
+
+ :param bool use_bin_type:
+ Use the bin type introduced in msgpack spec 2.0 for bytes.
+ It also enables the str8 type for unicode.
+
+ :param bool strict_types:
+ If set to true, types will be checked to be exact. Classes derived
+ from serializable types will not be serialized; they will be
+ treated as unsupported types and forwarded to *default*.
+ Additionally, tuples will not be serialized as lists.
+ This is useful when trying to implement accurate serialization
+ for Python types.
+
+ :param str encoding:
+ (deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8')
+
+ :param str unicode_errors:
+ Error handler for encoding unicode. (default: 'strict')
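+
+ Example of a *default* hook converting a user type (a minimal sketch;
+ the ``Point`` type here is hypothetical and not part of msgpack)::
+
+ def encode_point(obj):
+ if isinstance(obj, Point):
+ return {"x": obj.x, "y": obj.y}
+ raise TypeError("Cannot serialize %r" % (obj,))
+
+ packer = Packer(default=encode_point)
+ data = packer.pack(Point(1, 2))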
+ """
+ def __init__(self, default=None, encoding=None, unicode_errors=None,
+ use_single_float=False, autoreset=True, use_bin_type=False,
+ strict_types=False):
+ if encoding is None:
+ encoding = 'utf_8'
+ else:
+ warnings.warn(
+ "encoding is deprecated, Use raw=False instead.",
+ DeprecationWarning, stacklevel=2)
+
+ if unicode_errors is None:
+ unicode_errors = 'strict'
+
+ self._strict_types = strict_types
+ self._use_float = use_single_float
+ self._autoreset = autoreset
+ self._use_bin_type = use_bin_type
+ self._encoding = encoding
+ self._unicode_errors = unicode_errors
+ self._buffer = StringIO()
+ if default is not None:
+ if not callable(default):
+ raise TypeError("default must be callable")
+ self._default = default
+
+ def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT,
+ check=isinstance, check_type_strict=_check_type_strict):
+ default_used = False
+ if self._strict_types:
+ check = check_type_strict
+ list_types = list
+ else:
+ list_types = (list, tuple)
+ while True:
+ if nest_limit < 0:
+ raise ValueError("recursion limit exceeded")
+ if obj is None:
+ return self._buffer.write(b"\xc0")
+ if check(obj, bool):
+ if obj:
+ return self._buffer.write(b"\xc3")
+ return self._buffer.write(b"\xc2")
+ if check(obj, int_types):
+ if 0 <= obj < 0x80:
+ return self._buffer.write(struct.pack("B", obj))
+ if -0x20 <= obj < 0:
+ return self._buffer.write(struct.pack("b", obj))
+ if 0x80 <= obj <= 0xff:
+ return self._buffer.write(struct.pack("BB", 0xcc, obj))
+ if -0x80 <= obj < 0:
+ return self._buffer.write(struct.pack(">Bb", 0xd0, obj))
+ if 0xff < obj <= 0xffff:
+ return self._buffer.write(struct.pack(">BH", 0xcd, obj))
+ if -0x8000 <= obj < -0x80:
+ return self._buffer.write(struct.pack(">Bh", 0xd1, obj))
+ if 0xffff < obj <= 0xffffffff:
+ return self._buffer.write(struct.pack(">BI", 0xce, obj))
+ if -0x80000000 <= obj < -0x8000:
+ return self._buffer.write(struct.pack(">Bi", 0xd2, obj))
+ if 0xffffffff < obj <= 0xffffffffffffffff:
+ return self._buffer.write(struct.pack(">BQ", 0xcf, obj))
+ if -0x8000000000000000 <= obj < -0x80000000:
+ return self._buffer.write(struct.pack(">Bq", 0xd3, obj))
+ if not default_used and self._default is not None:
+ obj = self._default(obj)
+ default_used = True
+ continue
+ raise OverflowError("Integer value out of range")
+ if check(obj, (bytes, bytearray)):
+ n = len(obj)
+ if n >= 2**32:
+ raise ValueError("%s is too large" % type(obj).__name__)
+ self._pack_bin_header(n)
+ return self._buffer.write(obj)
+ if check(obj, unicode):
+ if self._encoding is None:
+ raise TypeError(
+ "Can't encode unicode string: "
+ "no encoding is specified")
+ obj = obj.encode(self._encoding, self._unicode_errors)
+ n = len(obj)
+ if n >= 2**32:
+ raise ValueError("String is too large")
+ self._pack_raw_header(n)
+ return self._buffer.write(obj)
+ if check(obj, memoryview):
+ n = len(obj) * obj.itemsize
+ if n >= 2**32:
+ raise ValueError("Memoryview is too large")
+ self._pack_bin_header(n)
+ return self._buffer.write(obj)
+ if check(obj, float):
+ if self._use_float:
+ return self._buffer.write(struct.pack(">Bf", 0xca, obj))
+ return self._buffer.write(struct.pack(">Bd", 0xcb, obj))
+ if check(obj, ExtType):
+ code = obj.code
+ data = obj.data
+ assert isinstance(code, int)
+ assert isinstance(data, bytes)
+ L = len(data)
+ if L == 1:
+ self._buffer.write(b'\xd4')
+ elif L == 2:
+ self._buffer.write(b'\xd5')
+ elif L == 4:
+ self._buffer.write(b'\xd6')
+ elif L == 8:
+ self._buffer.write(b'\xd7')
+ elif L == 16:
+ self._buffer.write(b'\xd8')
+ elif L <= 0xff:
+ self._buffer.write(struct.pack(">BB", 0xc7, L))
+ elif L <= 0xffff:
+ self._buffer.write(struct.pack(">BH", 0xc8, L))
+ else:
+ self._buffer.write(struct.pack(">BI", 0xc9, L))
+ self._buffer.write(struct.pack("b", code))
+ self._buffer.write(data)
+ return
+ if check(obj, list_types):
+ n = len(obj)
+ self._pack_array_header(n)
+ for i in xrange(n):
+ self._pack(obj[i], nest_limit - 1)
+ return
+ if check(obj, dict):
+ return self._pack_map_pairs(len(obj), dict_iteritems(obj),
+ nest_limit - 1)
+ if not default_used and self._default is not None:
+ obj = self._default(obj)
+ default_used = True
+ continue
+ raise TypeError("Cannot serialize %r" % (obj, ))
+
+ def pack(self, obj):
+ try:
+ self._pack(obj)
+ except:
+ self._buffer = StringIO() # force reset
+ raise
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
+ def pack_map_pairs(self, pairs):
+ self._pack_map_pairs(len(pairs), pairs)
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
+ def pack_array_header(self, n):
+ if n >= 2**32:
+ raise ValueError
+ self._pack_array_header(n)
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
+ def pack_map_header(self, n):
+ if n >= 2**32:
+ raise ValueError
+ self._pack_map_header(n)
+ if self._autoreset:
+ ret = self._buffer.getvalue()
+ self._buffer = StringIO()
+ return ret
+
+ def pack_ext_type(self, typecode, data):
+ if not isinstance(typecode, int):
+ raise TypeError("typecode must have int type.")
+ if not 0 <= typecode <= 127:
+ raise ValueError("typecode should be 0-127")
+ if not isinstance(data, bytes):
+ raise TypeError("data must have bytes type")
+ L = len(data)
+ if L > 0xffffffff:
+ raise ValueError("Too large data")
+ if L == 1:
+ self._buffer.write(b'\xd4')
+ elif L == 2:
+ self._buffer.write(b'\xd5')
+ elif L == 4:
+ self._buffer.write(b'\xd6')
+ elif L == 8:
+ self._buffer.write(b'\xd7')
+ elif L == 16:
+ self._buffer.write(b'\xd8')
+ elif L <= 0xff:
+ self._buffer.write(b'\xc7' + struct.pack('B', L))
+ elif L <= 0xffff:
+ self._buffer.write(b'\xc8' + struct.pack('>H', L))
+ else:
+ self._buffer.write(b'\xc9' + struct.pack('>I', L))
+ self._buffer.write(struct.pack('B', typecode))
+ self._buffer.write(data)
+
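+ # Usage sketch (hypothetical payload, not part of the vendored module):
+ # with autoreset disabled, the encoded ext type stays in the internal
+ # buffer until read back with bytes().
+ #
+ # packer = Packer(autoreset=False)
+ # packer.pack_ext_type(42, b"\x00\x01") # fixext 2: 0xd5, type 42, data
+ # data = packer.bytes()
+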
+ def _pack_array_header(self, n):
+ if n <= 0x0f:
+ return self._buffer.write(struct.pack('B', 0x90 + n))
+ if n <= 0xffff:
+ return self._buffer.write(struct.pack(">BH", 0xdc, n))
+ if n <= 0xffffffff:
+ return self._buffer.write(struct.pack(">BI", 0xdd, n))
+ raise ValueError("Array is too large")
+
+ def _pack_map_header(self, n):
+ if n <= 0x0f:
+ return self._buffer.write(struct.pack('B', 0x80 + n))
+ if n <= 0xffff:
+ return self._buffer.write(struct.pack(">BH", 0xde, n))
+ if n <= 0xffffffff:
+ return self._buffer.write(struct.pack(">BI", 0xdf, n))
+ raise ValueError("Dict is too large")
+
+ def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
+ self._pack_map_header(n)
+ for (k, v) in pairs:
+ self._pack(k, nest_limit - 1)
+ self._pack(v, nest_limit - 1)
+
+ def _pack_raw_header(self, n):
+ if n <= 0x1f:
+ self._buffer.write(struct.pack('B', 0xa0 + n))
+ elif self._use_bin_type and n <= 0xff:
+ self._buffer.write(struct.pack('>BB', 0xd9, n))
+ elif n <= 0xffff:
+ self._buffer.write(struct.pack(">BH", 0xda, n))
+ elif n <= 0xffffffff:
+ self._buffer.write(struct.pack(">BI", 0xdb, n))
+ else:
+ raise ValueError('Raw is too large')
+
+ def _pack_bin_header(self, n):
+ if not self._use_bin_type:
+ return self._pack_raw_header(n)
+ elif n <= 0xff:
+ return self._buffer.write(struct.pack('>BB', 0xc4, n))
+ elif n <= 0xffff:
+ return self._buffer.write(struct.pack(">BH", 0xc5, n))
+ elif n <= 0xffffffff:
+ return self._buffer.write(struct.pack(">BI", 0xc6, n))
+ else:
+ raise ValueError('Bin is too large')
+
+ def bytes(self):
+ """Return internal buffer contents as bytes object"""
+ return self._buffer.getvalue()
+
+ def reset(self):
+ """Reset internal buffer.
+
+ This method is useful only when autoreset=False.
+ """
+ self._buffer = StringIO()
+
+ def getbuffer(self):
+ """Return view of internal buffer."""
+ if USING_STRINGBUILDER or PY2:
+ return memoryview(self.bytes())
+ else:
+ return self._buffer.getbuffer()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__about__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__about__.py
new file mode 100644
index 00000000..7481c9e2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__about__.py
@@ -0,0 +1,27 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "19.0"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD or Apache License, Version 2.0"
+__copyright__ = "Copyright 2014-2019 %s" % __author__
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__init__.py
new file mode 100644
index 00000000..a0cf67df
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__init__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+from .__about__ import (
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ __version__,
+)
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc
new file mode 100644
index 00000000..f32de6ee
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..f3820b38
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc
new file mode 100644
index 00000000..ad7504d6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc
new file mode 100644
index 00000000..49b10b35
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc
new file mode 100644
index 00000000..267d8aa3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc
new file mode 100644
index 00000000..e8cf0361
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc
new file mode 100644
index 00000000..95caa337
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 00000000..69352849
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-37.pyc
new file mode 100644
index 00000000..ce06da04
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_compat.py
new file mode 100644
index 00000000..25da473c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_compat.py
@@ -0,0 +1,31 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+# flake8: noqa
+
+if PY3:
+ string_types = (str,)
+else:
+ string_types = (basestring,)
+
+
+def with_metaclass(meta, *bases):
+ """
+ Create a base class with a metaclass.
+ """
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+
+ return type.__new__(metaclass, "temporary_class", (), {})
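+
+
+# Usage sketch (not part of the vendored module): with_metaclass lets a
+# class declare a metaclass portably across Python 2 and 3, e.g.:
+#
+# import abc
+#
+# class Base(with_metaclass(abc.ABCMeta, object)):
+# pass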
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_structures.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_structures.py
new file mode 100644
index 00000000..68dcca63
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/_structures.py
@@ -0,0 +1,68 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+
+class Infinity(object):
+ def __repr__(self):
+ return "Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return False
+
+ def __le__(self, other):
+ return False
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return True
+
+ def __ge__(self, other):
+ return True
+
+ def __neg__(self):
+ return NegativeInfinity
+
+
+Infinity = Infinity()
+
+
+class NegativeInfinity(object):
+ def __repr__(self):
+ return "-Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return True
+
+ def __le__(self, other):
+ return True
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return False
+
+ def __ge__(self, other):
+ return False
+
+ def __neg__(self):
+ return Infinity
+
+
+NegativeInfinity = NegativeInfinity()
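+
+
+# Usage sketch (not part of the vendored module): these singletons compare
+# below and above every other value, so they can pad version-comparison
+# keys; e.g. sorted([1, Infinity, NegativeInfinity]) yields
+# [NegativeInfinity, 1, Infinity].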
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/markers.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/markers.py
new file mode 100644
index 00000000..54824768
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/markers.py
@@ -0,0 +1,296 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import operator
+import os
+import platform
+import sys
+
+from pip._vendor.pyparsing import ParseException, ParseResults, stringStart, stringEnd
+from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString
+from pip._vendor.pyparsing import Literal as L # noqa
+
+from ._compat import string_types
+from .specifiers import Specifier, InvalidSpecifier
+
+
+__all__ = [
+ "InvalidMarker",
+ "UndefinedComparison",
+ "UndefinedEnvironmentName",
+ "Marker",
+ "default_environment",
+]
+
+
+class InvalidMarker(ValueError):
+ """
+ An invalid marker was found; users should refer to PEP 508.
+ """
+
+
+class UndefinedComparison(ValueError):
+ """
+ An invalid operation was attempted on a value that doesn't support it.
+ """
+
+
+class UndefinedEnvironmentName(ValueError):
+ """
+ A name was used that does not exist in the environment.
+ """
+
+
+class Node(object):
+ def __init__(self, value):
+ self.value = value
+
+ def __str__(self):
+ return str(self.value)
+
+ def __repr__(self):
+ return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
+
+ def serialize(self):
+ raise NotImplementedError
+
+
+class Variable(Node):
+ def serialize(self):
+ return str(self)
+
+
+class Value(Node):
+ def serialize(self):
+ return '"{0}"'.format(self)
+
+
+class Op(Node):
+ def serialize(self):
+ return str(self)
+
+
+VARIABLE = (
+ L("implementation_version")
+ | L("platform_python_implementation")
+ | L("implementation_name")
+ | L("python_full_version")
+ | L("platform_release")
+ | L("platform_version")
+ | L("platform_machine")
+ | L("platform_system")
+ | L("python_version")
+ | L("sys_platform")
+ | L("os_name")
+ | L("os.name")
+ | L("sys.platform") # PEP-345
+ | L("platform.version") # PEP-345
+ | L("platform.machine") # PEP-345
+ | L("platform.python_implementation") # PEP-345
+ | L("python_implementation") # PEP-345
+ | L("extra") # undocumented setuptools legacy
+)
+ALIASES = {
+ "os.name": "os_name",
+ "sys.platform": "sys_platform",
+ "platform.version": "platform_version",
+ "platform.machine": "platform_machine",
+ "platform.python_implementation": "platform_python_implementation",
+ "python_implementation": "platform_python_implementation",
+}
+VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
+
+VERSION_CMP = (
+ L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
+)
+
+MARKER_OP = VERSION_CMP | L("not in") | L("in")
+MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
+
+MARKER_VALUE = QuotedString("'") | QuotedString('"')
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
+
+BOOLOP = L("and") | L("or")
+
+MARKER_VAR = VARIABLE | MARKER_VALUE
+
+MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
+MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
+
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+
+MARKER_EXPR = Forward()
+MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
+MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
+
+MARKER = stringStart + MARKER_EXPR + stringEnd
+
+
+def _coerce_parse_result(results):
+ if isinstance(results, ParseResults):
+ return [_coerce_parse_result(i) for i in results]
+ else:
+ return results
+
+
+def _format_marker(marker, first=True):
+ assert isinstance(marker, (list, tuple, string_types))
+
+ # Sometimes we have a structure like [[...]] which is a single item list
+ # where the single item is itself its own list. In that case we want to
+ # skip the rest of this function so that we don't get extraneous () on
+ # the outside.
+ if (
+ isinstance(marker, list)
+ and len(marker) == 1
+ and isinstance(marker[0], (list, tuple))
+ ):
+ return _format_marker(marker[0])
+
+ if isinstance(marker, list):
+ inner = (_format_marker(m, first=False) for m in marker)
+ if first:
+ return " ".join(inner)
+ else:
+ return "(" + " ".join(inner) + ")"
+ elif isinstance(marker, tuple):
+ return " ".join([m.serialize() for m in marker])
+ else:
+ return marker
+
+
+_operators = {
+ "in": lambda lhs, rhs: lhs in rhs,
+ "not in": lambda lhs, rhs: lhs not in rhs,
+ "<": operator.lt,
+ "<=": operator.le,
+ "==": operator.eq,
+ "!=": operator.ne,
+ ">=": operator.ge,
+ ">": operator.gt,
+}
+
+
+def _eval_op(lhs, op, rhs):
+ try:
+ spec = Specifier("".join([op.serialize(), rhs]))
+ except InvalidSpecifier:
+ pass
+ else:
+ return spec.contains(lhs)
+
+ oper = _operators.get(op.serialize())
+ if oper is None:
+ raise UndefinedComparison(
+ "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
+ )
+
+ return oper(lhs, rhs)
+
+
+_undefined = object()
+
+
+def _get_env(environment, name):
+ value = environment.get(name, _undefined)
+
+ if value is _undefined:
+ raise UndefinedEnvironmentName(
+ "{0!r} does not exist in evaluation environment.".format(name)
+ )
+
+ return value
+
+
+def _evaluate_markers(markers, environment):
+ groups = [[]]
+
+ for marker in markers:
+ assert isinstance(marker, (list, tuple, string_types))
+
+ if isinstance(marker, list):
+ groups[-1].append(_evaluate_markers(marker, environment))
+ elif isinstance(marker, tuple):
+ lhs, op, rhs = marker
+
+ if isinstance(lhs, Variable):
+ lhs_value = _get_env(environment, lhs.value)
+ rhs_value = rhs.value
+ else:
+ lhs_value = lhs.value
+ rhs_value = _get_env(environment, rhs.value)
+
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+ else:
+ assert marker in ["and", "or"]
+ if marker == "or":
+ groups.append([])
+
+ return any(all(item) for item in groups)
+
+
+def format_full_version(info):
+ version = "{0.major}.{0.minor}.{0.micro}".format(info)
+ kind = info.releaselevel
+ if kind != "final":
+ version += kind[0] + str(info.serial)
+ return version
+
+
+def default_environment():
+ if hasattr(sys, "implementation"):
+ iver = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ else:
+ iver = "0"
+ implementation_name = ""
+
+ return {
+ "implementation_name": implementation_name,
+ "implementation_version": iver,
+ "os_name": os.name,
+ "platform_machine": platform.machine(),
+ "platform_release": platform.release(),
+ "platform_system": platform.system(),
+ "platform_version": platform.version(),
+ "python_full_version": platform.python_version(),
+ "platform_python_implementation": platform.python_implementation(),
+ "python_version": platform.python_version()[:3],
+ "sys_platform": sys.platform,
+ }
+
+
+class Marker(object):
+ def __init__(self, marker):
+ try:
+ self._markers = _coerce_parse_result(MARKER.parseString(marker))
+ except ParseException as e:
+ err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
+ marker, marker[e.loc : e.loc + 8]
+ )
+ raise InvalidMarker(err_str)
+
+ def __str__(self):
+ return _format_marker(self._markers)
+
+ def __repr__(self):
+ return "<Marker({0!r})>".format(str(self))
+
+ def evaluate(self, environment=None):
+ """Evaluate a marker.
+
+ Return the boolean from evaluating the given marker against the
+ environment. environment is an optional argument to override all or
+ part of the determined environment.
+
+ The environment is determined from the current Python process.
+ """
+ current_environment = default_environment()
+ if environment is not None:
+ current_environment.update(environment)
+
+ return _evaluate_markers(self._markers, current_environment)
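+
+
+# Usage sketch (not part of the vendored module): evaluating a PEP 508
+# marker, optionally overriding part of the detected environment.
+#
+# m = Marker('python_version >= "3.6" and os_name == "posix"')
+# m.evaluate() # against the running interpreter
+# m.evaluate({"python_version": "2.7"}) # override one variable -> False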
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/requirements.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/requirements.py
new file mode 100644
index 00000000..dbc5f11d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/requirements.py
@@ -0,0 +1,138 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import string
+import re
+
+from pip._vendor.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
+from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
+from pip._vendor.pyparsing import Literal as L # noqa
+from pip._vendor.six.moves.urllib import parse as urlparse
+
+from .markers import MARKER_EXPR, Marker
+from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+
+
+class InvalidRequirement(ValueError):
+ """
+ An invalid requirement was found; users should refer to PEP 508.
+ """
+
+
+ALPHANUM = Word(string.ascii_letters + string.digits)
+
+LBRACKET = L("[").suppress()
+RBRACKET = L("]").suppress()
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+COMMA = L(",").suppress()
+SEMICOLON = L(";").suppress()
+AT = L("@").suppress()
+
+PUNCTUATION = Word("-_.")
+IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
+IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
+
+NAME = IDENTIFIER("name")
+EXTRA = IDENTIFIER
+
+URI = Regex(r"[^ ]+")("url")
+URL = AT + URI
+
+EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
+EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
+
+VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
+VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
+
+VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
+VERSION_MANY = Combine(
+ VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
+)("_raw_spec")
+_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
+
+VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
+VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
+
+MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
+MARKER_EXPR.setParseAction(
+ lambda s, l, t: Marker(s[t._original_start : t._original_end])
+)
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
+
+VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
+URL_AND_MARKER = URL + Optional(MARKER)
+
+NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+
+REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+# pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
+
+
+class Requirement(object):
+ """Parse a requirement.
+
+ Parse a given requirement string into its parts, such as name, specifier,
+ URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+ string.
+ """
+
+ # TODO: Can we test whether something is contained within a requirement?
+ # If so how do we do that? Do we need to test against the _name_ of
+ # the thing as well as the version? What about the markers?
+ # TODO: Can we normalize the name and extra name?
+
+ def __init__(self, requirement_string):
+ try:
+ req = REQUIREMENT.parseString(requirement_string)
+ except ParseException as e:
+ raise InvalidRequirement(
+ 'Parse error at "{0!r}": {1}'.format(
+ requirement_string[e.loc : e.loc + 8], e.msg
+ )
+ )
+
+ self.name = req.name
+ if req.url:
+ parsed_url = urlparse.urlparse(req.url)
+ if parsed_url.scheme == "file":
+ if urlparse.urlunparse(parsed_url) != req.url:
+ raise InvalidRequirement("Invalid URL given")
+ elif not (parsed_url.scheme and parsed_url.netloc) or (
+ not parsed_url.scheme and not parsed_url.netloc
+ ):
+ raise InvalidRequirement("Invalid URL: {0}".format(req.url))
+ self.url = req.url
+ else:
+ self.url = None
+ self.extras = set(req.extras.asList() if req.extras else [])
+ self.specifier = SpecifierSet(req.specifier)
+ self.marker = req.marker if req.marker else None
+
+ def __str__(self):
+ parts = [self.name]
+
+ if self.extras:
+ parts.append("[{0}]".format(",".join(sorted(self.extras))))
+
+ if self.specifier:
+ parts.append(str(self.specifier))
+
+ if self.url:
+ parts.append("@ {0}".format(self.url))
+ if self.marker:
+ parts.append(" ")
+
+ if self.marker:
+ parts.append("; {0}".format(self.marker))
+
+ return "".join(parts)
+
+ def __repr__(self):
+ return "<Requirement({0!r})>".format(str(self))
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/specifiers.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/specifiers.py
new file mode 100644
index 00000000..743576a0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/specifiers.py
@@ -0,0 +1,749 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import abc
+import functools
+import itertools
+import re
+
+from ._compat import string_types, with_metaclass
+from .version import Version, LegacyVersion, parse
+
+
+class InvalidSpecifier(ValueError):
+ """
+ An invalid specifier was found; users should refer to PEP 440.
+ """
+
+
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+ @abc.abstractmethod
+ def __str__(self):
+ """
+ Returns the str representation of this Specifier-like object. This
+ should be representative of the Specifier itself.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self):
+ """
+ Returns a hash value for this Specifier-like object.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier-like
+ objects are equal.
+ """
+
+ @abc.abstractmethod
+ def __ne__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier-like
+ objects are not equal.
+ """
+
+ @abc.abstractproperty
+ def prereleases(self):
+ """
+ Returns whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @prereleases.setter
+ def prereleases(self, value):
+ """
+ Sets whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @abc.abstractmethod
+ def contains(self, item, prereleases=None):
+ """
+ Determines if the given item is contained within this specifier.
+ """
+
+ @abc.abstractmethod
+ def filter(self, iterable, prereleases=None):
+ """
+ Takes an iterable of items and filters them so that only items which
+ are contained within this specifier are allowed in it.
+ """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+ _operators = {}
+
+ def __init__(self, spec="", prereleases=None):
+ match = self._regex.search(spec)
+ if not match:
+ raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+
+ self._spec = (match.group("operator").strip(), match.group("version").strip())
+
+ # Store whether or not this Specifier should accept prereleases
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
+
+ def __str__(self):
+ return "{0}{1}".format(*self._spec)
+
+ def __hash__(self):
+ return hash(self._spec)
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec == other._spec
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec != other._spec
+
+ def _get_operator(self, op):
+ return getattr(self, "_compare_{0}".format(self._operators[op]))
+
+ def _coerce_version(self, version):
+ if not isinstance(version, (LegacyVersion, Version)):
+ version = parse(version)
+ return version
+
+ @property
+ def operator(self):
+ return self._spec[0]
+
+ @property
+ def version(self):
+ return self._spec[1]
+
+ @property
+ def prereleases(self):
+ return self._prereleases
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Determine if prereleases are to be allowed or not.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # Normalize item to a Version or LegacyVersion; this allows us to have
+ # a shortcut for ``"2.0" in Specifier(">=2")``.
+ item = self._coerce_version(item)
+
+ # Determine if we should be supporting prereleases in this specifier
+ # or not; if we do not support prereleases then we can short-circuit
+ # the logic if this version is a prerelease.
+ if item.is_prerelease and not prereleases:
+ return False
+
+ # Actually do the comparison to determine if this item is contained
+ # within this Specifier or not.
+ return self._get_operator(self.operator)(item, self.version)
+
+ def filter(self, iterable, prereleases=None):
+ yielded = False
+ found_prereleases = []
+
+ kw = {"prereleases": prereleases if prereleases is not None else True}
+
+ # Attempt to iterate over all the values in the iterable and if any of
+ # them match, yield them.
+ for version in iterable:
+ parsed_version = self._coerce_version(version)
+
+ if self.contains(parsed_version, **kw):
+ # If our version is a prerelease, and we were not set to allow
+ # prereleases, then we'll store it for later in case nothing
+ # else matches this specifier.
+ if parsed_version.is_prerelease and not (
+ prereleases or self.prereleases
+ ):
+ found_prereleases.append(version)
+ # Either this is not a prerelease, or we should have been
+ # accepting prereleases from the beginning.
+ else:
+ yielded = True
+ yield version
+
+ # Now that we've iterated over everything, determine if we've yielded
+ # any values; if we have not, and we have any prereleases stored up,
+ # then we will go ahead and yield the prereleases.
+ if not yielded and found_prereleases:
+ for version in found_prereleases:
+ yield version
+
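+# Usage sketch (not part of the vendored module) for the contains/filter
+# API defined above, using the Specifier subclass defined further below:
+#
+# spec = Specifier(">=1.0")
+# "1.1" in spec # True, via __contains__ -> contains()
+# list(spec.filter(["0.9", "1.0", "1.1"])) # ['1.0', '1.1']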
+
+class LegacySpecifier(_IndividualSpecifier):
+
+ _regex_str = r"""
+ (?P<operator>(==|!=|<=|>=|<|>))
+ \s*
+ (?P<version>
+ [^,;\s)]* # Since this is a "legacy" specifier, and the version
+ # string can be just about anything, we match everything
+ # except for whitespace, a semi-colon for marker support,
+ # a closing paren since versions can be enclosed in
+ # them, and a comma since it's a version separator.
+ )
+ """
+
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ }
+
+ def _coerce_version(self, version):
+ if not isinstance(version, LegacyVersion):
+ version = LegacyVersion(str(version))
+ return version
+
+ def _compare_equal(self, prospective, spec):
+ return prospective == self._coerce_version(spec)
+
+ def _compare_not_equal(self, prospective, spec):
+ return prospective != self._coerce_version(spec)
+
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= self._coerce_version(spec)
+
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= self._coerce_version(spec)
+
+ def _compare_less_than(self, prospective, spec):
+ return prospective < self._coerce_version(spec)
+
+ def _compare_greater_than(self, prospective, spec):
+ return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(fn):
+ @functools.wraps(fn)
+ def wrapped(self, prospective, spec):
+ if not isinstance(prospective, Version):
+ return False
+ return fn(self, prospective, spec)
+
+ return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+ _regex_str = r"""
+ (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+ (?P<version>
+ (?:
+ # The identity operators allow for an escape hatch that will
+ # do an exact string match of the version you wish to install.
+ # This will not be parsed by PEP 440 and we cannot determine
+ # any semantic meaning from it. This operator is discouraged
+ # but included entirely as an escape hatch.
+ (?<====) # Only match for the identity operator
+ \s*
+ [^\s]* # We just match everything, except for whitespace
+ # since we are only testing for strict identity.
+ )
+ |
+ (?:
+ # The (non)equality operators allow for wild card and local
+ # versions to be specified so we have to define these two
+ # operators separately to enable that.
+ (?<===|!=) # Only match for equals and not equals
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+
+ # You cannot use a wild card and a dev or local version
+ # together so group them with a | and make them optional.
+ (?:
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+ |
+ \.\* # Wild card syntax of .*
+ )?
+ )
+ |
+ (?:
+ # The compatible operator requires at least two digits in the
+ # release segment.
+ (?<=~=) # Only match for the compatible operator
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ |
+ (?:
+ # All other operators only allow a subset of what the
+ # (non)equality operators do. Specifically they do not allow
+ # local versions to be specified nor do they allow the prefix
+ # matching wild cards.
+ (?<!==|!=|~=) # We have special cases for these
+ # operators so we want to make sure they
+ # don't match here.
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ )
+ """
+
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "~=": "compatible",
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ "===": "arbitrary",
+ }
+
+ @_require_version_compare
+ def _compare_compatible(self, prospective, spec):
+ # Compatible releases have an equivalent combination of >= and ==. That
+ # is, ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+ # implement this in terms of the other specifiers instead of
+ # implementing it ourselves. The only thing we need to do is construct
+ # the other specifiers.
+
+ # We want everything but the last item in the version, but we want to
+ # ignore post and dev releases and we want to treat the pre-release as
+ # its own separate segment.
+ prefix = ".".join(
+ list(
+ itertools.takewhile(
+ lambda x: (not x.startswith("post") and not x.startswith("dev")),
+ _version_split(spec),
+ )
+ )[:-1]
+ )
+
+ # Add the prefix notation to the end of our string
+ prefix += ".*"
+
+ return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+ prospective, prefix
+ )
+
+ @_require_version_compare
+ def _compare_equal(self, prospective, spec):
+ # We need special logic to handle prefix matching
+ if spec.endswith(".*"):
+ # In the case of prefix matching we want to ignore the local segment.
+ prospective = Version(prospective.public)
+ # Split the spec out by dots, and pretend that there is an implicit
+ # dot in between a release segment and a pre-release segment.
+ spec = _version_split(spec[:-2]) # Remove the trailing .*
+
+ # Split the prospective version out by dots, and pretend that there
+ # is an implicit dot in between a release segment and a pre-release
+ # segment.
+ prospective = _version_split(str(prospective))
+
+ # Shorten the prospective version to be the same length as the spec
+ # so that we can determine if the specifier is a prefix of the
+ # prospective version or not.
+ prospective = prospective[: len(spec)]
+
+ # Pad out our two sides with zeros so that they both equal the same
+ # length.
+ spec, prospective = _pad_version(spec, prospective)
+ else:
+ # Convert our spec string into a Version
+ spec = Version(spec)
+
+ # If the specifier does not have a local segment, then we want to
+ # act as if the prospective version also does not have a local
+ # segment.
+ if not spec.local:
+ prospective = Version(prospective.public)
+
+ return prospective == spec
+
+ @_require_version_compare
+ def _compare_not_equal(self, prospective, spec):
+ return not self._compare_equal(prospective, spec)
+
+ @_require_version_compare
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= Version(spec)
+
+ @_require_version_compare
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= Version(spec)
+
+ @_require_version_compare
+ def _compare_less_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is less than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective < spec:
+ return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+ # not match 3.1.dev0, but should match 3.0.dev0).
+ if not spec.is_prerelease and prospective.is_prerelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+        # If we've gotten to here, it means that the prospective version is
+        # both less than the spec version *and* not a pre-release of the same
+        # version in the spec.
+ return True
+
+ @_require_version_compare
+ def _compare_greater_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is greater than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective > spec:
+ return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+ # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+ if not spec.is_postrelease and prospective.is_postrelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # Ensure that we do not allow a local version of the version mentioned
+ # in the specifier, which is technically greater than, to match.
+ if prospective.local is not None:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+        # If we've gotten to here, it means that the prospective version is
+        # both greater than the spec version *and* neither a post-release nor
+        # a local version of the same base version in the spec.
+ return True
+
+ def _compare_arbitrary(self, prospective, spec):
+ return str(prospective).lower() == str(spec).lower()
+
+ @property
+ def prereleases(self):
+ # If there is an explicit prereleases set for this, then we'll just
+ # blindly use that.
+ if self._prereleases is not None:
+ return self._prereleases
+
+        # Look at our operator, and determine if it is an inclusive operator
+        # and, if it is, whether its version pins an explicit prerelease.
+ operator, version = self._spec
+ if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*; if it does, we
+            # want to remove it before parsing.
+ if operator == "==" and version.endswith(".*"):
+ version = version[:-2]
+
+            # Parse the version, and if it is a pre-release, then this
+            # specifier allows pre-releases.
+ if parse(version).is_prerelease:
+ return True
+
+ return False
+
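+    # Illustrative sketch: a specifier whose version pins a pre-release
+    # implicitly opts in to pre-releases, e.g.
+    #
+    #     >>> Specifier(">=1.0a1").prereleases
+    #     True
+    #     >>> Specifier(">=1.0").prereleases
+    #     False
+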
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version):
+ result = []
+ for item in version.split("."):
+ match = _prefix_regex.search(item)
+ if match:
+ result.extend(match.groups())
+ else:
+ result.append(item)
+ return result
+
+
+def _pad_version(left, right):
+ left_split, right_split = [], []
+
+ # Get the release segment of our versions
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+ # Get the rest of our versions
+ left_split.append(left[len(left_split[0]) :])
+ right_split.append(right[len(right_split[0]) :])
+
+ # Insert our padding
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+ return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+
+
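+# An illustrative sketch of the two helpers above: _version_split treats a
+# trailing pre-release as its own segment, and _pad_version zero-pads the
+# shorter release segment so prefix comparison lines up:
+#
+#     >>> _version_split("2.2rc1")
+#     ['2', '2', 'rc1']
+#     >>> _pad_version(["1", "2"], ["1", "2", "0", "3"])
+#     (['1', '2', '0', '0'], ['1', '2', '0', '3'])
+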
+class SpecifierSet(BaseSpecifier):
+ def __init__(self, specifiers="", prereleases=None):
+        # Split on , to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+ specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Parse each individual specifier, attempting first to make it a
+        # Specifier and falling back to a LegacySpecifier.
+ parsed = set()
+ for specifier in specifiers:
+ try:
+ parsed.add(Specifier(specifier))
+ except InvalidSpecifier:
+ parsed.add(LegacySpecifier(specifier))
+
+ # Turn our parsed specifiers into a frozen set and save them for later.
+ self._specs = frozenset(parsed)
+
+ # Store our prereleases value so we can use it later to determine if
+ # we accept prereleases or not.
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+
+ def __str__(self):
+ return ",".join(sorted(str(s) for s in self._specs))
+
+ def __hash__(self):
+ return hash(self._specs)
+
+ def __and__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ specifier = SpecifierSet()
+ specifier._specs = frozenset(self._specs | other._specs)
+
+ if self._prereleases is None and other._prereleases is not None:
+ specifier._prereleases = other._prereleases
+ elif self._prereleases is not None and other._prereleases is None:
+ specifier._prereleases = self._prereleases
+ elif self._prereleases == other._prereleases:
+ specifier._prereleases = self._prereleases
+ else:
+ raise ValueError(
+ "Cannot combine SpecifierSets with True and False prerelease "
+ "overrides."
+ )
+
+ return specifier
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs == other._specs
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs != other._specs
+
+ def __len__(self):
+ return len(self._specs)
+
+ def __iter__(self):
+ return iter(self._specs)
+
+ @property
+ def prereleases(self):
+ # If we have been given an explicit prerelease modifier, then we'll
+ # pass that through here.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # If we don't have any specifiers, and we don't have a forced value,
+ # then we'll just return None since we don't know if this should have
+ # pre-releases or not.
+ if not self._specs:
+ return None
+
+ # Otherwise we'll see if any of the given specifiers accept
+ # prereleases, if any of them do we'll return True, otherwise False.
+ return any(s.prereleases for s in self._specs)
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Ensure that our item is a Version or LegacyVersion instance.
+ if not isinstance(item, (LegacyVersion, Version)):
+ item = parse(item)
+
+        # Determine if we're forcing a prerelease or not; if we're not forcing
+        # one for this particular call, then we'll use whatever the
+        # SpecifierSet thinks about whether or not to support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items support them. If none of them do,
+        # and this item is a pre-release, then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        # like >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0
+ if not prereleases and item.is_prerelease:
+ return False
+
+ # We simply dispatch to the underlying specs here to make sure that the
+ # given version is contained within all of them.
+ # Note: This use of all() here means that an empty set of specifiers
+ # will always return True, this is an explicit design decision.
+ return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+ def filter(self, iterable, prereleases=None):
+        # Determine if we're forcing a prerelease or not; if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks about whether or not to support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # If we have any specifiers, then we want to wrap our iterable in the
+ # filter method for each one, this will act as a logical AND amongst
+ # each specifier.
+ if self._specs:
+ for spec in self._specs:
+ iterable = spec.filter(iterable, prereleases=bool(prereleases))
+ return iterable
+ # If we do not have any specifiers, then we need to have a rough filter
+ # which will filter out any pre-releases, unless there are no final
+ # releases, and which will filter out LegacyVersion in general.
+ else:
+ filtered = []
+ found_prereleases = []
+
+ for item in iterable:
+                # Ensure that we have some kind of Version instance for this
+                # item.
+ if not isinstance(item, (LegacyVersion, Version)):
+ parsed_version = parse(item)
+ else:
+ parsed_version = item
+
+ # Filter out any item which is parsed as a LegacyVersion
+ if isinstance(parsed_version, LegacyVersion):
+ continue
+
+ # Store any item which is a pre-release for later unless we've
+ # already found a final version or we are accepting prereleases
+ if parsed_version.is_prerelease and not prereleases:
+ if not filtered:
+ found_prereleases.append(item)
+ else:
+ filtered.append(item)
+
+ # If we've found no items except for pre-releases, then we'll go
+ # ahead and use the pre-releases
+ if not filtered and found_prereleases and prereleases is None:
+ return found_prereleases
+
+ return filtered
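+
+# A hedged usage sketch of SpecifierSet tying the pieces above together
+# (doctest-style; it uses only names defined in this module):
+#
+#     >>> "1.5" in SpecifierSet(">=1.0,<2.0")
+#     True
+#     >>> str(SpecifierSet(">=1.0") & "<2.0")
+#     '<2.0,>=1.0'
+#     >>> list(SpecifierSet(">=1.0").filter(["0.5", "1.2", "2.0.dev1"]))
+#     ['1.2']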
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/utils.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/utils.py
new file mode 100644
index 00000000..88418786
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/utils.py
@@ -0,0 +1,57 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import re
+
+from .version import InvalidVersion, Version
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+
+
+def canonicalize_name(name):
+ # This is taken from PEP 503.
+ return _canonicalize_regex.sub("-", name).lower()
+
+
+def canonicalize_version(version):
+ """
+    This is very similar to Version.__str__, but has one subtle difference
+    in the way it handles the release segment.
+ """
+
+ try:
+ version = Version(version)
+ except InvalidVersion:
+ # Legacy versions cannot be normalized
+ return version
+
+ parts = []
+
+ # Epoch
+ if version.epoch != 0:
+ parts.append("{0}!".format(version.epoch))
+
+ # Release segment
+ # NB: This strips trailing '.0's to normalize
+ parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
+
+ # Pre-release
+ if version.pre is not None:
+ parts.append("".join(str(x) for x in version.pre))
+
+ # Post-release
+ if version.post is not None:
+ parts.append(".post{0}".format(version.post))
+
+ # Development release
+ if version.dev is not None:
+ parts.append(".dev{0}".format(version.dev))
+
+ # Local version segment
+ if version.local is not None:
+ parts.append("+{0}".format(version.local))
+
+ return "".join(parts)
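+
+# An illustrative sketch of both helpers above (doctest-style):
+#
+#     >>> canonicalize_name("Foo_Bar.baz")
+#     'foo-bar-baz'
+#     >>> canonicalize_version("1.0.0")   # trailing zeros stripped
+#     '1'
+#     >>> canonicalize_version("not-a-version")  # legacy input passes through
+#     'not-a-version'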
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/version.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/version.py
new file mode 100644
index 00000000..95157a1f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/packaging/version.py
@@ -0,0 +1,420 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+
+
+_Version = collections.namedtuple(
+ "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
+)
+
+
+def parse(version):
+ """
+ Parse the given version string and return either a :class:`Version` object
+ or a :class:`LegacyVersion` object depending on if the given version is
+ a valid PEP 440 version or a legacy version.
+ """
+ try:
+ return Version(version)
+ except InvalidVersion:
+ return LegacyVersion(version)
+
+
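+# An illustrative sketch of the fallback (doctest-style):
+#
+#     >>> parse("1.0.post1")
+#     <Version('1.0.post1')>
+#     >>> parse("french toast")
+#     <LegacyVersion('french toast')>
+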
+class InvalidVersion(ValueError):
+ """
+    An invalid version was found; users should refer to PEP 440.
+ """
+
+
+class _BaseVersion(object):
+ def __hash__(self):
+ return hash(self._key)
+
+ def __lt__(self, other):
+ return self._compare(other, lambda s, o: s < o)
+
+ def __le__(self, other):
+ return self._compare(other, lambda s, o: s <= o)
+
+ def __eq__(self, other):
+ return self._compare(other, lambda s, o: s == o)
+
+ def __ge__(self, other):
+ return self._compare(other, lambda s, o: s >= o)
+
+ def __gt__(self, other):
+ return self._compare(other, lambda s, o: s > o)
+
+ def __ne__(self, other):
+ return self._compare(other, lambda s, o: s != o)
+
+ def _compare(self, other, method):
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return method(self._key, other._key)
+
+
+class LegacyVersion(_BaseVersion):
+ def __init__(self, version):
+ self._version = str(version)
+ self._key = _legacy_cmpkey(self._version)
+
+ def __str__(self):
+ return self._version
+
+ def __repr__(self):
+ return "<LegacyVersion({0})>".format(repr(str(self)))
+
+ @property
+ def public(self):
+ return self._version
+
+ @property
+ def base_version(self):
+ return self._version
+
+ @property
+ def epoch(self):
+ return -1
+
+ @property
+ def release(self):
+ return None
+
+ @property
+ def pre(self):
+ return None
+
+ @property
+ def post(self):
+ return None
+
+ @property
+ def dev(self):
+ return None
+
+ @property
+ def local(self):
+ return None
+
+ @property
+ def is_prerelease(self):
+ return False
+
+ @property
+ def is_postrelease(self):
+ return False
+
+ @property
+ def is_devrelease(self):
+ return False
+
+
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+
+_legacy_version_replacement_map = {
+ "pre": "c",
+ "preview": "c",
+ "-": "final-",
+ "rc": "c",
+ "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+ for part in _legacy_version_component_re.split(s):
+ part = _legacy_version_replacement_map.get(part, part)
+
+ if not part or part == ".":
+ continue
+
+ if part[:1] in "0123456789":
+ # pad for numeric comparison
+ yield part.zfill(8)
+ else:
+ yield "*" + part
+
+ # ensure that alpha/beta/candidate are before final
+ yield "*final"
+
+
+def _legacy_cmpkey(version):
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an
+    # epoch greater than or equal to 0. This will effectively sort the
+    # LegacyVersion, which uses the de facto standard originally implemented
+    # by setuptools, before all PEP 440 versions.
+ epoch = -1
+
+    # This scheme is taken from setuptools' pkg_resources.parse_version, prior
+    # to its adoption of the packaging library.
+ parts = []
+ for part in _parse_version_parts(version.lower()):
+ if part.startswith("*"):
+ # remove "-" before a prerelease tag
+ if part < "*final":
+ while parts and parts[-1] == "*final-":
+ parts.pop()
+
+ # remove trailing zeros from each series of numeric parts
+ while parts and parts[-1] == "00000000":
+ parts.pop()
+
+ parts.append(part)
+ parts = tuple(parts)
+
+ return epoch, parts
+
+
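+# An illustrative consequence of the -1 epoch above: every LegacyVersion
+# sorts before every PEP 440 Version, e.g.
+#
+#     >>> LegacyVersion("1.0-final") < Version("0.1")
+#     True
+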
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+ v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+"""
+
+
+class Version(_BaseVersion):
+
+ _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ def __init__(self, version):
+ # Validate the version and parse it into pieces
+ match = self._regex.search(version)
+ if not match:
+ raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+ # Store the parsed out pieces of the version
+ self._version = _Version(
+ epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+ release=tuple(int(i) for i in match.group("release").split(".")),
+ pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+ post=_parse_letter_version(
+ match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+ ),
+ dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+ local=_parse_local_version(match.group("local")),
+ )
+
+ # Generate a key which will be used for sorting
+ self._key = _cmpkey(
+ self._version.epoch,
+ self._version.release,
+ self._version.pre,
+ self._version.post,
+ self._version.dev,
+ self._version.local,
+ )
+
+ def __repr__(self):
+ return "<Version({0})>".format(repr(str(self)))
+
+ def __str__(self):
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append("{0}!".format(self.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ # Pre-release
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
+
+ # Post-release
+ if self.post is not None:
+ parts.append(".post{0}".format(self.post))
+
+ # Development release
+ if self.dev is not None:
+ parts.append(".dev{0}".format(self.dev))
+
+ # Local version segment
+ if self.local is not None:
+ parts.append("+{0}".format(self.local))
+
+ return "".join(parts)
+
+ @property
+ def epoch(self):
+ return self._version.epoch
+
+ @property
+ def release(self):
+ return self._version.release
+
+ @property
+ def pre(self):
+ return self._version.pre
+
+ @property
+ def post(self):
+ return self._version.post[1] if self._version.post else None
+
+ @property
+ def dev(self):
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
+ def local(self):
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
+ def public(self):
+ return str(self).split("+", 1)[0]
+
+ @property
+ def base_version(self):
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append("{0}!".format(self.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ return "".join(parts)
+
+ @property
+ def is_prerelease(self):
+ return self.dev is not None or self.pre is not None
+
+ @property
+ def is_postrelease(self):
+ return self.post is not None
+
+ @property
+ def is_devrelease(self):
+ return self.dev is not None
+
+
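+# A normalization sketch for the class above (doctest-style; spellings like
+# "-RC1" are folded into canonical form by _parse_letter_version below):
+#
+#     >>> str(Version("1.0.0-RC1"))
+#     '1.0.0rc1'
+#     >>> Version("1!2.0").epoch
+#     1
+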
+def _parse_letter_version(letter, number):
+ if letter:
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ if number is None:
+ number = 0
+
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ if letter == "alpha":
+ letter = "a"
+ elif letter == "beta":
+ letter = "b"
+ elif letter in ["c", "pre", "preview"]:
+ letter = "rc"
+ elif letter in ["rev", "r"]:
+ letter = "post"
+
+ return letter, int(number)
+ if not letter and number:
+ # We assume if we are given a number, but we are not given a letter
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
+ letter = "post"
+
+ return letter, int(number)
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_separators.split(local)
+ )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, then re-reverse the
+    # rest back into the correct order and make it a tuple to use as our
+    # sorting key.
+ release = tuple(
+ reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+ )
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ pre = -Infinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ pre = Infinity
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ post = -Infinity
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ dev = Infinity
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ local = -Infinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP 440.
+        # - Alphanumeric segments sort before numeric segments
+        # - Alphanumeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)
+
+ return epoch, release, pre, post, dev, local
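+
+# An illustrative consequence of the key construction above (doctest-style):
+# dev releases sort before pre-releases of the same release, which sort
+# before the final release:
+#
+#     >>> sorted([Version("1.0"), Version("1.0.dev0"), Version("1.0a1")])
+#     [<Version('1.0.dev0')>, <Version('1.0a1')>, <Version('1.0')>]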
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__init__.py
new file mode 100644
index 00000000..9c1a098f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__init__.py
@@ -0,0 +1,4 @@
+"""Wrappers to build Python packages using PEP 517 hooks
+"""
+
+__version__ = '0.5.0'
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..10aa9fea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc
new file mode 100644
index 00000000..55f8ff17
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-37.pyc
new file mode 100644
index 00000000..1ec4c7b1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-37.pyc
new file mode 100644
index 00000000..34d3785b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc
new file mode 100644
index 00000000..29f35461
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..0ccab21c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc
new file mode 100644
index 00000000..661eb0d4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc
new file mode 100644
index 00000000..bc4b5616
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/_in_process.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/_in_process.py
new file mode 100644
index 00000000..d6524b66
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/_in_process.py
@@ -0,0 +1,207 @@
+"""This is invoked in a subprocess to call the build backend hooks.
+
+It expects:
+- Command line args: hook_name, control_dir
+- Environment variable: PEP517_BUILD_BACKEND=entry.point:spec
+- control_dir/input.json:
+ - {"kwargs": {...}}
+
+Results:
+- control_dir/output.json
+ - {"return_val": ...}
+"""
+from glob import glob
+from importlib import import_module
+import os
+from os.path import join as pjoin
+import re
+import shutil
+import sys
+
+# This is run as a script, not a module, so it can't do a relative import
+import compat
+
+
+class BackendUnavailable(Exception):
+ """Raised if we cannot import the backend"""
+
+
+def _build_backend():
+ """Find and load the build backend"""
+ ep = os.environ['PEP517_BUILD_BACKEND']
+ mod_path, _, obj_path = ep.partition(':')
+ try:
+ obj = import_module(mod_path)
+ except ImportError:
+ raise BackendUnavailable
+ if obj_path:
+ for path_part in obj_path.split('.'):
+ obj = getattr(obj, path_part)
+ return obj
+
+
+def get_requires_for_build_wheel(config_settings):
+ """Invoke the optional get_requires_for_build_wheel hook
+
+ Returns [] if the hook is not defined.
+ """
+ backend = _build_backend()
+ try:
+ hook = backend.get_requires_for_build_wheel
+ except AttributeError:
+ return []
+ else:
+ return hook(config_settings)
+
+
+def prepare_metadata_for_build_wheel(metadata_directory, config_settings):
+ """Invoke optional prepare_metadata_for_build_wheel
+
+ Implements a fallback by building a wheel if the hook isn't defined.
+ """
+ backend = _build_backend()
+ try:
+ hook = backend.prepare_metadata_for_build_wheel
+ except AttributeError:
+ return _get_wheel_metadata_from_wheel(backend, metadata_directory,
+ config_settings)
+ else:
+ return hook(metadata_directory, config_settings)
+
+
+WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL'
+
+
+def _dist_info_files(whl_zip):
+ """Identify the .dist-info folder inside a wheel ZipFile."""
+ res = []
+ for path in whl_zip.namelist():
+ m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
+ if m:
+ res.append(path)
+ if res:
+ return res
+ raise Exception("No .dist-info folder found in wheel")
+
+
+def _get_wheel_metadata_from_wheel(
+ backend, metadata_directory, config_settings):
+ """Build a wheel and extract the metadata from it.
+
+    Fallback for when the build backend does not
+    define the 'prepare_metadata_for_build_wheel' hook.
+ """
+ from zipfile import ZipFile
+ whl_basename = backend.build_wheel(metadata_directory, config_settings)
+ with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'):
+ pass # Touch marker file
+
+ whl_file = os.path.join(metadata_directory, whl_basename)
+ with ZipFile(whl_file) as zipf:
+ dist_info = _dist_info_files(zipf)
+ zipf.extractall(path=metadata_directory, members=dist_info)
+ return dist_info[0].split('/')[0]
+
+
+def _find_already_built_wheel(metadata_directory):
+    """Check for a wheel already built by prepare_metadata_for_build_wheel.
+ """
+ if not metadata_directory:
+ return None
+ metadata_parent = os.path.dirname(metadata_directory)
+ if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)):
+ return None
+
+ whl_files = glob(os.path.join(metadata_parent, '*.whl'))
+ if not whl_files:
+ print('Found wheel built marker, but no .whl files')
+ return None
+ if len(whl_files) > 1:
+ print('Found multiple .whl files; unspecified behaviour. '
+ 'Will call build_wheel.')
+ return None
+
+ # Exactly one .whl file
+ return whl_files[0]
+
+
+def build_wheel(wheel_directory, config_settings, metadata_directory=None):
+ """Invoke the mandatory build_wheel hook.
+
+ If a wheel was already built in the
+ prepare_metadata_for_build_wheel fallback, this
+ will copy it rather than rebuilding the wheel.
+ """
+ prebuilt_whl = _find_already_built_wheel(metadata_directory)
+ if prebuilt_whl:
+ shutil.copy2(prebuilt_whl, wheel_directory)
+ return os.path.basename(prebuilt_whl)
+
+ return _build_backend().build_wheel(wheel_directory, config_settings,
+ metadata_directory)
+
+
+def get_requires_for_build_sdist(config_settings):
+    """Invoke the optional get_requires_for_build_sdist hook
+
+ Returns [] if the hook is not defined.
+ """
+ backend = _build_backend()
+ try:
+ hook = backend.get_requires_for_build_sdist
+ except AttributeError:
+ return []
+ else:
+ return hook(config_settings)
+
+
+class _DummyException(Exception):
+ """Nothing should ever raise this exception"""
+
+
+class GotUnsupportedOperation(Exception):
+ """For internal use when backend raises UnsupportedOperation"""
+
+
+def build_sdist(sdist_directory, config_settings):
+ """Invoke the mandatory build_sdist hook."""
+ backend = _build_backend()
+ try:
+ return backend.build_sdist(sdist_directory, config_settings)
+ except getattr(backend, 'UnsupportedOperation', _DummyException):
+ raise GotUnsupportedOperation
+
+
+HOOK_NAMES = {
+ 'get_requires_for_build_wheel',
+ 'prepare_metadata_for_build_wheel',
+ 'build_wheel',
+ 'get_requires_for_build_sdist',
+ 'build_sdist',
+}
+
+
+def main():
+ if len(sys.argv) < 3:
+ sys.exit("Needs args: hook_name, control_dir")
+ hook_name = sys.argv[1]
+ control_dir = sys.argv[2]
+ if hook_name not in HOOK_NAMES:
+ sys.exit("Unknown hook: %s" % hook_name)
+ hook = globals()[hook_name]
+
+ hook_input = compat.read_json(pjoin(control_dir, 'input.json'))
+
+ json_out = {'unsupported': False, 'return_val': None}
+ try:
+ json_out['return_val'] = hook(**hook_input['kwargs'])
+ except BackendUnavailable:
+ json_out['no_backend'] = True
+ except GotUnsupportedOperation:
+ json_out['unsupported'] = True
+
+ compat.write_json(json_out, pjoin(control_dir, 'output.json'), indent=2)
+
+
+if __name__ == '__main__':
+ main()
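+
+# A hedged sketch of the round trip this script implements; the backend name
+# and paths are only examples, and the parent process normally drives this:
+#
+#     $ echo '{"kwargs": {"config_settings": {}}}' > /tmp/ctrl/input.json
+#     $ PEP517_BUILD_BACKEND=setuptools.build_meta \
+#           python _in_process.py get_requires_for_build_wheel /tmp/ctrl
+#     $ cat /tmp/ctrl/output.json
+#     {"unsupported": false, "return_val": [...]}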
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/build.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/build.py
new file mode 100644
index 00000000..ac6c9495
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/build.py
@@ -0,0 +1,108 @@
+"""Build a project using PEP 517 hooks.
+"""
+import argparse
+import logging
+import os
+import contextlib
+from pip._vendor import pytoml
+import shutil
+import errno
+import tempfile
+
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller
+
+log = logging.getLogger(__name__)
+
+
+@contextlib.contextmanager
+def tempdir():
+ td = tempfile.mkdtemp()
+ try:
+ yield td
+ finally:
+ shutil.rmtree(td)
+
+
+def _do_build(hooks, env, dist, dest):
+ get_requires_name = 'get_requires_for_build_{dist}'.format(**locals())
+ get_requires = getattr(hooks, get_requires_name)
+ reqs = get_requires({})
+ log.info('Got build requires: %s', reqs)
+
+ env.pip_install(reqs)
+ log.info('Installed dynamic build dependencies')
+
+ with tempdir() as td:
+ log.info('Trying to build %s in %s', dist, td)
+ build_name = 'build_{dist}'.format(**locals())
+ build = getattr(hooks, build_name)
+ filename = build(td, {})
+ source = os.path.join(td, filename)
+ shutil.move(source, os.path.join(dest, os.path.basename(filename)))
+
+
+def mkdir_p(*args, **kwargs):
+ """Like `mkdir`, but does not raise an exception if the
+ directory already exists.
+ """
+ try:
+ return os.mkdir(*args, **kwargs)
+ except OSError as exc:
+ if exc.errno != errno.EEXIST:
+ raise
+
+
+def build(source_dir, dist, dest=None):
+ pyproject = os.path.join(source_dir, 'pyproject.toml')
+ dest = os.path.join(source_dir, dest or 'dist')
+ mkdir_p(dest)
+
+ with open(pyproject) as f:
+ pyproject_data = pytoml.load(f)
+ # Ensure the mandatory data can be loaded
+ buildsys = pyproject_data['build-system']
+ requires = buildsys['requires']
+ backend = buildsys['build-backend']
+
+ hooks = Pep517HookCaller(source_dir, backend)
+
+ with BuildEnvironment() as env:
+ env.pip_install(requires)
+ _do_build(hooks, env, dist, dest)
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+ 'source_dir',
+ help="A directory containing pyproject.toml",
+)
+parser.add_argument(
+ '--binary', '-b',
+ action='store_true',
+ default=False,
+)
+parser.add_argument(
+ '--source', '-s',
+ action='store_true',
+ default=False,
+)
+parser.add_argument(
+ '--out-dir', '-o',
+ help="Destination in which to save the builds relative to source dir",
+)
+
+
+def main(args):
+ # determine which dists to build
+ dists = list(filter(None, (
+ 'sdist' if args.source or not args.binary else None,
+ 'wheel' if args.binary or not args.source else None,
+ )))
+
+ for dist in dists:
+ build(args.source_dir, dist, args.out_dir)
+
+
+if __name__ == '__main__':
+ main(parser.parse_args())
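+
+# Command-line usage sketch (flag names as defined by the parser above; the
+# module path assumes the standalone pep517 distribution rather than this
+# vendored copy):
+#
+#     python -m pep517.build path/to/project             # sdist and wheel
+#     python -m pep517.build --binary path/to/project    # wheel only
+#     python -m pep517.build --source path/to/project    # sdist only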
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/check.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/check.py
new file mode 100644
index 00000000..f4cdc6be
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/check.py
@@ -0,0 +1,202 @@
+"""Check a project and backend by attempting to build using PEP 517 hooks.
+"""
+import argparse
+import logging
+import os
+from os.path import isfile, join as pjoin
+from pip._vendor.pytoml import TomlError, load as toml_load
+import shutil
+from subprocess import CalledProcessError
+import sys
+import tarfile
+from tempfile import mkdtemp
+import zipfile
+
+from .colorlog import enable_colourful_output
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller
+
+log = logging.getLogger(__name__)
+
+
+def check_build_sdist(hooks, build_sys_requires):
+ with BuildEnvironment() as env:
+ try:
+ env.pip_install(build_sys_requires)
+ log.info('Installed static build dependencies')
+ except CalledProcessError:
+ log.error('Failed to install static build dependencies')
+ return False
+
+ try:
+ reqs = hooks.get_requires_for_build_sdist({})
+ log.info('Got build requires: %s', reqs)
+ except Exception:
+ log.error('Failure in get_requires_for_build_sdist', exc_info=True)
+ return False
+
+ try:
+ env.pip_install(reqs)
+ log.info('Installed dynamic build dependencies')
+ except CalledProcessError:
+ log.error('Failed to install dynamic build dependencies')
+ return False
+
+ td = mkdtemp()
+ log.info('Trying to build sdist in %s', td)
+ try:
+ try:
+ filename = hooks.build_sdist(td, {})
+ log.info('build_sdist returned %r', filename)
+ except Exception:
+ log.info('Failure in build_sdist', exc_info=True)
+ return False
+
+ if not filename.endswith('.tar.gz'):
+ log.error(
+ "Filename %s doesn't have .tar.gz extension", filename)
+ return False
+
+ path = pjoin(td, filename)
+ if isfile(path):
+ log.info("Output file %s exists", path)
+ else:
+ log.error("Output file %s does not exist", path)
+ return False
+
+ if tarfile.is_tarfile(path):
+ log.info("Output file is a tar file")
+ else:
+ log.error("Output file is not a tar file")
+ return False
+
+ finally:
+ shutil.rmtree(td)
+
+ return True
+
+
+def check_build_wheel(hooks, build_sys_requires):
+ with BuildEnvironment() as env:
+ try:
+ env.pip_install(build_sys_requires)
+ log.info('Installed static build dependencies')
+ except CalledProcessError:
+ log.error('Failed to install static build dependencies')
+ return False
+
+ try:
+ reqs = hooks.get_requires_for_build_wheel({})
+ log.info('Got build requires: %s', reqs)
+ except Exception:
+            log.error('Failure in get_requires_for_build_wheel', exc_info=True)
+ return False
+
+ try:
+ env.pip_install(reqs)
+ log.info('Installed dynamic build dependencies')
+ except CalledProcessError:
+ log.error('Failed to install dynamic build dependencies')
+ return False
+
+ td = mkdtemp()
+ log.info('Trying to build wheel in %s', td)
+ try:
+ try:
+ filename = hooks.build_wheel(td, {})
+ log.info('build_wheel returned %r', filename)
+ except Exception:
+ log.info('Failure in build_wheel', exc_info=True)
+ return False
+
+ if not filename.endswith('.whl'):
+ log.error("Filename %s doesn't have .whl extension", filename)
+ return False
+
+ path = pjoin(td, filename)
+ if isfile(path):
+ log.info("Output file %s exists", path)
+ else:
+ log.error("Output file %s does not exist", path)
+ return False
+
+ if zipfile.is_zipfile(path):
+ log.info("Output file is a zip file")
+ else:
+ log.error("Output file is not a zip file")
+ return False
+
+ finally:
+ shutil.rmtree(td)
+
+ return True
+
+
+def check(source_dir):
+ pyproject = pjoin(source_dir, 'pyproject.toml')
+ if isfile(pyproject):
+ log.info('Found pyproject.toml')
+ else:
+ log.error('Missing pyproject.toml')
+ return False
+
+ try:
+ with open(pyproject) as f:
+ pyproject_data = toml_load(f)
+ # Ensure the mandatory data can be loaded
+ buildsys = pyproject_data['build-system']
+ requires = buildsys['requires']
+ backend = buildsys['build-backend']
+ log.info('Loaded pyproject.toml')
+ except (TomlError, KeyError):
+ log.error("Invalid pyproject.toml", exc_info=True)
+ return False
+
+ hooks = Pep517HookCaller(source_dir, backend)
+
+ sdist_ok = check_build_sdist(hooks, requires)
+ wheel_ok = check_build_wheel(hooks, requires)
+
+ if not sdist_ok:
+ log.warning('Sdist checks failed; scroll up to see')
+ if not wheel_ok:
+ log.warning('Wheel checks failed')
+
+    return sdist_ok and wheel_ok
+
+
+def main(argv=None):
+ ap = argparse.ArgumentParser()
+ ap.add_argument(
+ 'source_dir',
+ help="A directory containing pyproject.toml")
+ args = ap.parse_args(argv)
+
+ enable_colourful_output()
+
+ ok = check(args.source_dir)
+
+ if ok:
+ print(ansi('Checks passed', 'green'))
+ else:
+ print(ansi('Checks failed', 'red'))
+ sys.exit(1)
+
+
+ansi_codes = {
+ 'reset': '\x1b[0m',
+ 'bold': '\x1b[1m',
+ 'red': '\x1b[31m',
+ 'green': '\x1b[32m',
+}
+
+
+def ansi(s, attr):
+ if os.name != 'nt' and sys.stdout.isatty():
+ return ansi_codes[attr] + str(s) + ansi_codes['reset']
+ else:
+ return str(s)
+
+
+if __name__ == '__main__':
+ main()
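+
+# Command-line usage sketch (same caveat about the vendored module path):
+#
+#     python -m pep517.check path/to/project
+#
+# Exits with status 1, after printing 'Checks failed', when the build checks
+# do not pass.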
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/colorlog.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/colorlog.py
new file mode 100644
index 00000000..69c8a59d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/colorlog.py
@@ -0,0 +1,115 @@
+"""Nicer log formatting with colours.
+
+Code copied from Tornado, Apache licensed.
+"""
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import sys
+
+try:
+ import curses
+except ImportError:
+ curses = None
+
+
+def _stderr_supports_color():
+ color = False
+ if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
+ try:
+ curses.setupterm()
+ if curses.tigetnum("colors") > 0:
+ color = True
+ except Exception:
+ pass
+ return color
+
+
+class LogFormatter(logging.Formatter):
+ """Log formatter with colour support
+ """
+ DEFAULT_COLORS = {
+ logging.INFO: 2, # Green
+ logging.WARNING: 3, # Yellow
+ logging.ERROR: 1, # Red
+ logging.CRITICAL: 1,
+ }
+
+ def __init__(self, color=True, datefmt=None):
+ r"""
+        :arg bool color: Enables color support.
+        :arg string datefmt: Datetime format.
+            Used for formatting the ``%(asctime)s`` placeholder.
+ """
+ logging.Formatter.__init__(self, datefmt=datefmt)
+ self._colors = {}
+ if color and _stderr_supports_color():
+ # The curses module has some str/bytes confusion in
+ # python3. Until version 3.2.3, most methods return
+ # bytes, but only accept strings. In addition, we want to
+ # output these strings with the logging module, which
+            # output these strings with the logging module, which
+            # works with unicode strings. The explicit calls to
+            # str() below do the right conversion in Python 3.
+ fg_color = (curses.tigetstr("setaf") or
+ curses.tigetstr("setf") or "")
+ if (3, 0) < sys.version_info < (3, 2, 3):
+ fg_color = str(fg_color, "ascii")
+
+ for levelno, code in self.DEFAULT_COLORS.items():
+ self._colors[levelno] = str(
+ curses.tparm(fg_color, code), "ascii")
+ self._normal = str(curses.tigetstr("sgr0"), "ascii")
+
+ scr = curses.initscr()
+ self.termwidth = scr.getmaxyx()[1]
+ curses.endwin()
+ else:
+ self._normal = ''
+ # Default width is usually 80, but too wide is
+ # worse than too narrow
+ self.termwidth = 70
+
+ def formatMessage(self, record):
+ mlen = len(record.message)
+ right_text = '{initial}-{name}'.format(initial=record.levelname[0],
+ name=record.name)
+ if mlen + len(right_text) < self.termwidth:
+ space = ' ' * (self.termwidth - (mlen + len(right_text)))
+ else:
+ space = ' '
+
+ if record.levelno in self._colors:
+ start_color = self._colors[record.levelno]
+ end_color = self._normal
+ else:
+ start_color = end_color = ''
+
+ return record.message + space + start_color + right_text + end_color
+
+
+def enable_colourful_output(level=logging.INFO):
+ handler = logging.StreamHandler()
+ handler.setFormatter(LogFormatter())
+ logging.root.addHandler(handler)
+ logging.root.setLevel(level)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/compat.py
new file mode 100644
index 00000000..01c66fc7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/compat.py
@@ -0,0 +1,23 @@
+"""Handle reading and writing JSON in UTF-8, on Python 3 and 2."""
+import json
+import sys
+
+if sys.version_info[0] >= 3:
+ # Python 3
+ def write_json(obj, path, **kwargs):
+ with open(path, 'w', encoding='utf-8') as f:
+ json.dump(obj, f, **kwargs)
+
+ def read_json(path):
+ with open(path, 'r', encoding='utf-8') as f:
+ return json.load(f)
+
+else:
+ # Python 2
+ def write_json(obj, path, **kwargs):
+ with open(path, 'wb') as f:
+ json.dump(obj, f, encoding='utf-8', **kwargs)
+
+ def read_json(path):
+ with open(path, 'rb') as f:
+ return json.load(f)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/envbuild.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/envbuild.py
new file mode 100644
index 00000000..f7ac5f46
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/envbuild.py
@@ -0,0 +1,158 @@
+"""Build wheels/sdists by installing build deps to a temporary environment.
+"""
+
+import os
+import logging
+from pip._vendor import pytoml
+import shutil
+from subprocess import check_call
+import sys
+from sysconfig import get_paths
+from tempfile import mkdtemp
+
+from .wrappers import Pep517HookCaller
+
+log = logging.getLogger(__name__)
+
+
+def _load_pyproject(source_dir):
+ with open(os.path.join(source_dir, 'pyproject.toml')) as f:
+ pyproject_data = pytoml.load(f)
+ buildsys = pyproject_data['build-system']
+ return buildsys['requires'], buildsys['build-backend']
+
+
+class BuildEnvironment(object):
+ """Context manager to install build deps in a simple temporary environment
+
+ Based on code I wrote for pip, which is MIT licensed.
+ """
+ # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining
+ # a copy of this software and associated documentation files (the
+ # "Software"), to deal in the Software without restriction, including
+ # without limitation the rights to use, copy, modify, merge, publish,
+ # distribute, sublicense, and/or sell copies of the Software, and to
+ # permit persons to whom the Software is furnished to do so, subject to
+ # the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be
+ # included in all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ path = None
+
+ def __init__(self, cleanup=True):
+ self._cleanup = cleanup
+
+ def __enter__(self):
+ self.path = mkdtemp(prefix='pep517-build-env-')
+ log.info('Temporary build environment: %s', self.path)
+
+ self.save_path = os.environ.get('PATH', None)
+ self.save_pythonpath = os.environ.get('PYTHONPATH', None)
+
+ install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
+ install_dirs = get_paths(install_scheme, vars={
+ 'base': self.path,
+ 'platbase': self.path,
+ })
+
+ scripts = install_dirs['scripts']
+ if self.save_path:
+ os.environ['PATH'] = scripts + os.pathsep + self.save_path
+ else:
+ os.environ['PATH'] = scripts + os.pathsep + os.defpath
+
+ if install_dirs['purelib'] == install_dirs['platlib']:
+ lib_dirs = install_dirs['purelib']
+ else:
+ lib_dirs = install_dirs['purelib'] + os.pathsep + \
+ install_dirs['platlib']
+ if self.save_pythonpath:
+ os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
+ self.save_pythonpath
+ else:
+ os.environ['PYTHONPATH'] = lib_dirs
+
+ return self
+
+ def pip_install(self, reqs):
+ """Install dependencies into this env by calling pip in a subprocess"""
+ if not reqs:
+ return
+ log.info('Calling pip to install %s', reqs)
+ check_call([
+ sys.executable, '-m', 'pip', 'install', '--ignore-installed',
+ '--prefix', self.path] + list(reqs))
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ needs_cleanup = (
+ self._cleanup and
+ self.path is not None and
+ os.path.isdir(self.path)
+ )
+ if needs_cleanup:
+ shutil.rmtree(self.path)
+
+ if self.save_path is None:
+ os.environ.pop('PATH', None)
+ else:
+ os.environ['PATH'] = self.save_path
+
+ if self.save_pythonpath is None:
+ os.environ.pop('PYTHONPATH', None)
+ else:
+ os.environ['PYTHONPATH'] = self.save_pythonpath
+
+
+def build_wheel(source_dir, wheel_dir, config_settings=None):
+ """Build a wheel from a source directory using PEP 517 hooks.
+
+ :param str source_dir: Source directory containing pyproject.toml
+ :param str wheel_dir: Target directory to create wheel in
+ :param dict config_settings: Options to pass to build backend
+
+ This is a blocking function which will run pip in a subprocess to install
+ build requirements.
+ """
+ if config_settings is None:
+ config_settings = {}
+ requires, backend = _load_pyproject(source_dir)
+ hooks = Pep517HookCaller(source_dir, backend)
+
+ with BuildEnvironment() as env:
+ env.pip_install(requires)
+ reqs = hooks.get_requires_for_build_wheel(config_settings)
+ env.pip_install(reqs)
+ return hooks.build_wheel(wheel_dir, config_settings)
+
+
+def build_sdist(source_dir, sdist_dir, config_settings=None):
+ """Build an sdist from a source directory using PEP 517 hooks.
+
+ :param str source_dir: Source directory containing pyproject.toml
+ :param str sdist_dir: Target directory to place sdist in
+ :param dict config_settings: Options to pass to build backend
+
+ This is a blocking function which will run pip in a subprocess to install
+ build requirements.
+ """
+ if config_settings is None:
+ config_settings = {}
+ requires, backend = _load_pyproject(source_dir)
+ hooks = Pep517HookCaller(source_dir, backend)
+
+ with BuildEnvironment() as env:
+ env.pip_install(requires)
+ reqs = hooks.get_requires_for_build_sdist(config_settings)
+ env.pip_install(reqs)
+ return hooks.build_sdist(sdist_dir, config_settings)
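+
+# A hedged usage sketch of the two entry points above; the paths are
+# placeholders:
+#
+#     from pip._vendor.pep517.envbuild import build_wheel
+#     whl_filename = build_wheel("path/to/project", "path/to/dist")
+#
+# Each call creates a throwaway environment, pip-installs the static and
+# dynamic build requirements into it, and then runs the backend hook.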
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/wrappers.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/wrappers.py
new file mode 100644
index 00000000..b14b8991
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pep517/wrappers.py
@@ -0,0 +1,163 @@
+from contextlib import contextmanager
+import os
+from os.path import dirname, abspath, join as pjoin
+import shutil
+from subprocess import check_call
+import sys
+from tempfile import mkdtemp
+
+from . import compat
+
+_in_proc_script = pjoin(dirname(abspath(__file__)), '_in_process.py')
+
+
+@contextmanager
+def tempdir():
+ td = mkdtemp()
+ try:
+ yield td
+ finally:
+ shutil.rmtree(td)
+
+
+class BackendUnavailable(Exception):
+ """Will be raised if the backend cannot be imported in the hook process."""
+
+
+class UnsupportedOperation(Exception):
+ """May be raised by build_sdist if the backend indicates that it can't."""
+
+
+def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
+ """The default method of calling the wrapper subprocess."""
+ env = os.environ.copy()
+ if extra_environ:
+ env.update(extra_environ)
+
+ check_call(cmd, cwd=cwd, env=env)
+
+
+class Pep517HookCaller(object):
+ """A wrapper around a source directory to be built with a PEP 517 backend.
+
+ source_dir : The path to the source directory, containing pyproject.toml.
+ backend : The build backend spec, as per PEP 517, from pyproject.toml.
+ """
+ def __init__(self, source_dir, build_backend):
+ self.source_dir = abspath(source_dir)
+ self.build_backend = build_backend
+ self._subprocess_runner = default_subprocess_runner
+
+ # TODO: Is this over-engineered? Maybe frontends only need to
+ # set this when creating the wrapper, not on every call.
+ @contextmanager
+ def subprocess_runner(self, runner):
+ prev = self._subprocess_runner
+ self._subprocess_runner = runner
+ yield
+ self._subprocess_runner = prev
+
+ def get_requires_for_build_wheel(self, config_settings=None):
+ """Identify packages required for building a wheel
+
+ Returns a list of dependency specifications, e.g.:
+ ["wheel >= 0.25", "setuptools"]
+
+ This does not include requirements specified in pyproject.toml.
+ It returns the result of calling the equivalently named hook in a
+ subprocess.
+ """
+ return self._call_hook('get_requires_for_build_wheel', {
+ 'config_settings': config_settings
+ })
+
+ def prepare_metadata_for_build_wheel(
+ self, metadata_directory, config_settings=None):
+ """Prepare a *.dist-info folder with metadata for this project.
+
+ Returns the name of the newly created folder.
+
+ If the build backend defines a hook with this name, it will be called
+ in a subprocess. If not, the backend will be asked to build a wheel,
+ and the dist-info extracted from that.
+ """
+ return self._call_hook('prepare_metadata_for_build_wheel', {
+ 'metadata_directory': abspath(metadata_directory),
+ 'config_settings': config_settings,
+ })
+
+ def build_wheel(
+ self, wheel_directory, config_settings=None,
+ metadata_directory=None):
+ """Build a wheel from this project.
+
+ Returns the name of the newly created file.
+
+ In general, this will call the 'build_wheel' hook in the backend.
+ However, if that was previously called by
+ 'prepare_metadata_for_build_wheel', and the same metadata_directory is
+ used, the previously built wheel will be copied to wheel_directory.
+ """
+ if metadata_directory is not None:
+ metadata_directory = abspath(metadata_directory)
+ return self._call_hook('build_wheel', {
+ 'wheel_directory': abspath(wheel_directory),
+ 'config_settings': config_settings,
+ 'metadata_directory': metadata_directory,
+ })
+
+ def get_requires_for_build_sdist(self, config_settings=None):
+        """Identify packages required for building an sdist
+
+ Returns a list of dependency specifications, e.g.:
+ ["setuptools >= 26"]
+
+ This does not include requirements specified in pyproject.toml.
+ It returns the result of calling the equivalently named hook in a
+ subprocess.
+ """
+ return self._call_hook('get_requires_for_build_sdist', {
+ 'config_settings': config_settings
+ })
+
+ def build_sdist(self, sdist_directory, config_settings=None):
+ """Build an sdist from this project.
+
+ Returns the name of the newly created file.
+
+ This calls the 'build_sdist' backend hook in a subprocess.
+ """
+ return self._call_hook('build_sdist', {
+ 'sdist_directory': abspath(sdist_directory),
+ 'config_settings': config_settings,
+ })
+
+ def _call_hook(self, hook_name, kwargs):
+ # On Python 2, pytoml returns Unicode values (which is correct) but the
+ # environment passed to check_call needs to contain string values. We
+ # convert here by encoding using ASCII (the backend can only contain
+ # letters, digits and _, . and : characters, and will be used as a
+ # Python identifier, so non-ASCII content is wrong on Python 2 in
+ # any case).
+ if sys.version_info[0] == 2:
+ build_backend = self.build_backend.encode('ASCII')
+ else:
+ build_backend = self.build_backend
+
+ with tempdir() as td:
+ compat.write_json({'kwargs': kwargs}, pjoin(td, 'input.json'),
+ indent=2)
+
+ # Run the hook in a subprocess
+ self._subprocess_runner(
+ [sys.executable, _in_proc_script, hook_name, td],
+ cwd=self.source_dir,
+ extra_environ={'PEP517_BUILD_BACKEND': build_backend}
+ )
+
+ data = compat.read_json(pjoin(td, 'output.json'))
+ if data.get('unsupported'):
+ raise UnsupportedOperation
+ if data.get('no_backend'):
+ raise BackendUnavailable
+ return data['return_val']
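+
+
+# Illustrative sketch (not part of the upstream module): one plausible way a
+# frontend could drive Pep517HookCaller using the hooks defined above. The
+# 'logging_runner' helper and the logger name are assumptions for this example.
+def _example_frontend_build(source_dir, wheel_directory):
+    import logging
+    log = logging.getLogger('pep517.example')  # hypothetical logger name
+
+    def logging_runner(cmd, cwd=None, extra_environ=None):
+        # Wrap the default runner so each hook invocation is logged.
+        log.info("Running %s in %s", cmd, cwd)
+        default_subprocess_runner(cmd, cwd=cwd, extra_environ=extra_environ)
+
+    hooks = Pep517HookCaller(source_dir, 'setuptools.build_meta')
+    with hooks.subprocess_runner(logging_runner):
+        # Build requirements beyond those listed in pyproject.toml.
+        extra_deps = hooks.get_requires_for_build_wheel()
+        log.info("Extra build requirements: %s", extra_deps)
+        return hooks.build_wheel(wheel_directory)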
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py
new file mode 100644
index 00000000..fdd40de4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py
@@ -0,0 +1,3286 @@
+# coding: utf-8
+"""
+Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof. The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is. Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files. It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+"""
+
+from __future__ import absolute_import
+
+import sys
+import os
+import io
+import time
+import re
+import types
+import zipfile
+import zipimport
+import warnings
+import stat
+import functools
+import pkgutil
+import operator
+import platform
+import collections
+import plistlib
+import email.parser
+import errno
+import tempfile
+import textwrap
+import itertools
+import inspect
+import ntpath
+import posixpath
+from pkgutil import get_importer
+
+try:
+ import _imp
+except ImportError:
+ # Python 3.2 compatibility
+ import imp as _imp
+
+try:
+ FileExistsError
+except NameError:
+ FileExistsError = OSError
+
+from pip._vendor import six
+from pip._vendor.six.moves import urllib, map, filter
+
+# capture these to bypass sandboxing
+from os import utime
+try:
+ from os import mkdir, rename, unlink
+ WRITE_SUPPORT = True
+except ImportError:
+ # no write support, probably under GAE
+ WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+try:
+ import importlib.machinery as importlib_machinery
+ # access attribute to force import under delayed import mechanisms.
+ importlib_machinery.__name__
+except ImportError:
+ importlib_machinery = None
+
+from . import py31compat
+from pip._vendor import appdirs
+from pip._vendor import packaging
+__import__('pip._vendor.packaging.version')
+__import__('pip._vendor.packaging.specifiers')
+__import__('pip._vendor.packaging.requirements')
+__import__('pip._vendor.packaging.markers')
+
+
+__metaclass__ = type
+
+
+if (3, 0) < sys.version_info < (3, 4):
+ raise RuntimeError("Python 3.4 or later is required")
+
+if six.PY2:
+ # Those builtin exceptions are only defined in Python 3
+ PermissionError = None
+ NotADirectoryError = None
+
+# declare some globals that will be defined later to
+# satisfy the linters.
+require = None
+working_set = None
+add_activation_listener = None
+resources_stream = None
+cleanup_resources = None
+resource_dir = None
+resource_stream = None
+set_extraction_path = None
+resource_isdir = None
+resource_string = None
+iter_entry_points = None
+resource_listdir = None
+resource_filename = None
+resource_exists = None
+_distribution_finders = None
+_namespace_handlers = None
+_namespace_packages = None
+
+
+class PEP440Warning(RuntimeWarning):
+ """
+ Used when there is an issue with a version or specifier not complying with
+ PEP 440.
+ """
+
+
+def parse_version(v):
+ try:
+ return packaging.version.Version(v)
+ except packaging.version.InvalidVersion:
+ return packaging.version.LegacyVersion(v)
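+
+
+# Illustrative sketch (not upstream code): parse_version() yields PEP 440
+# Version objects that compare numerically, and falls back to LegacyVersion
+# for strings that are not valid PEP 440 versions.
+def _example_parse_version():
+    assert parse_version('1.10') > parse_version('1.9')
+    assert isinstance(parse_version('not!a!version'),
+                      packaging.version.LegacyVersion)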
+
+
+_state_vars = {}
+
+
+def _declare_state(vartype, **kw):
+ globals().update(kw)
+ _state_vars.update(dict.fromkeys(kw, vartype))
+
+
+def __getstate__():
+ state = {}
+ g = globals()
+ for k, v in _state_vars.items():
+ state[k] = g['_sget_' + v](g[k])
+ return state
+
+
+def __setstate__(state):
+ g = globals()
+ for k, v in state.items():
+ g['_sset_' + _state_vars[k]](k, g[k], v)
+ return state
+
+
+def _sget_dict(val):
+ return val.copy()
+
+
+def _sset_dict(key, ob, state):
+ ob.clear()
+ ob.update(state)
+
+
+def _sget_object(val):
+ return val.__getstate__()
+
+
+def _sset_object(key, ob, state):
+ ob.__setstate__(state)
+
+
+_sget_none = _sset_none = lambda *args: None
+
+
+def get_supported_platform():
+ """Return this platform's maximum compatible version.
+
+ distutils.util.get_platform() normally reports the minimum version
+ of Mac OS X that would be required to *use* extensions produced by
+ distutils. But what we want when checking compatibility is to know the
+ version of Mac OS X that we are *running*. To allow usage of packages that
+ explicitly require a newer version of Mac OS X, we must also know the
+ current version of the OS.
+
+ If this condition occurs for any other platform with a version in its
+ platform strings, this function should be extended accordingly.
+ """
+ plat = get_build_platform()
+ m = macosVersionString.match(plat)
+ if m is not None and sys.platform == "darwin":
+ try:
+ plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+ except ValueError:
+ # not Mac OS X
+ pass
+ return plat
+
+
+__all__ = [
+ # Basic resource access and distribution/entry point discovery
+ 'require', 'run_script', 'get_provider', 'get_distribution',
+ 'load_entry_point', 'get_entry_map', 'get_entry_info',
+ 'iter_entry_points',
+ 'resource_string', 'resource_stream', 'resource_filename',
+ 'resource_listdir', 'resource_exists', 'resource_isdir',
+
+ # Environmental control
+ 'declare_namespace', 'working_set', 'add_activation_listener',
+ 'find_distributions', 'set_extraction_path', 'cleanup_resources',
+ 'get_default_cache',
+
+ # Primary implementation classes
+ 'Environment', 'WorkingSet', 'ResourceManager',
+ 'Distribution', 'Requirement', 'EntryPoint',
+
+ # Exceptions
+ 'ResolutionError', 'VersionConflict', 'DistributionNotFound',
+ 'UnknownExtra', 'ExtractionError',
+
+ # Warnings
+ 'PEP440Warning',
+
+ # Parsing functions and string utilities
+ 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
+ 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
+ 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
+
+ # filesystem utilities
+ 'ensure_directory', 'normalize_path',
+
+ # Distribution "precedence" constants
+ 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
+
+ # "Provider" interfaces, implementations, and registration/lookup APIs
+ 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
+ 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
+ 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
+ 'register_finder', 'register_namespace_handler', 'register_loader_type',
+ 'fixup_namespace_packages', 'get_importer',
+
+ # Warnings
+ 'PkgResourcesDeprecationWarning',
+
+ # Deprecated/backward compatibility only
+ 'run_main', 'AvailableDistributions',
+]
+
+
+class ResolutionError(Exception):
+ """Abstract base for dependency resolution errors"""
+
+ def __repr__(self):
+ return self.__class__.__name__ + repr(self.args)
+
+
+class VersionConflict(ResolutionError):
+ """
+ An already-installed version conflicts with the requested version.
+
+ Should be initialized with the installed Distribution and the requested
+ Requirement.
+ """
+
+ _template = "{self.dist} is installed but {self.req} is required"
+
+ @property
+ def dist(self):
+ return self.args[0]
+
+ @property
+ def req(self):
+ return self.args[1]
+
+ def report(self):
+ return self._template.format(**locals())
+
+ def with_context(self, required_by):
+ """
+ If required_by is non-empty, return a version of self that is a
+ ContextualVersionConflict.
+ """
+ if not required_by:
+ return self
+ args = self.args + (required_by,)
+ return ContextualVersionConflict(*args)
+
+
+class ContextualVersionConflict(VersionConflict):
+ """
+ A VersionConflict that accepts a third parameter, the set of the
+ requirements that required the installed Distribution.
+ """
+
+ _template = VersionConflict._template + ' by {self.required_by}'
+
+ @property
+ def required_by(self):
+ return self.args[2]
+
+
+class DistributionNotFound(ResolutionError):
+ """A requested distribution was not found"""
+
+ _template = ("The '{self.req}' distribution was not found "
+ "and is required by {self.requirers_str}")
+
+ @property
+ def req(self):
+ return self.args[0]
+
+ @property
+ def requirers(self):
+ return self.args[1]
+
+ @property
+ def requirers_str(self):
+ if not self.requirers:
+ return 'the application'
+ return ', '.join(self.requirers)
+
+ def report(self):
+ return self._template.format(**locals())
+
+ def __str__(self):
+ return self.report()
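+
+
+# Illustrative sketch (not upstream code): how these resolution errors render.
+# report() simply formats the positional args, so plain strings suffice here;
+# the 'foo'/'bar' names are hypothetical.
+def _example_resolution_errors():
+    vc = VersionConflict('foo 1.0', 'foo>=2')
+    assert vc.report() == 'foo 1.0 is installed but foo>=2 is required'
+    dnf = DistributionNotFound('foo>=2', ['bar'])
+    assert 'required by bar' in str(dnf)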
+
+
+class UnknownExtra(ResolutionError):
+ """Distribution doesn't have an "extra feature" of the given name"""
+
+
+_provider_factories = {}
+
+PY_MAJOR = sys.version[:3]
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+
+def register_loader_type(loader_type, provider_factory):
+ """Register `provider_factory` to make providers for `loader_type`
+
+ `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+ and `provider_factory` is a function that, passed a *module* object,
+ returns an ``IResourceProvider`` for that module.
+ """
+ _provider_factories[loader_type] = provider_factory
+
+
+def get_provider(moduleOrReq):
+ """Return an IResourceProvider for the named module or requirement"""
+ if isinstance(moduleOrReq, Requirement):
+ return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+ try:
+ module = sys.modules[moduleOrReq]
+ except KeyError:
+ __import__(moduleOrReq)
+ module = sys.modules[moduleOrReq]
+ loader = getattr(module, '__loader__', None)
+ return _find_adapter(_provider_factories, loader)(module)
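+
+
+# Illustrative sketch (not upstream code): get_provider() accepts a module
+# name and returns the IResourceProvider adapter for its loader. 'email' is
+# just an example of an importable stdlib package.
+def _example_get_provider():
+    provider = get_provider('email')
+    return provider.has_resource('__init__.py')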
+
+
+def _macosx_vers(_cache=[]):
+ if not _cache:
+ version = platform.mac_ver()[0]
+ # fallback for MacPorts
+ if version == '':
+ plist = '/System/Library/CoreServices/SystemVersion.plist'
+ if os.path.exists(plist):
+ if hasattr(plistlib, 'readPlist'):
+ plist_content = plistlib.readPlist(plist)
+ if 'ProductVersion' in plist_content:
+ version = plist_content['ProductVersion']
+
+ _cache.append(version.split('.'))
+ return _cache[0]
+
+
+def _macosx_arch(machine):
+ return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
+
+
+def get_build_platform():
+ """Return this platform's string for platform-specific distributions
+
+ XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+ needs some hacks for Linux and Mac OS X.
+ """
+ from sysconfig import get_platform
+
+ plat = get_platform()
+ if sys.platform == "darwin" and not plat.startswith('macosx-'):
+ try:
+ version = _macosx_vers()
+ machine = os.uname()[4].replace(" ", "_")
+ return "macosx-%d.%d-%s" % (
+ int(version[0]), int(version[1]),
+ _macosx_arch(machine),
+ )
+ except ValueError:
+ # if someone is running a non-Mac darwin system, this will fall
+ # through to the default implementation
+ pass
+ return plat
+
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+# XXX backward compat
+get_platform = get_build_platform
+
+
+def compatible_platforms(provided, required):
+ """Can code for the `provided` platform run on the `required` platform?
+
+ Returns true if either platform is ``None``, or the platforms are equal.
+
+ XXX Needs compatibility checks for Linux and other unixy OSes.
+ """
+ if provided is None or required is None or provided == required:
+ # easy case
+ return True
+
+ # Mac OS X special cases
+ reqMac = macosVersionString.match(required)
+ if reqMac:
+ provMac = macosVersionString.match(provided)
+
+ # is this a Mac package?
+ if not provMac:
+ # this is backwards compatibility for packages built before
+ # setuptools 0.6. All packages built after this point will
+ # use the new macosx designation.
+ provDarwin = darwinVersionString.match(provided)
+ if provDarwin:
+ dversion = int(provDarwin.group(1))
+ macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+ if dversion == 7 and macosversion >= "10.3" or \
+ dversion == 8 and macosversion >= "10.4":
+ return True
+ # egg isn't macosx or legacy darwin
+ return False
+
+ # are they the same major version and machine type?
+ if provMac.group(1) != reqMac.group(1) or \
+ provMac.group(3) != reqMac.group(3):
+ return False
+
+ # is the required OS major update >= the provided one?
+ if int(provMac.group(2)) > int(reqMac.group(2)):
+ return False
+
+ return True
+
+ # XXX Linux and other platforms' special cases should go here
+ return False
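+
+
+# Illustrative sketch (not upstream code) of the macOS special-casing above:
+# code built for an older macOS minor release is considered usable on a newer
+# one, within the same major version and machine type.
+def _example_compatible_platforms():
+    assert compatible_platforms(None, 'macosx-10.12-x86_64')
+    assert compatible_platforms('macosx-10.9-x86_64', 'macosx-10.12-x86_64')
+    assert not compatible_platforms('macosx-10.9-ppc', 'macosx-10.9-x86_64')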
+
+
+def run_script(dist_spec, script_name):
+ """Locate distribution `dist_spec` and run its `script_name` script"""
+ ns = sys._getframe(1).f_globals
+ name = ns['__name__']
+ ns.clear()
+ ns['__name__'] = name
+ require(dist_spec)[0].run_script(script_name, ns)
+
+
+# backward compatibility
+run_main = run_script
+
+
+def get_distribution(dist):
+ """Return a current distribution object for a Requirement or string"""
+ if isinstance(dist, six.string_types):
+ dist = Requirement.parse(dist)
+ if isinstance(dist, Requirement):
+ dist = get_provider(dist)
+ if not isinstance(dist, Distribution):
+ raise TypeError("Expected string, Requirement, or Distribution", dist)
+ return dist
+
+
+def load_entry_point(dist, group, name):
+ """Return `name` entry point of `group` for `dist` or raise ImportError"""
+ return get_distribution(dist).load_entry_point(group, name)
+
+
+def get_entry_map(dist, group=None):
+ """Return the entry point map for `group`, or the full entry map"""
+ return get_distribution(dist).get_entry_map(group)
+
+
+def get_entry_info(dist, group, name):
+ """Return the EntryPoint object for `group`+`name`, or ``None``"""
+ return get_distribution(dist).get_entry_info(group, name)
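+
+
+# Illustrative sketch (not upstream code): the module-level entry-point
+# helpers in action. The 'console_scripts' group is conventional; whether any
+# distribution in the working set populates it is environment-dependent, and
+# iter_entry_points is one of the globals bound later in this module.
+def _example_entry_points():
+    for ep in iter_entry_points('console_scripts'):
+        print('%s = %s' % (ep.name, ep.module_name))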
+
+
+class IMetadataProvider:
+ def has_metadata(name):
+ """Does the package's distribution contain the named metadata?"""
+
+ def get_metadata(name):
+ """The named metadata resource as a string"""
+
+ def get_metadata_lines(name):
+ """Yield named metadata resource as list of non-blank non-comment lines
+
+ Leading and trailing whitespace is stripped from each line, and lines
+ with ``#`` as the first non-blank character are omitted."""
+
+ def metadata_isdir(name):
+ """Is the named metadata a directory? (like ``os.path.isdir()``)"""
+
+ def metadata_listdir(name):
+ """List of metadata names in the directory (like ``os.listdir()``)"""
+
+ def run_script(script_name, namespace):
+ """Execute the named script in the supplied namespace dictionary"""
+
+
+class IResourceProvider(IMetadataProvider):
+ """An object that provides access to package resources"""
+
+ def get_resource_filename(manager, resource_name):
+ """Return a true filesystem path for `resource_name`
+
+ `manager` must be an ``IResourceManager``"""
+
+ def get_resource_stream(manager, resource_name):
+ """Return a readable file-like object for `resource_name`
+
+ `manager` must be an ``IResourceManager``"""
+
+ def get_resource_string(manager, resource_name):
+ """Return a string containing the contents of `resource_name`
+
+ `manager` must be an ``IResourceManager``"""
+
+ def has_resource(resource_name):
+ """Does the package contain the named resource?"""
+
+ def resource_isdir(resource_name):
+ """Is the named resource a directory? (like ``os.path.isdir()``)"""
+
+ def resource_listdir(resource_name):
+ """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+class WorkingSet:
+ """A collection of active distributions on sys.path (or a similar list)"""
+
+ def __init__(self, entries=None):
+ """Create working set from list of path entries (default=sys.path)"""
+ self.entries = []
+ self.entry_keys = {}
+ self.by_key = {}
+ self.callbacks = []
+
+ if entries is None:
+ entries = sys.path
+
+ for entry in entries:
+ self.add_entry(entry)
+
+ @classmethod
+ def _build_master(cls):
+ """
+ Prepare the master working set.
+ """
+ ws = cls()
+ try:
+ from __main__ import __requires__
+ except ImportError:
+ # The main program does not list any requirements
+ return ws
+
+ # ensure the requirements are met
+ try:
+ ws.require(__requires__)
+ except VersionConflict:
+ return cls._build_from_requirements(__requires__)
+
+ return ws
+
+ @classmethod
+ def _build_from_requirements(cls, req_spec):
+ """
+ Build a working set from a requirement spec. Rewrites sys.path.
+ """
+ # try it without defaults already on sys.path
+ # by starting with an empty path
+ ws = cls([])
+ reqs = parse_requirements(req_spec)
+ dists = ws.resolve(reqs, Environment())
+ for dist in dists:
+ ws.add(dist)
+
+ # add any missing entries from sys.path
+ for entry in sys.path:
+ if entry not in ws.entries:
+ ws.add_entry(entry)
+
+ # then copy back to sys.path
+ sys.path[:] = ws.entries
+ return ws
+
+ def add_entry(self, entry):
+ """Add a path item to ``.entries``, finding any distributions on it
+
+ ``find_distributions(entry, True)`` is used to find distributions
+ corresponding to the path entry, and they are added. `entry` is
+ always appended to ``.entries``, even if it is already present.
+ (This is because ``sys.path`` can contain the same value more than
+ once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+ equal ``sys.path``.)
+ """
+ self.entry_keys.setdefault(entry, [])
+ self.entries.append(entry)
+ for dist in find_distributions(entry, True):
+ self.add(dist, entry, False)
+
+ def __contains__(self, dist):
+ """True if `dist` is the active distribution for its project"""
+ return self.by_key.get(dist.key) == dist
+
+ def find(self, req):
+ """Find a distribution matching requirement `req`
+
+ If there is an active distribution for the requested project, this
+ returns it as long as it meets the version requirement specified by
+ `req`. But, if there is an active distribution for the project and it
+ does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+ If there is no active distribution for the requested project, ``None``
+ is returned.
+ """
+ dist = self.by_key.get(req.key)
+ if dist is not None and dist not in req:
+ # XXX add more info
+ raise VersionConflict(dist, req)
+ return dist
+
+ def iter_entry_points(self, group, name=None):
+ """Yield entry point objects from `group` matching `name`
+
+ If `name` is None, yields all entry points in `group` from all
+ distributions in the working set, otherwise only ones matching
+ both `group` and `name` are yielded (in distribution order).
+ """
+ return (
+ entry
+ for dist in self
+ for entry in dist.get_entry_map(group).values()
+ if name is None or name == entry.name
+ )
+
+ def run_script(self, requires, script_name):
+ """Locate distribution for `requires` and run `script_name` script"""
+ ns = sys._getframe(1).f_globals
+ name = ns['__name__']
+ ns.clear()
+ ns['__name__'] = name
+ self.require(requires)[0].run_script(script_name, ns)
+
+ def __iter__(self):
+ """Yield distributions for non-duplicate projects in the working set
+
+ The yield order is the order in which the items' path entries were
+ added to the working set.
+ """
+ seen = {}
+ for item in self.entries:
+ if item not in self.entry_keys:
+ # workaround a cache issue
+ continue
+
+ for key in self.entry_keys[item]:
+ if key not in seen:
+ seen[key] = 1
+ yield self.by_key[key]
+
+ def add(self, dist, entry=None, insert=True, replace=False):
+ """Add `dist` to working set, associated with `entry`
+
+ If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+ On exit from this routine, `entry` is added to the end of the working
+ set's ``.entries`` (if it wasn't already present).
+
+ `dist` is only added to the working set if it's for a project that
+ doesn't already have a distribution in the set, unless `replace=True`.
+ If it's added, any callbacks registered with the ``subscribe()`` method
+ will be called.
+ """
+ if insert:
+ dist.insert_on(self.entries, entry, replace=replace)
+
+ if entry is None:
+ entry = dist.location
+ keys = self.entry_keys.setdefault(entry, [])
+ keys2 = self.entry_keys.setdefault(dist.location, [])
+ if not replace and dist.key in self.by_key:
+ # ignore hidden distros
+ return
+
+ self.by_key[dist.key] = dist
+ if dist.key not in keys:
+ keys.append(dist.key)
+ if dist.key not in keys2:
+ keys2.append(dist.key)
+ self._added_new(dist)
+
+ def resolve(self, requirements, env=None, installer=None,
+ replace_conflicting=False, extras=None):
+ """List all distributions needed to (recursively) meet `requirements`
+
+ `requirements` must be a sequence of ``Requirement`` objects. `env`,
+ if supplied, should be an ``Environment`` instance. If
+ not supplied, it defaults to all distributions available within any
+ entry or distribution in the working set. `installer`, if supplied,
+ will be invoked with each requirement that cannot be met by an
+ already-installed distribution; it should return a ``Distribution`` or
+ ``None``.
+
+ Unless `replace_conflicting=True`, raises a VersionConflict exception
+ if any requirements are found on the path that have the correct name
+ but the wrong version. Otherwise, if an `installer` is supplied it
+ will be invoked to obtain the correct version of the requirement and
+ activate it.
+
+ `extras` is a list of the extras to be used with these requirements.
+ This is important because extra requirements may look like `my_req;
+ extra = "my_extra"`, which would otherwise be interpreted as a purely
+ optional requirement. Instead, we want to be able to assert that these
+ requirements are truly required.
+ """
+
+ # set up the stack
+ requirements = list(requirements)[::-1]
+ # set of processed requirements
+ processed = {}
+ # key -> dist
+ best = {}
+ to_activate = []
+
+ req_extras = _ReqExtras()
+
+ # Mapping of requirement to set of distributions that required it;
+ # useful for reporting info about conflicts.
+ required_by = collections.defaultdict(set)
+
+ while requirements:
+ # process dependencies breadth-first
+ req = requirements.pop(0)
+ if req in processed:
+ # Ignore cyclic or redundant dependencies
+ continue
+
+ if not req_extras.markers_pass(req, extras):
+ continue
+
+ dist = best.get(req.key)
+ if dist is None:
+ # Find the best distribution and add it to the map
+ dist = self.by_key.get(req.key)
+ if dist is None or (dist not in req and replace_conflicting):
+ ws = self
+ if env is None:
+ if dist is None:
+ env = Environment(self.entries)
+ else:
+ # Use an empty environment and workingset to avoid
+ # any further conflicts with the conflicting
+ # distribution
+ env = Environment([])
+ ws = WorkingSet([])
+ dist = best[req.key] = env.best_match(
+ req, ws, installer,
+ replace_conflicting=replace_conflicting
+ )
+ if dist is None:
+ requirers = required_by.get(req, None)
+ raise DistributionNotFound(req, requirers)
+ to_activate.append(dist)
+ if dist not in req:
+ # Oops, the "best" so far conflicts with a dependency
+ dependent_req = required_by[req]
+ raise VersionConflict(dist, req).with_context(dependent_req)
+
+ # push the new requirements onto the stack
+ new_requirements = dist.requires(req.extras)[::-1]
+ requirements.extend(new_requirements)
+
+ # Register the new requirements needed by req
+ for new_requirement in new_requirements:
+ required_by[new_requirement].add(req.project_name)
+ req_extras[new_requirement] = req.extras
+
+ processed[req] = True
+
+ # return list of distros to activate
+ return to_activate
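+
+    # Illustrative sketch (not an upstream method): how a caller typically
+    # drives resolve(). 'setuptools' is assumed to be installed here.
+    def _example_resolve(self):
+        reqs = parse_requirements('setuptools')
+        for dist in self.resolve(reqs):
+            print(dist.project_name, dist.version)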
+
+ def find_plugins(
+ self, plugin_env, full_env=None, installer=None, fallback=True):
+ """Find all activatable distributions in `plugin_env`
+
+ Example usage::
+
+ distributions, errors = working_set.find_plugins(
+ Environment(plugin_dirlist)
+ )
+ # add plugins+libs to sys.path
+ map(working_set.add, distributions)
+ # display errors
+ print('Could not load', errors)
+
+ The `plugin_env` should be an ``Environment`` instance that contains
+ only distributions that are in the project's "plugin directory" or
+ directories. The `full_env`, if supplied, should be an ``Environment``
+ that contains all currently-available distributions. If `full_env` is
+ not supplied, one is created automatically from the ``WorkingSet`` this
+ method is called on, which will typically mean that every directory on
+ ``sys.path`` will be scanned for distributions.
+
+ `installer` is a standard installer callback as used by the
+ ``resolve()`` method. The `fallback` flag indicates whether we should
+ attempt to resolve older versions of a plugin if the newest version
+ cannot be resolved.
+
+ This method returns a 2-tuple: (`distributions`, `error_info`), where
+ `distributions` is a list of the distributions found in `plugin_env`
+ that were loadable, along with any other distributions that are needed
+ to resolve their dependencies. `error_info` is a dictionary mapping
+ unloadable plugin distributions to an exception instance describing the
+ error that occurred. Usually this will be a ``DistributionNotFound`` or
+ ``VersionConflict`` instance.
+ """
+
+ plugin_projects = list(plugin_env)
+ # scan project names in alphabetic order
+ plugin_projects.sort()
+
+ error_info = {}
+ distributions = {}
+
+ if full_env is None:
+ env = Environment(self.entries)
+ env += plugin_env
+ else:
+ env = full_env + plugin_env
+
+ shadow_set = self.__class__([])
+ # put all our entries in shadow_set
+ list(map(shadow_set.add, self))
+
+ for project_name in plugin_projects:
+
+ for dist in plugin_env[project_name]:
+
+ req = [dist.as_requirement()]
+
+ try:
+ resolvees = shadow_set.resolve(req, env, installer)
+
+ except ResolutionError as v:
+ # save error info
+ error_info[dist] = v
+ if fallback:
+ # try the next older version of project
+ continue
+ else:
+ # give up on this project, keep going
+ break
+
+ else:
+ list(map(shadow_set.add, resolvees))
+ distributions.update(dict.fromkeys(resolvees))
+
+ # success, no need to try any more versions of this project
+ break
+
+ distributions = list(distributions)
+ distributions.sort()
+
+ return distributions, error_info
+
+ def require(self, *requirements):
+ """Ensure that distributions matching `requirements` are activated
+
+ `requirements` must be a string or a (possibly-nested) sequence
+ thereof, specifying the distributions and versions required. The
+ return value is a sequence of the distributions that needed to be
+ activated to fulfill the requirements; all relevant distributions are
+ included, even if they were already activated in this working set.
+ """
+ needed = self.resolve(parse_requirements(requirements))
+
+ for dist in needed:
+ self.add(dist)
+
+ return needed
+
+ def subscribe(self, callback, existing=True):
+ """Invoke `callback` for all distributions
+
+ If `existing=True` (default), the callback is also invoked for each
+ distribution already in the working set.
+ """
+ if callback in self.callbacks:
+ return
+ self.callbacks.append(callback)
+ if not existing:
+ return
+ for dist in self:
+ callback(dist)
+
+ def _added_new(self, dist):
+ for callback in self.callbacks:
+ callback(dist)
+
+ def __getstate__(self):
+ return (
+ self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
+ self.callbacks[:]
+ )
+
+ def __setstate__(self, e_k_b_c):
+ entries, keys, by_key, callbacks = e_k_b_c
+ self.entries = entries[:]
+ self.entry_keys = keys.copy()
+ self.by_key = by_key.copy()
+ self.callbacks = callbacks[:]
+
+
+class _ReqExtras(dict):
+ """
+ Map each requirement to the extras that demanded it.
+ """
+
+ def markers_pass(self, req, extras=None):
+ """
+ Evaluate markers for req against each extra that
+ demanded it.
+
+ Return False if the req has a marker and fails
+ evaluation. Otherwise, return True.
+ """
+ extra_evals = (
+ req.marker.evaluate({'extra': extra})
+ for extra in self.get(req, ()) + (extras or (None,))
+ )
+ return not req.marker or any(extra_evals)
+
+
+class Environment:
+ """Searchable snapshot of distributions on a search path"""
+
+ def __init__(
+ self, search_path=None, platform=get_supported_platform(),
+ python=PY_MAJOR):
+ """Snapshot distributions available on a search path
+
+ Any distributions found on `search_path` are added to the environment.
+ `search_path` should be a sequence of ``sys.path`` items. If not
+ supplied, ``sys.path`` is used.
+
+ `platform` is an optional string specifying the name of the platform
+ that platform-specific distributions must be compatible with. If
+ unspecified, it defaults to the current platform. `python` is an
+ optional string naming the desired version of Python (e.g. ``'3.6'``);
+ it defaults to the current version.
+
+ You may explicitly set `platform` (and/or `python`) to ``None`` if you
+ wish to map *all* distributions, not just those compatible with the
+ running platform or Python version.
+ """
+ self._distmap = {}
+ self.platform = platform
+ self.python = python
+ self.scan(search_path)
+
+ def can_add(self, dist):
+ """Is distribution `dist` acceptable for this environment?
+
+ The distribution must match the platform and python version
+ requirements specified when this environment was created, or False
+ is returned.
+ """
+ py_compat = (
+ self.python is None
+ or dist.py_version is None
+ or dist.py_version == self.python
+ )
+ return py_compat and compatible_platforms(dist.platform, self.platform)
+
+ def remove(self, dist):
+ """Remove `dist` from the environment"""
+ self._distmap[dist.key].remove(dist)
+
+ def scan(self, search_path=None):
+ """Scan `search_path` for distributions usable in this environment
+
+ Any distributions found are added to the environment.
+ `search_path` should be a sequence of ``sys.path`` items. If not
+ supplied, ``sys.path`` is used. Only distributions conforming to
+ the platform/python version defined at initialization are added.
+ """
+ if search_path is None:
+ search_path = sys.path
+
+ for item in search_path:
+ for dist in find_distributions(item):
+ self.add(dist)
+
+ def __getitem__(self, project_name):
+ """Return a newest-to-oldest list of distributions for `project_name`
+
+ Uses case-insensitive `project_name` comparison, assuming all the
+ project's distributions use their project's name converted to all
+ lowercase as their key.
+
+ """
+ distribution_key = project_name.lower()
+ return self._distmap.get(distribution_key, [])
+
+ def add(self, dist):
+ """Add `dist` if we ``can_add()`` it and it has not already been added
+ """
+ if self.can_add(dist) and dist.has_version():
+ dists = self._distmap.setdefault(dist.key, [])
+ if dist not in dists:
+ dists.append(dist)
+ dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
+
+ def best_match(
+ self, req, working_set, installer=None, replace_conflicting=False):
+ """Find distribution best matching `req` and usable on `working_set`
+
+ This calls the ``find(req)`` method of the `working_set` to see if a
+ suitable distribution is already active. (This may raise
+ ``VersionConflict`` if an unsuitable version of the project is already
+ active in the specified `working_set`.) If a suitable distribution
+ isn't active, this method returns the newest distribution in the
+ environment that meets the ``Requirement`` in `req`. If no suitable
+ distribution is found, and `installer` is supplied, then the result of
+ calling the environment's ``obtain(req, installer)`` method will be
+ returned.
+ """
+ try:
+ dist = working_set.find(req)
+ except VersionConflict:
+ if not replace_conflicting:
+ raise
+ dist = None
+ if dist is not None:
+ return dist
+ for dist in self[req.key]:
+ if dist in req:
+ return dist
+ # try to download/install
+ return self.obtain(req, installer)
+
+ def obtain(self, requirement, installer=None):
+ """Obtain a distribution matching `requirement` (e.g. via download)
+
+ Obtain a distro that matches requirement (e.g. via download). In the
+ base ``Environment`` class, this routine just returns
+ ``installer(requirement)``, unless `installer` is None, in which case
+ None is returned instead. This method is a hook that allows subclasses
+ to attempt other ways of obtaining a distribution before falling back
+ to the `installer` argument."""
+ if installer is not None:
+ return installer(requirement)
+
+ def __iter__(self):
+ """Yield the unique project names of the available distributions"""
+ for key in self._distmap.keys():
+ if self[key]:
+ yield key
+
+ def __iadd__(self, other):
+ """In-place addition of a distribution or environment"""
+ if isinstance(other, Distribution):
+ self.add(other)
+ elif isinstance(other, Environment):
+ for project in other:
+ for dist in other[project]:
+ self.add(dist)
+ else:
+ raise TypeError("Can't add %r to environment" % (other,))
+ return self
+
+ def __add__(self, other):
+ """Add an environment or distribution to an environment"""
+ new = self.__class__([], platform=None, python=None)
+ for env in self, other:
+ new += env
+ return new
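+
+
+# Illustrative sketch (not upstream code): a typical Environment round trip.
+# The snapshot indexes distributions by lowercased project name, newest first.
+def _example_environment():
+    env = Environment()  # scans sys.path by default
+    for project_name in env:
+        newest = env[project_name][0]
+        print(project_name, newest.version)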
+
+
+# XXX backward compatibility
+AvailableDistributions = Environment
+
+
+class ExtractionError(RuntimeError):
+ """An error occurred extracting a resource
+
+ The following attributes are available from instances of this exception:
+
+ manager
+ The resource manager that raised this exception
+
+ cache_path
+ The base directory for resource extraction
+
+ original_error
+ The exception instance that caused extraction to fail
+ """
+
+
+class ResourceManager:
+ """Manage resource extraction and packages"""
+ extraction_path = None
+
+ def __init__(self):
+ self.cached_files = {}
+
+ def resource_exists(self, package_or_requirement, resource_name):
+ """Does the named resource exist?"""
+ return get_provider(package_or_requirement).has_resource(resource_name)
+
+ def resource_isdir(self, package_or_requirement, resource_name):
+ """Is the named resource an existing directory?"""
+ return get_provider(package_or_requirement).resource_isdir(
+ resource_name
+ )
+
+ def resource_filename(self, package_or_requirement, resource_name):
+ """Return a true filesystem path for specified resource"""
+ return get_provider(package_or_requirement).get_resource_filename(
+ self, resource_name
+ )
+
+ def resource_stream(self, package_or_requirement, resource_name):
+ """Return a readable file-like object for specified resource"""
+ return get_provider(package_or_requirement).get_resource_stream(
+ self, resource_name
+ )
+
+ def resource_string(self, package_or_requirement, resource_name):
+ """Return specified resource as a string"""
+ return get_provider(package_or_requirement).get_resource_string(
+ self, resource_name
+ )
+
+ def resource_listdir(self, package_or_requirement, resource_name):
+ """List the contents of the named resource directory"""
+ return get_provider(package_or_requirement).resource_listdir(
+ resource_name
+ )
+
+ def extraction_error(self):
+ """Give an error message for problems extracting file(s)"""
+
+ old_exc = sys.exc_info()[1]
+ cache_path = self.extraction_path or get_default_cache()
+
+ tmpl = textwrap.dedent("""
+ Can't extract file(s) to egg cache
+
+ The following error occurred while trying to extract file(s)
+ to the Python egg cache:
+
+ {old_exc}
+
+ The Python egg cache directory is currently set to:
+
+ {cache_path}
+
+ Perhaps your account does not have write access to this directory?
+ You can change the cache directory by setting the PYTHON_EGG_CACHE
+ environment variable to point to an accessible directory.
+ """).lstrip()
+ err = ExtractionError(tmpl.format(**locals()))
+ err.manager = self
+ err.cache_path = cache_path
+ err.original_error = old_exc
+ raise err
+
+ def get_cache_path(self, archive_name, names=()):
+ """Return absolute location in cache for `archive_name` and `names`
+
+ The parent directory of the resulting path will be created if it does
+ not already exist. `archive_name` should be the base filename of the
+ enclosing egg (which may not be the name of the enclosing zipfile!),
+ including its ".egg" extension. `names`, if provided, should be a
+ sequence of path name parts "under" the egg's extraction location.
+
+ This method should only be called by resource providers that need to
+ obtain an extraction location, and only for names they intend to
+ extract, as it tracks the generated names for possible cleanup later.
+ """
+ extract_path = self.extraction_path or get_default_cache()
+ target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
+ try:
+ _bypass_ensure_directory(target_path)
+ except Exception:
+ self.extraction_error()
+
+ self._warn_unsafe_extraction_path(extract_path)
+
+ self.cached_files[target_path] = 1
+ return target_path
+
+ @staticmethod
+ def _warn_unsafe_extraction_path(path):
+ """
+ If the default extraction path is overridden and set to an insecure
+ location, such as /tmp, it opens up an opportunity for an attacker to
+ replace an extracted file with an unauthorized payload. Warn the user
+ if a known insecure location is used.
+
+ See Distribute #375 for more details.
+ """
+ if os.name == 'nt' and not path.startswith(os.environ['windir']):
+ # On Windows, permissions are generally restrictive by default
+ # and temp directories are not writable by other users, so
+ # bypass the warning.
+ return
+ mode = os.stat(path).st_mode
+ if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
+ msg = (
+ "%s is writable by group/others and vulnerable to attack "
+ "when "
+ "used with get_resource_filename. Consider a more secure "
+ "location (set with .set_extraction_path or the "
+ "PYTHON_EGG_CACHE environment variable)." % path
+ )
+ warnings.warn(msg, UserWarning)
+
+ def postprocess(self, tempname, filename):
+ """Perform any platform-specific postprocessing of `tempname`
+
+ This is where Mac header rewrites should be done; other platforms don't
+ have anything special they should do.
+
+ Resource providers should call this method ONLY after successfully
+ extracting a compressed resource. They must NOT call it on resources
+ that are already in the filesystem.
+
+ `tempname` is the current (temporary) name of the file, and `filename`
+ is the name it will be renamed to by the caller after this routine
+ returns.
+ """
+
+ if os.name == 'posix':
+ # Make the resource executable
+ mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
+ os.chmod(tempname, mode)
+
+ def set_extraction_path(self, path):
+ """Set the base path where resources will be extracted to, if needed.
+
+ If you do not call this routine before any extractions take place, the
+ path defaults to the return value of ``get_default_cache()``. (Which
+ is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+ platform-specific fallbacks. See that routine's documentation for more
+ details.)
+
+ Resources are extracted to subdirectories of this path based upon
+ information given by the ``IResourceProvider``. You may set this to a
+ temporary directory, but then you must call ``cleanup_resources()`` to
+ delete the extracted files when done. There is no guarantee that
+ ``cleanup_resources()`` will be able to remove all extracted files.
+
+ (Note: you may not change the extraction path for a given resource
+ manager once resources have been extracted, unless you first call
+ ``cleanup_resources()``.)
+ """
+ if self.cached_files:
+ raise ValueError(
+ "Can't change extraction path, files already extracted"
+ )
+
+ self.extraction_path = path
+
+ def cleanup_resources(self, force=False):
+ """
+ Delete all extracted resource files and directories, returning a list
+ of the file and directory names that could not be successfully removed.
+ This function does not have any concurrency protection, so it should
+ generally only be called when the extraction path is a temporary
+ directory exclusive to a single process. This method is not
+ automatically called; you must call it explicitly or register it as an
+ ``atexit`` function if you wish to ensure cleanup of a temporary
+ directory used for extractions.
+ """
+ # XXX
+
+
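+# Illustrative sketch (not upstream code): the ResourceManager methods above
+# also back module-level convenience functions such as resource_exists() and
+# resource_string(), which are among the globals bound elsewhere in this
+# module. The package and resource names here are hypothetical.
+def _example_resource_access():
+    if resource_exists('somepkg', 'data/config.txt'):
+        raw = resource_string('somepkg', 'data/config.txt')
+        print(len(raw))
+
+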
+def get_default_cache():
+ """
+ Return the ``PYTHON_EGG_CACHE`` environment variable
+ or a platform-relevant user cache dir for an app
+ named "Python-Eggs".
+ """
+ return (
+ os.environ.get('PYTHON_EGG_CACHE')
+ or appdirs.user_cache_dir(appname='Python-Eggs')
+ )
+
+
+def safe_name(name):
+ """Convert an arbitrary string to a standard distribution name
+
+ Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+ """
+ return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+ """
+ Convert an arbitrary string to a standard version string
+ """
+ try:
+ # normalize the version
+ return str(packaging.version.Version(version))
+ except packaging.version.InvalidVersion:
+ version = version.replace(' ', '.')
+ return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+ """Convert an arbitrary string to a standard 'extra' name
+
+ Any runs of non-alphanumeric characters are replaced with a single '_',
+ and the result is always lowercased.
+ """
+ return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
+
+
+def to_filename(name):
+ """Convert a project or version name to its filename-escaped form
+
+ Any '-' characters are currently replaced with '_'.
+ """
+ return name.replace('-', '_')
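+
+
+# Illustrative sketch (not upstream code) of the name-normalization helpers
+# above.
+def _example_name_normalization():
+    assert safe_name('my.project_name') == 'my.project-name'
+    assert safe_version('1.0 beta') == '1.0.beta'
+    assert to_filename(safe_name('my-project')) == 'my_project'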
+
+
+def invalid_marker(text):
+ """
+ Validate text as a PEP 508 environment marker; return an exception
+ if invalid or False otherwise.
+ """
+ try:
+ evaluate_marker(text)
+ except SyntaxError as e:
+ e.filename = None
+ e.lineno = None
+ return e
+ return False
+
+
+def evaluate_marker(text, extra=None):
+ """
+ Evaluate a PEP 508 environment marker.
+ Return a boolean indicating the marker result in this environment.
+ Raise SyntaxError if marker is invalid.
+
+ This implementation uses the 'pyparsing' module.
+ """
+ try:
+ marker = packaging.markers.Marker(text)
+ return marker.evaluate()
+ except packaging.markers.InvalidMarker as e:
+ raise SyntaxError(e)
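+
+
+# Illustrative sketch (not upstream code): marker validation vs. evaluation.
+# invalid_marker() returns the SyntaxError instance (truthy) for bad input
+# and False for valid markers.
+def _example_markers():
+    assert invalid_marker('python_version >= "2.7"') is False
+    assert invalid_marker('this is not a marker')
+    assert evaluate_marker('python_version >= "2.7"') in (True, False)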
+
+
+class NullProvider:
+ """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+ egg_name = None
+ egg_info = None
+ loader = None
+
+ def __init__(self, module):
+ self.loader = getattr(module, '__loader__', None)
+ self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+ def get_resource_filename(self, manager, resource_name):
+ return self._fn(self.module_path, resource_name)
+
+ def get_resource_stream(self, manager, resource_name):
+ return io.BytesIO(self.get_resource_string(manager, resource_name))
+
+ def get_resource_string(self, manager, resource_name):
+ return self._get(self._fn(self.module_path, resource_name))
+
+ def has_resource(self, resource_name):
+ return self._has(self._fn(self.module_path, resource_name))
+
+ def _get_metadata_path(self, name):
+ return self._fn(self.egg_info, name)
+
+ def has_metadata(self, name):
+ if not self.egg_info:
+ return self.egg_info
+
+ path = self._get_metadata_path(name)
+ return self._has(path)
+
+ def get_metadata(self, name):
+ if not self.egg_info:
+ return ""
+ value = self._get(self._fn(self.egg_info, name))
+ return value.decode('utf-8') if six.PY3 else value
+
+ def get_metadata_lines(self, name):
+ return yield_lines(self.get_metadata(name))
+
+ def resource_isdir(self, resource_name):
+ return self._isdir(self._fn(self.module_path, resource_name))
+
+ def metadata_isdir(self, name):
+ return self.egg_info and self._isdir(self._fn(self.egg_info, name))
+
+ def resource_listdir(self, resource_name):
+ return self._listdir(self._fn(self.module_path, resource_name))
+
+ def metadata_listdir(self, name):
+ if self.egg_info:
+ return self._listdir(self._fn(self.egg_info, name))
+ return []
+
+ def run_script(self, script_name, namespace):
+ script = 'scripts/' + script_name
+ if not self.has_metadata(script):
+ raise ResolutionError(
+ "Script {script!r} not found in metadata at {self.egg_info!r}"
+ .format(**locals()),
+ )
+ script_text = self.get_metadata(script).replace('\r\n', '\n')
+ script_text = script_text.replace('\r', '\n')
+ script_filename = self._fn(self.egg_info, script)
+ namespace['__file__'] = script_filename
+ if os.path.exists(script_filename):
+ source = open(script_filename).read()
+ code = compile(source, script_filename, 'exec')
+ exec(code, namespace, namespace)
+ else:
+ from linecache import cache
+ cache[script_filename] = (
+ len(script_text), 0, script_text.split('\n'), script_filename
+ )
+ script_code = compile(script_text, script_filename, 'exec')
+ exec(script_code, namespace, namespace)
+
+ def _has(self, path):
+ raise NotImplementedError(
+ "Can't perform this operation for unregistered loader type"
+ )
+
+ def _isdir(self, path):
+ raise NotImplementedError(
+ "Can't perform this operation for unregistered loader type"
+ )
+
+ def _listdir(self, path):
+ raise NotImplementedError(
+ "Can't perform this operation for unregistered loader type"
+ )
+
+ def _fn(self, base, resource_name):
+ self._validate_resource_path(resource_name)
+ if resource_name:
+ return os.path.join(base, *resource_name.split('/'))
+ return base
+
+ @staticmethod
+ def _validate_resource_path(path):
+ """
+ Validate the resource paths according to the docs.
+ https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access
+
+ >>> warned = getfixture('recwarn')
+ >>> warnings.simplefilter('always')
+ >>> vrp = NullProvider._validate_resource_path
+ >>> vrp('foo/bar.txt')
+ >>> bool(warned)
+ False
+ >>> vrp('../foo/bar.txt')
+ >>> bool(warned)
+ True
+ >>> warned.clear()
+ >>> vrp('/foo/bar.txt')
+ >>> bool(warned)
+ True
+ >>> vrp('foo/../../bar.txt')
+ >>> bool(warned)
+ True
+ >>> warned.clear()
+ >>> vrp('foo/f../bar.txt')
+ >>> bool(warned)
+ False
+
+ Windows path separators are straight-up disallowed.
+ >>> vrp(r'\\foo/bar.txt')
+ Traceback (most recent call last):
+ ...
+ ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+ >>> vrp(r'C:\\foo/bar.txt')
+ Traceback (most recent call last):
+ ...
+ ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+ Blank values are allowed
+
+ >>> vrp('')
+ >>> bool(warned)
+ False
+
+ Non-string values are not.
+
+ >>> vrp(None)
+ Traceback (most recent call last):
+ ...
+ AttributeError: ...
+ """
+ invalid = (
+ os.path.pardir in path.split(posixpath.sep) or
+ posixpath.isabs(path) or
+ ntpath.isabs(path)
+ )
+ if not invalid:
+ return
+
+ msg = "Use of .. or absolute path in a resource path is not allowed."
+
+ # Aggressively disallow Windows absolute paths
+ if ntpath.isabs(path) and not posixpath.isabs(path):
+ raise ValueError(msg)
+
+ # for compatibility, warn; in future
+ # raise ValueError(msg)
+ warnings.warn(
+ msg[:-1] + " and will raise exceptions in a future release.",
+ DeprecationWarning,
+ stacklevel=4,
+ )
+
+ def _get(self, path):
+ if hasattr(self.loader, 'get_data'):
+ return self.loader.get_data(path)
+ raise NotImplementedError(
+ "Can't perform this operation for loaders without 'get_data()'"
+ )
+
+
+register_loader_type(object, NullProvider)
+
+
+class EggProvider(NullProvider):
+ """Provider based on a virtual filesystem"""
+
+ def __init__(self, module):
+ NullProvider.__init__(self, module)
+ self._setup_prefix()
+
+ def _setup_prefix(self):
+ # we assume here that our metadata may be nested inside a "basket"
+ # of multiple eggs; that's why we use module_path instead of .archive
+ path = self.module_path
+ old = None
+ while path != old:
+ if _is_egg_path(path):
+ self.egg_name = os.path.basename(path)
+ self.egg_info = os.path.join(path, 'EGG-INFO')
+ self.egg_root = path
+ break
+ old = path
+ path, base = os.path.split(path)
+
+
+class DefaultProvider(EggProvider):
+ """Provides access to package resources in the filesystem"""
+
+ def _has(self, path):
+ return os.path.exists(path)
+
+ def _isdir(self, path):
+ return os.path.isdir(path)
+
+ def _listdir(self, path):
+ return os.listdir(path)
+
+ def get_resource_stream(self, manager, resource_name):
+ return open(self._fn(self.module_path, resource_name), 'rb')
+
+ def _get(self, path):
+ with open(path, 'rb') as stream:
+ return stream.read()
+
+ @classmethod
+ def _register(cls):
+ loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
+ for name in loader_names:
+ loader_cls = getattr(importlib_machinery, name, type(None))
+ register_loader_type(loader_cls, cls)
+
+
+DefaultProvider._register()
+
+
+class EmptyProvider(NullProvider):
+ """Provider that returns nothing for all requests"""
+
+ module_path = None
+
+ _isdir = _has = lambda self, path: False
+
+ def _get(self, path):
+ return ''
+
+ def _listdir(self, path):
+ return []
+
+ def __init__(self):
+ pass
+
+
+empty_provider = EmptyProvider()
+
+
+class ZipManifests(dict):
+ """
+ zip manifest builder
+ """
+
+ @classmethod
+ def build(cls, path):
+ """
+ Build a dictionary similar to the zipimport directory
+ caches, except instead of tuples, store ZipInfo objects.
+
+ Use a platform-specific path separator (os.sep) for the path keys
+ for compatibility with pypy on Windows.
+ """
+ with zipfile.ZipFile(path) as zfile:
+ items = (
+ (
+ name.replace('/', os.sep),
+ zfile.getinfo(name),
+ )
+ for name in zfile.namelist()
+ )
+ return dict(items)
+
+ load = build
+
+
+class MemoizedZipManifests(ZipManifests):
+ """
+ Memoized zipfile manifests.
+ """
+ manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+
+ def load(self, path):
+ """
+ Load a manifest at path or return a suitable manifest already loaded.
+ """
+ path = os.path.normpath(path)
+ mtime = os.stat(path).st_mtime
+
+ if path not in self or self[path].mtime != mtime:
+ manifest = self.build(path)
+ self[path] = self.manifest_mod(manifest, mtime)
+
+ return self[path].manifest
+
+
+class ZipProvider(EggProvider):
+ """Resource support for zips and eggs"""
+
+ eagers = None
+ _zip_manifests = MemoizedZipManifests()
+
+ def __init__(self, module):
+ EggProvider.__init__(self, module)
+ self.zip_pre = self.loader.archive + os.sep
+
+ def _zipinfo_name(self, fspath):
+ # Convert a virtual filename (full path to file) into a zipfile subpath
+ # usable with the zipimport directory cache for our target archive
+ fspath = fspath.rstrip(os.sep)
+ if fspath == self.loader.archive:
+ return ''
+ if fspath.startswith(self.zip_pre):
+ return fspath[len(self.zip_pre):]
+ raise AssertionError(
+ "%s is not a subpath of %s" % (fspath, self.zip_pre)
+ )
+
+ def _parts(self, zip_path):
+ # Convert a zipfile subpath into an egg-relative path part list.
+ # pseudo-fs path
+ fspath = self.zip_pre + zip_path
+ if fspath.startswith(self.egg_root + os.sep):
+ return fspath[len(self.egg_root) + 1:].split(os.sep)
+ raise AssertionError(
+ "%s is not a subpath of %s" % (fspath, self.egg_root)
+ )
+
+ @property
+ def zipinfo(self):
+ return self._zip_manifests.load(self.loader.archive)
+
+ def get_resource_filename(self, manager, resource_name):
+ if not self.egg_name:
+ raise NotImplementedError(
+ "resource_filename() only supported for .egg, not .zip"
+ )
+ # no need to lock for extraction, since we use temp names
+ zip_path = self._resource_to_zip(resource_name)
+ eagers = self._get_eager_resources()
+ if '/'.join(self._parts(zip_path)) in eagers:
+ for name in eagers:
+ self._extract_resource(manager, self._eager_to_zip(name))
+ return self._extract_resource(manager, zip_path)
+
+ @staticmethod
+ def _get_date_and_size(zip_stat):
+ size = zip_stat.file_size
+ # ymdhms+wday, yday, dst
+ date_time = zip_stat.date_time + (0, 0, -1)
+ # 1980 offset already done
+ timestamp = time.mktime(date_time)
+ return timestamp, size
+
+ def _extract_resource(self, manager, zip_path):
+
+ if zip_path in self._index():
+ for name in self._index()[zip_path]:
+ last = self._extract_resource(
+ manager, os.path.join(zip_path, name)
+ )
+ # return the extracted directory name
+ return os.path.dirname(last)
+
+ timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+
+ if not WRITE_SUPPORT:
+ raise IOError('"os.rename" and "os.unlink" are not supported '
+ 'on this platform')
+ try:
+
+ real_path = manager.get_cache_path(
+ self.egg_name, self._parts(zip_path)
+ )
+
+ if self._is_current(real_path, zip_path):
+ return real_path
+
+ outf, tmpnam = _mkstemp(
+ ".$extract",
+ dir=os.path.dirname(real_path),
+ )
+ os.write(outf, self.loader.get_data(zip_path))
+ os.close(outf)
+ utime(tmpnam, (timestamp, timestamp))
+ manager.postprocess(tmpnam, real_path)
+
+ try:
+ rename(tmpnam, real_path)
+
+ except os.error:
+ if os.path.isfile(real_path):
+ if self._is_current(real_path, zip_path):
+ # the file became current since it was checked above,
+ # so proceed.
+ return real_path
+ # Windows, del old file and retry
+ elif os.name == 'nt':
+ unlink(real_path)
+ rename(tmpnam, real_path)
+ return real_path
+ raise
+
+ except os.error:
+ # report a user-friendly error
+ manager.extraction_error()
+
+ return real_path
+
+ def _is_current(self, file_path, zip_path):
+ """
+ Return True if the file_path is current for this zip_path
+ """
+ timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+ if not os.path.isfile(file_path):
+ return False
+ stat = os.stat(file_path)
+ if stat.st_size != size or stat.st_mtime != timestamp:
+ return False
+ # check that the contents match
+ zip_contents = self.loader.get_data(zip_path)
+ with open(file_path, 'rb') as f:
+ file_contents = f.read()
+ return zip_contents == file_contents
+
+ def _get_eager_resources(self):
+ if self.eagers is None:
+ eagers = []
+ for name in ('native_libs.txt', 'eager_resources.txt'):
+ if self.has_metadata(name):
+ eagers.extend(self.get_metadata_lines(name))
+ self.eagers = eagers
+ return self.eagers
+
+ def _index(self):
+ try:
+ return self._dirindex
+ except AttributeError:
+ ind = {}
+ for path in self.zipinfo:
+ parts = path.split(os.sep)
+ while parts:
+ parent = os.sep.join(parts[:-1])
+ if parent in ind:
+ ind[parent].append(parts[-1])
+ break
+ else:
+ ind[parent] = [parts.pop()]
+ self._dirindex = ind
+ return ind
+
+ def _has(self, fspath):
+ zip_path = self._zipinfo_name(fspath)
+ return zip_path in self.zipinfo or zip_path in self._index()
+
+ def _isdir(self, fspath):
+ return self._zipinfo_name(fspath) in self._index()
+
+ def _listdir(self, fspath):
+ return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+ def _eager_to_zip(self, resource_name):
+ return self._zipinfo_name(self._fn(self.egg_root, resource_name))
+
+ def _resource_to_zip(self, resource_name):
+ return self._zipinfo_name(self._fn(self.module_path, resource_name))
+
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+class FileMetadata(EmptyProvider):
+ """Metadata handler for standalone PKG-INFO files
+
+ Usage::
+
+ metadata = FileMetadata("/path/to/PKG-INFO")
+
+ This provider rejects all data and metadata requests except for PKG-INFO,
+ which is treated as existing, and will be the contents of the file at
+ the provided location.
+ """
+
+ def __init__(self, path):
+ self.path = path
+
+ def _get_metadata_path(self, name):
+ return self.path
+
+ def has_metadata(self, name):
+ return name == 'PKG-INFO' and os.path.isfile(self.path)
+
+ def get_metadata(self, name):
+ if name != 'PKG-INFO':
+ raise KeyError("No metadata except PKG-INFO is available")
+
+ with io.open(self.path, encoding='utf-8', errors="replace") as f:
+ metadata = f.read()
+ self._warn_on_replacement(metadata)
+ return metadata
+
+ def _warn_on_replacement(self, metadata):
+ # Python 2.7 compat for: replacement_char = '�'
+ replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
+ if replacement_char in metadata:
+ tmpl = "{self.path} could not be properly decoded in UTF-8"
+ msg = tmpl.format(**locals())
+ warnings.warn(msg)
+
+ def get_metadata_lines(self, name):
+ return yield_lines(self.get_metadata(name))
+
+
+class PathMetadata(DefaultProvider):
+ """Metadata provider for egg directories
+
+ Usage::
+
+ # Development eggs:
+
+ egg_info = "/path/to/PackageName.egg-info"
+ base_dir = os.path.dirname(egg_info)
+ metadata = PathMetadata(base_dir, egg_info)
+ dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
+
+ # Unpacked egg directories:
+
+ egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
+ dist = Distribution.from_filename(egg_path, metadata=metadata)
+ """
+
+ def __init__(self, path, egg_info):
+ self.module_path = path
+ self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+ """Metadata provider for .egg files"""
+
+ def __init__(self, importer):
+ """Create a metadata provider from a zipimporter"""
+
+ self.zip_pre = importer.archive + os.sep
+ self.loader = importer
+ if importer.prefix:
+ self.module_path = os.path.join(importer.archive, importer.prefix)
+ else:
+ self.module_path = importer.archive
+ self._setup_prefix()
+
+
+_declare_state('dict', _distribution_finders={})
+
+
+def register_finder(importer_type, distribution_finder):
+ """Register `distribution_finder` to find distributions in sys.path items
+
+ `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+ handler), and `distribution_finder` is a callable that, passed a path
+ item and the importer instance, yields ``Distribution`` instances found on
+ that path item. See ``pkg_resources.find_on_path`` for an example."""
+ _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item, only=False):
+ """Yield distributions accessible via `path_item`"""
+ importer = get_importer(path_item)
+ finder = _find_adapter(_distribution_finders, importer)
+ return finder(importer, path_item, only)
+
+
+def find_eggs_in_zip(importer, path_item, only=False):
+ """
+ Find eggs in zip files; possibly multiple nested eggs.
+ """
+ if importer.archive.endswith('.whl'):
+ # wheels are not supported with this finder
+ # they don't have PKG-INFO metadata, and won't ever contain eggs
+ return
+ metadata = EggMetadata(importer)
+ if metadata.has_metadata('PKG-INFO'):
+ yield Distribution.from_filename(path_item, metadata=metadata)
+ if only:
+ # don't yield nested distros
+ return
+ for subitem in metadata.resource_listdir(''):
+ if _is_egg_path(subitem):
+ subpath = os.path.join(path_item, subitem)
+ dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
+ for dist in dists:
+ yield dist
+ elif subitem.lower().endswith('.dist-info'):
+ subpath = os.path.join(path_item, subitem)
+ submeta = EggMetadata(zipimport.zipimporter(subpath))
+ submeta.egg_info = subpath
+ yield Distribution.from_location(path_item, subitem, submeta)
+
+
+register_finder(zipimport.zipimporter, find_eggs_in_zip)
+
+
+def find_nothing(importer, path_item, only=False):
+ return ()
+
+
+register_finder(object, find_nothing)
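+
+# Illustrative sketch (not upstream code): a finder for a hypothetical
+# importer type, following the protocol documented in register_finder() --
+# a callable taking (importer, path_item, only) and yielding Distributions:
+#
+#     def find_my_dists(importer, path_item, only=False):
+#         for name in my_scan(path_item):  # my_scan is hypothetical
+#             yield Distribution.from_location(path_item, name)
+#
+#     register_finder(MyImporter, find_my_dists)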
+
+
+def _by_version_descending(names):
+ """
+ Given a list of filenames, return them in descending order
+ by version number.
+
+ >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
+ >>> _by_version_descending(names)
+ ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
+ >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
+ >>> _by_version_descending(names)
+ ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
+ >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
+ >>> _by_version_descending(names)
+ ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
+ """
+ def _by_version(name):
+ """
+ Parse each component of the filename
+ """
+ name, ext = os.path.splitext(name)
+ parts = itertools.chain(name.split('-'), [ext])
+ return [packaging.version.parse(part) for part in parts]
+
+ return sorted(names, key=_by_version, reverse=True)
+
+
+def find_on_path(importer, path_item, only=False):
+ """Yield distributions accessible on a sys.path directory"""
+ path_item = _normalize_cached(path_item)
+
+ if _is_unpacked_egg(path_item):
+ yield Distribution.from_filename(
+ path_item, metadata=PathMetadata(
+ path_item, os.path.join(path_item, 'EGG-INFO')
+ )
+ )
+ return
+
+ entries = safe_listdir(path_item)
+
+ # for performance, before sorting by version,
+ # screen entries for only those that will yield
+ # distributions
+ filtered = (
+ entry
+ for entry in entries
+ if dist_factory(path_item, entry, only)
+ )
+
+ # scan for .egg and .egg-info in directory
+ path_item_entries = _by_version_descending(filtered)
+ for entry in path_item_entries:
+ fullpath = os.path.join(path_item, entry)
+ factory = dist_factory(path_item, entry, only)
+ for dist in factory(fullpath):
+ yield dist
+
+
+def dist_factory(path_item, entry, only):
+ """
+ Return a dist_factory for a path_item and entry
+ """
+ lower = entry.lower()
+ is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info')))
+ return (
+ distributions_from_metadata
+ if is_meta else
+ find_distributions
+ if not only and _is_egg_path(entry) else
+ resolve_egg_link
+ if not only and lower.endswith('.egg-link') else
+ NoDists()
+ )
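+
+# For example (illustrative): an entry named 'Foo.egg-info' or
+# 'Foo.dist-info' selects distributions_from_metadata; when ``only`` is
+# false, 'Foo.egg' selects find_distributions and 'Foo.egg-link' selects
+# resolve_egg_link; any other entry gets the falsy NoDists().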
+
+
+class NoDists:
+ """
+ >>> bool(NoDists())
+ False
+
+ >>> list(NoDists()('anything'))
+ []
+ """
+ def __bool__(self):
+ return False
+ if six.PY2:
+ __nonzero__ = __bool__
+
+ def __call__(self, fullpath):
+ return iter(())
+
+
+def safe_listdir(path):
+ """
+ Attempt to list contents of path, but suppress some exceptions.
+ """
+ try:
+ return os.listdir(path)
+ except (PermissionError, NotADirectoryError):
+ pass
+ except OSError as e:
+        # Ignore the directory if it does not exist, is not a directory,
+        # or permission is denied
+ ignorable = (
+ e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
+ # Python 2 on Windows needs to be handled this way :(
+ or getattr(e, "winerror", None) == 267
+ )
+ if not ignorable:
+ raise
+ return ()
+
+
+def distributions_from_metadata(path):
+ root = os.path.dirname(path)
+ if os.path.isdir(path):
+ if len(os.listdir(path)) == 0:
+ # empty metadata dir; skip
+ return
+ metadata = PathMetadata(root, path)
+ else:
+ metadata = FileMetadata(path)
+ entry = os.path.basename(path)
+ yield Distribution.from_location(
+ root, entry, metadata, precedence=DEVELOP_DIST,
+ )
+
+
+def non_empty_lines(path):
+ """
+ Yield non-empty lines from file at path
+ """
+ with open(path) as f:
+ for line in f:
+ line = line.strip()
+ if line:
+ yield line
+
+
+def resolve_egg_link(path):
+ """
+ Given a path to an .egg-link, resolve distributions
+ present in the referenced path.
+ """
+ referenced_paths = non_empty_lines(path)
+ resolved_paths = (
+ os.path.join(os.path.dirname(path), ref)
+ for ref in referenced_paths
+ )
+ dist_groups = map(find_distributions, resolved_paths)
+ return next(dist_groups, ())
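+
+# An .egg-link file (as written by ``setup.py develop``) contains a path to
+# the project directory, e.g. a single line ``/home/user/src/foo``; each
+# non-empty line is resolved relative to the .egg-link's own directory.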
+
+
+register_finder(pkgutil.ImpImporter, find_on_path)
+
+if hasattr(importlib_machinery, 'FileFinder'):
+ register_finder(importlib_machinery.FileFinder, find_on_path)
+
+_declare_state('dict', _namespace_handlers={})
+_declare_state('dict', _namespace_packages={})
+
+
+def register_namespace_handler(importer_type, namespace_handler):
+ """Register `namespace_handler` to declare namespace packages
+
+ `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+ handler), and `namespace_handler` is a callable like this::
+
+ def namespace_handler(importer, path_entry, moduleName, module):
+ # return a path_entry to use for child packages
+
+ Namespace handlers are only called if the importer object has already
+ agreed that it can handle the relevant path item, and they should only
+ return a subpath if the module __path__ does not already contain an
+ equivalent subpath. For an example namespace handler, see
+ ``pkg_resources.file_ns_handler``.
+ """
+ _namespace_handlers[importer_type] = namespace_handler
+
+
+def _handle_ns(packageName, path_item):
+ """Ensure that named package includes a subpath of path_item (if needed)"""
+
+ importer = get_importer(path_item)
+ if importer is None:
+ return None
+
+ # capture warnings due to #1111
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ loader = importer.find_module(packageName)
+
+ if loader is None:
+ return None
+ module = sys.modules.get(packageName)
+ if module is None:
+ module = sys.modules[packageName] = types.ModuleType(packageName)
+ module.__path__ = []
+ _set_parent_ns(packageName)
+ elif not hasattr(module, '__path__'):
+ raise TypeError("Not a package:", packageName)
+ handler = _find_adapter(_namespace_handlers, importer)
+ subpath = handler(importer, path_item, packageName, module)
+ if subpath is not None:
+ path = module.__path__
+ path.append(subpath)
+ loader.load_module(packageName)
+ _rebuild_mod_path(path, packageName, module)
+ return subpath
+
+
+def _rebuild_mod_path(orig_path, package_name, module):
+ """
+ Rebuild module.__path__ ensuring that all entries are ordered
+ corresponding to their sys.path order
+ """
+ sys_path = [_normalize_cached(p) for p in sys.path]
+
+ def safe_sys_path_index(entry):
+ """
+ Workaround for #520 and #513.
+ """
+ try:
+ return sys_path.index(entry)
+ except ValueError:
+ return float('inf')
+
+ def position_in_sys_path(path):
+ """
+ Return the ordinal of the path based on its position in sys.path
+ """
+ path_parts = path.split(os.sep)
+ module_parts = package_name.count('.') + 1
+ parts = path_parts[:-module_parts]
+ return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
+
+ new_path = sorted(orig_path, key=position_in_sys_path)
+ new_path = [_normalize_cached(p) for p in new_path]
+
+ if isinstance(module.__path__, list):
+ module.__path__[:] = new_path
+ else:
+ module.__path__ = new_path
+
+
+def declare_namespace(packageName):
+ """Declare that package 'packageName' is a namespace package"""
+
+ _imp.acquire_lock()
+ try:
+ if packageName in _namespace_packages:
+ return
+
+ path = sys.path
+ parent, _, _ = packageName.rpartition('.')
+
+ if parent:
+ declare_namespace(parent)
+ if parent not in _namespace_packages:
+ __import__(parent)
+ try:
+ path = sys.modules[parent].__path__
+ except AttributeError:
+ raise TypeError("Not a package:", parent)
+
+ # Track what packages are namespaces, so when new path items are added,
+ # they can be updated
+ _namespace_packages.setdefault(parent or None, []).append(packageName)
+ _namespace_packages.setdefault(packageName, [])
+
+ for path_item in path:
+ # Ensure all the parent's path items are reflected in the child,
+ # if they apply
+ _handle_ns(packageName, path_item)
+
+ finally:
+ _imp.release_lock()
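+
+# Canonical usage (illustrative), in the __init__.py of each portion of a
+# pre-PEP 420 namespace package:
+#
+#     __import__('pkg_resources').declare_namespace(__name__)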
+
+
+def fixup_namespace_packages(path_item, parent=None):
+ """Ensure that previously-declared namespace packages include path_item"""
+ _imp.acquire_lock()
+ try:
+ for package in _namespace_packages.get(parent, ()):
+ subpath = _handle_ns(package, path_item)
+ if subpath:
+ fixup_namespace_packages(subpath, package)
+ finally:
+ _imp.release_lock()
+
+
+def file_ns_handler(importer, path_item, packageName, module):
+ """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+ subpath = os.path.join(path_item, packageName.split('.')[-1])
+ normalized = _normalize_cached(subpath)
+ for item in module.__path__:
+ if _normalize_cached(item) == normalized:
+ break
+ else:
+ # Only return the path if it's not already there
+ return subpath
+
+
+register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+
+if hasattr(importlib_machinery, 'FileFinder'):
+ register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+ return None
+
+
+register_namespace_handler(object, null_ns_handler)
+
+
+def normalize_path(filename):
+ """Normalize a file/dir name for comparison purposes"""
+ return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
+
+
+def _cygwin_patch(filename): # pragma: nocover
+ """
+    Contrary to POSIX 2008, on Cygwin, getcwd(3) contains
+    symlink components. Using os.path.abspath() works around
+    this limitation. A fix in os.getcwd() would probably be better,
+    on Cygwin even more so, except that this seems to be by design...
+ """
+ return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+
+
+def _normalize_cached(filename, _cache={}):
+ try:
+ return _cache[filename]
+ except KeyError:
+ _cache[filename] = result = normalize_path(filename)
+ return result
+
+
+def _is_egg_path(path):
+ """
+ Determine if given path appears to be an egg.
+ """
+ return path.lower().endswith('.egg')
+
+
+def _is_unpacked_egg(path):
+ """
+ Determine if given path appears to be an unpacked egg.
+ """
+ return (
+ _is_egg_path(path) and
+ os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
+ )
+
+
+def _set_parent_ns(packageName):
+ parts = packageName.split('.')
+ name = parts.pop()
+ if parts:
+ parent = '.'.join(parts)
+ setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+ """Yield non-empty/non-comment lines of a string or sequence"""
+ if isinstance(strs, six.string_types):
+ for s in strs.splitlines():
+ s = s.strip()
+ # skip blank lines/comments
+ if s and not s.startswith('#'):
+ yield s
+ else:
+ for ss in strs:
+ for s in yield_lines(ss):
+ yield s
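+
+# Example (illustrative):
+#     list(yield_lines('a\n# comment\n\nb')) == ['a', 'b']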
+
+
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+ r"""
+ (?P<name>[^-]+) (
+ -(?P<ver>[^-]+) (
+ -py(?P<pyver>[^-]+) (
+ -(?P<plat>.+)
+ )?
+ )?
+ )?
+ """,
+ re.VERBOSE | re.IGNORECASE,
+).match
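+
+# Example (illustrative): for the stem 'FooPkg-1.2-py2.7-win32' (extension
+# already split off), EGG_NAME yields name='FooPkg', ver='1.2',
+# pyver='2.7', plat='win32'.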
+
+
+class EntryPoint:
+ """Object representing an advertised importable object"""
+
+ def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+ if not MODULE(module_name):
+ raise ValueError("Invalid module name", module_name)
+ self.name = name
+ self.module_name = module_name
+ self.attrs = tuple(attrs)
+ self.extras = tuple(extras)
+ self.dist = dist
+
+ def __str__(self):
+ s = "%s = %s" % (self.name, self.module_name)
+ if self.attrs:
+ s += ':' + '.'.join(self.attrs)
+ if self.extras:
+ s += ' [%s]' % ','.join(self.extras)
+ return s
+
+ def __repr__(self):
+ return "EntryPoint.parse(%r)" % str(self)
+
+ def load(self, require=True, *args, **kwargs):
+ """
+ Require packages for this EntryPoint, then resolve it.
+ """
+ if not require or args or kwargs:
+ warnings.warn(
+ "Parameters to load are deprecated. Call .resolve and "
+ ".require separately.",
+ PkgResourcesDeprecationWarning,
+ stacklevel=2,
+ )
+ if require:
+ self.require(*args, **kwargs)
+ return self.resolve()
+
+ def resolve(self):
+ """
+ Resolve the entry point from its module and attrs.
+ """
+ module = __import__(self.module_name, fromlist=['__name__'], level=0)
+ try:
+ return functools.reduce(getattr, self.attrs, module)
+ except AttributeError as exc:
+ raise ImportError(str(exc))
+
+ def require(self, env=None, installer=None):
+ if self.extras and not self.dist:
+ raise UnknownExtra("Can't require() without a distribution", self)
+
+ # Get the requirements for this entry point with all its extras and
+ # then resolve them. We have to pass `extras` along when resolving so
+ # that the working set knows what extras we want. Otherwise, for
+ # dist-info distributions, the working set will assume that the
+ # requirements for that extra are purely optional and skip over them.
+ reqs = self.dist.requires(self.extras)
+ items = working_set.resolve(reqs, env, installer, extras=self.extras)
+ list(map(working_set.add, items))
+
+ pattern = re.compile(
+ r'\s*'
+ r'(?P<name>.+?)\s*'
+ r'=\s*'
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+
+ @classmethod
+ def parse(cls, src, dist=None):
+ """Parse a single entry point from string `src`
+
+ Entry point syntax follows the form::
+
+ name = some.module:some.attr [extra1, extra2]
+
+ The entry name and module name are required, but the ``:attrs`` and
+ ``[extras]`` parts are optional
+ """
+ m = cls.pattern.match(src)
+ if not m:
+ msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
+ raise ValueError(msg, src)
+ res = m.groupdict()
+ extras = cls._parse_extras(res['extras'])
+ attrs = res['attr'].split('.') if res['attr'] else ()
+ return cls(res['name'], res['module'], attrs, extras, dist)
+
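+    # Example (illustrative): parse('main = mypkg.cli:run.app [color]')
+    # yields name='main', module_name='mypkg.cli', attrs=('run', 'app'),
+    # extras=('color',).
+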
+ @classmethod
+ def _parse_extras(cls, extras_spec):
+ if not extras_spec:
+ return ()
+ req = Requirement.parse('x' + extras_spec)
+ if req.specs:
+ raise ValueError()
+ return req.extras
+
+ @classmethod
+ def parse_group(cls, group, lines, dist=None):
+ """Parse an entry point group"""
+ if not MODULE(group):
+ raise ValueError("Invalid group name", group)
+ this = {}
+ for line in yield_lines(lines):
+ ep = cls.parse(line, dist)
+ if ep.name in this:
+ raise ValueError("Duplicate entry point", group, ep.name)
+ this[ep.name] = ep
+ return this
+
+ @classmethod
+ def parse_map(cls, data, dist=None):
+ """Parse a map of entry point groups"""
+ if isinstance(data, dict):
+ data = data.items()
+ else:
+ data = split_sections(data)
+ maps = {}
+ for group, lines in data:
+ if group is None:
+ if not lines:
+ continue
+ raise ValueError("Entry points must be listed in groups")
+ group = group.strip()
+ if group in maps:
+ raise ValueError("Duplicate group name", group)
+ maps[group] = cls.parse_group(group, lines, dist)
+ return maps
+
+
+def _remove_md5_fragment(location):
+ if not location:
+ return ''
+ parsed = urllib.parse.urlparse(location)
+ if parsed[-1].startswith('md5='):
+ return urllib.parse.urlunparse(parsed[:-1] + ('',))
+ return location
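+
+# Example (illustrative):
+#     _remove_md5_fragment('http://host/Foo-1.0.egg#md5=abc123')
+#     -> 'http://host/Foo-1.0.egg'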
+
+
+def _version_from_file(lines):
+ """
+ Given an iterable of lines from a Metadata file, return
+ the value of the Version field, if present, or None otherwise.
+ """
+ def is_version_line(line):
+ return line.lower().startswith('version:')
+ version_lines = filter(is_version_line, lines)
+ line = next(iter(version_lines), '')
+ _, _, value = line.partition(':')
+ return safe_version(value.strip()) or None
+
+
+class Distribution:
+ """Wrap an actual or potential sys.path entry w/metadata"""
+ PKG_INFO = 'PKG-INFO'
+
+ def __init__(
+ self, location=None, metadata=None, project_name=None,
+ version=None, py_version=PY_MAJOR, platform=None,
+ precedence=EGG_DIST):
+ self.project_name = safe_name(project_name or 'Unknown')
+ if version is not None:
+ self._version = safe_version(version)
+ self.py_version = py_version
+ self.platform = platform
+ self.location = location
+ self.precedence = precedence
+ self._provider = metadata or empty_provider
+
+ @classmethod
+ def from_location(cls, location, basename, metadata=None, **kw):
+ project_name, version, py_version, platform = [None] * 4
+ basename, ext = os.path.splitext(basename)
+ if ext.lower() in _distributionImpl:
+ cls = _distributionImpl[ext.lower()]
+
+ match = EGG_NAME(basename)
+ if match:
+ project_name, version, py_version, platform = match.group(
+ 'name', 'ver', 'pyver', 'plat'
+ )
+ return cls(
+ location, metadata, project_name=project_name, version=version,
+ py_version=py_version, platform=platform, **kw
+ )._reload_version()
+
+ def _reload_version(self):
+ return self
+
+ @property
+ def hashcmp(self):
+ return (
+ self.parsed_version,
+ self.precedence,
+ self.key,
+ _remove_md5_fragment(self.location),
+ self.py_version or '',
+ self.platform or '',
+ )
+
+ def __hash__(self):
+ return hash(self.hashcmp)
+
+ def __lt__(self, other):
+ return self.hashcmp < other.hashcmp
+
+ def __le__(self, other):
+ return self.hashcmp <= other.hashcmp
+
+ def __gt__(self, other):
+ return self.hashcmp > other.hashcmp
+
+ def __ge__(self, other):
+ return self.hashcmp >= other.hashcmp
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ # It's not a Distribution, so they are not equal
+ return False
+ return self.hashcmp == other.hashcmp
+
+ def __ne__(self, other):
+ return not self == other
+
+ # These properties have to be lazy so that we don't have to load any
+ # metadata until/unless it's actually needed. (i.e., some distributions
+ # may not know their name or version without loading PKG-INFO)
+
+ @property
+ def key(self):
+ try:
+ return self._key
+ except AttributeError:
+ self._key = key = self.project_name.lower()
+ return key
+
+ @property
+ def parsed_version(self):
+ if not hasattr(self, "_parsed_version"):
+ self._parsed_version = parse_version(self.version)
+
+ return self._parsed_version
+
+ def _warn_legacy_version(self):
+ LV = packaging.version.LegacyVersion
+ is_legacy = isinstance(self._parsed_version, LV)
+ if not is_legacy:
+ return
+
+        # While an empty version is technically a legacy version and
+        # is not a valid PEP 440 version, it is unlikely to have been
+        # supplied deliberately; more likely it comes from setuptools
+        # attempting to parse a filename and including it in the list.
+        # For that reason, only warn when the version is non-empty.
+ if not self.version:
+ return
+
+ tmpl = textwrap.dedent("""
+            '{project_name} ({version})' is being parsed as a legacy,
+            non-PEP 440 version. You may encounter odd behavior and
+            sort order; in particular, it will sort as less than 0.0.
+            It is recommended to migrate to PEP 440-compatible versions.
+ """).strip().replace('\n', ' ')
+
+ warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
+
+ @property
+ def version(self):
+ try:
+ return self._version
+ except AttributeError:
+ version = self._get_version()
+ if version is None:
+ path = self._get_metadata_path_for_display(self.PKG_INFO)
+ msg = (
+ "Missing 'Version:' header and/or {} file at path: {}"
+ ).format(self.PKG_INFO, path)
+ raise ValueError(msg, self)
+
+ return version
+
+ @property
+ def _dep_map(self):
+ """
+ A map of extra to its list of (direct) requirements
+ for this distribution, including the null extra.
+ """
+ try:
+ return self.__dep_map
+ except AttributeError:
+ self.__dep_map = self._filter_extras(self._build_dep_map())
+ return self.__dep_map
+
+ @staticmethod
+ def _filter_extras(dm):
+ """
+ Given a mapping of extras to dependencies, strip off
+ environment markers and filter out any dependencies
+ not matching the markers.
+ """
+ for extra in list(filter(None, dm)):
+ new_extra = extra
+ reqs = dm.pop(extra)
+ new_extra, _, marker = extra.partition(':')
+ fails_marker = marker and (
+ invalid_marker(marker)
+ or not evaluate_marker(marker)
+ )
+ if fails_marker:
+ reqs = []
+ new_extra = safe_extra(new_extra) or None
+
+ dm.setdefault(new_extra, []).extend(reqs)
+ return dm
+
+ def _build_dep_map(self):
+ dm = {}
+ for name in 'requires.txt', 'depends.txt':
+ for extra, reqs in split_sections(self._get_metadata(name)):
+ dm.setdefault(extra, []).extend(parse_requirements(reqs))
+ return dm
+
+ def requires(self, extras=()):
+ """List of Requirements needed for this distro if `extras` are used"""
+ dm = self._dep_map
+ deps = []
+ deps.extend(dm.get(None, ()))
+ for ext in extras:
+ try:
+ deps.extend(dm[safe_extra(ext)])
+ except KeyError:
+ raise UnknownExtra(
+ "%s has no such extra feature %r" % (self, ext)
+ )
+ return deps
+
+ def _get_metadata_path_for_display(self, name):
+ """
+ Return the path to the given metadata file, if available.
+ """
+ try:
+ # We need to access _get_metadata_path() on the provider object
+ # directly rather than through this class's __getattr__()
+ # since _get_metadata_path() is marked private.
+ path = self._provider._get_metadata_path(name)
+
+ # Handle exceptions e.g. in case the distribution's metadata
+ # provider doesn't support _get_metadata_path().
+ except Exception:
+ return '[could not detect]'
+
+ return path
+
+ def _get_metadata(self, name):
+ if self.has_metadata(name):
+ for line in self.get_metadata_lines(name):
+ yield line
+
+ def _get_version(self):
+ lines = self._get_metadata(self.PKG_INFO)
+ version = _version_from_file(lines)
+
+ return version
+
+ def activate(self, path=None, replace=False):
+ """Ensure distribution is importable on `path` (default=sys.path)"""
+ if path is None:
+ path = sys.path
+ self.insert_on(path, replace=replace)
+ if path is sys.path:
+ fixup_namespace_packages(self.location)
+ for pkg in self._get_metadata('namespace_packages.txt'):
+ if pkg in sys.modules:
+ declare_namespace(pkg)
+
+ def egg_name(self):
+ """Return what this distribution's standard .egg filename should be"""
+ filename = "%s-%s-py%s" % (
+ to_filename(self.project_name), to_filename(self.version),
+ self.py_version or PY_MAJOR
+ )
+
+ if self.platform:
+ filename += '-' + self.platform
+ return filename
+
+ def __repr__(self):
+ if self.location:
+ return "%s (%s)" % (self, self.location)
+ else:
+ return str(self)
+
+ def __str__(self):
+ try:
+ version = getattr(self, 'version', None)
+ except ValueError:
+ version = None
+ version = version or "[unknown version]"
+ return "%s %s" % (self.project_name, version)
+
+ def __getattr__(self, attr):
+ """Delegate all unrecognized public attributes to .metadata provider"""
+ if attr.startswith('_'):
+ raise AttributeError(attr)
+ return getattr(self._provider, attr)
+
+ def __dir__(self):
+ return list(
+ set(super(Distribution, self).__dir__())
+ | set(
+ attr for attr in self._provider.__dir__()
+ if not attr.startswith('_')
+ )
+ )
+
+ if not hasattr(object, '__dir__'):
+        # object.__dir__ is missing on Python 2.7, so drop this override there
+ del __dir__
+
+ @classmethod
+ def from_filename(cls, filename, metadata=None, **kw):
+ return cls.from_location(
+ _normalize_cached(filename), os.path.basename(filename), metadata,
+ **kw
+ )
+
+ def as_requirement(self):
+ """Return a ``Requirement`` that matches this distribution exactly"""
+ if isinstance(self.parsed_version, packaging.version.Version):
+ spec = "%s==%s" % (self.project_name, self.parsed_version)
+ else:
+ spec = "%s===%s" % (self.project_name, self.parsed_version)
+
+ return Requirement.parse(spec)
+
+ def load_entry_point(self, group, name):
+ """Return the `name` entry point of `group` or raise ImportError"""
+ ep = self.get_entry_info(group, name)
+ if ep is None:
+ raise ImportError("Entry point %r not found" % ((group, name),))
+ return ep.load()
+
+ def get_entry_map(self, group=None):
+ """Return the entry point map for `group`, or the full entry map"""
+ try:
+ ep_map = self._ep_map
+ except AttributeError:
+ ep_map = self._ep_map = EntryPoint.parse_map(
+ self._get_metadata('entry_points.txt'), self
+ )
+ if group is not None:
+ return ep_map.get(group, {})
+ return ep_map
+
+ def get_entry_info(self, group, name):
+ """Return the EntryPoint object for `group`+`name`, or ``None``"""
+ return self.get_entry_map(group).get(name)
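+
+    # Example (illustrative): dist.load_entry_point('console_scripts', 'pip')
+    # resolves the callable advertised under 'pip' in the 'console_scripts'
+    # group, raising ImportError if no such entry point exists.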
+
+ def insert_on(self, path, loc=None, replace=False):
+ """Ensure self.location is on path
+
+ If replace=False (default):
+ - If location is already in path anywhere, do nothing.
+ - Else:
+ - If it's an egg and its parent directory is on path,
+ insert just ahead of the parent.
+ - Else: add to the end of path.
+ If replace=True:
+ - If location is already on path anywhere (not eggs)
+ or higher priority than its parent (eggs)
+ do nothing.
+ - Else:
+ - If it's an egg and its parent directory is on path,
+ insert just ahead of the parent,
+ removing any lower-priority entries.
+ - Else: add it to the front of path.
+ """
+
+ loc = loc or self.location
+ if not loc:
+ return
+
+ nloc = _normalize_cached(loc)
+ bdir = os.path.dirname(nloc)
+ npath = [(p and _normalize_cached(p) or p) for p in path]
+
+ for p, item in enumerate(npath):
+ if item == nloc:
+ if replace:
+ break
+ else:
+ # don't modify path (even removing duplicates) if
+ # found and not replace
+ return
+ elif item == bdir and self.precedence == EGG_DIST:
+ # if it's an .egg, give it precedence over its directory
+ # UNLESS it's already been added to sys.path and replace=False
+ if (not replace) and nloc in npath[p:]:
+ return
+ if path is sys.path:
+ self.check_version_conflict()
+ path.insert(p, loc)
+ npath.insert(p, nloc)
+ break
+ else:
+ if path is sys.path:
+ self.check_version_conflict()
+ if replace:
+ path.insert(0, loc)
+ else:
+ path.append(loc)
+ return
+
+ # p is the spot where we found or inserted loc; now remove duplicates
+ while True:
+ try:
+ np = npath.index(nloc, p + 1)
+ except ValueError:
+ break
+ else:
+ del npath[np], path[np]
+ # ha!
+ p = np
+
+ return
+
+ def check_version_conflict(self):
+ if self.key == 'setuptools':
+ # ignore the inevitable setuptools self-conflicts :(
+ return
+
+ nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+ loc = normalize_path(self.location)
+ for modname in self._get_metadata('top_level.txt'):
+ if (modname not in sys.modules or modname in nsp
+ or modname in _namespace_packages):
+ continue
+ if modname in ('pkg_resources', 'setuptools', 'site'):
+ continue
+ fn = getattr(sys.modules[modname], '__file__', None)
+ if fn and (normalize_path(fn).startswith(loc) or
+ fn.startswith(self.location)):
+ continue
+ issue_warning(
+ "Module %s was already imported from %s, but %s is being added"
+ " to sys.path" % (modname, fn, self.location),
+ )
+
+ def has_version(self):
+ try:
+ self.version
+ except ValueError:
+ issue_warning("Unbuilt egg for " + repr(self))
+ return False
+ return True
+
+ def clone(self, **kw):
+ """Copy this distribution, substituting in any changed keyword args"""
+ names = 'project_name version py_version platform location precedence'
+ for attr in names.split():
+ kw.setdefault(attr, getattr(self, attr, None))
+ kw.setdefault('metadata', self._provider)
+ return self.__class__(**kw)
+
+ @property
+ def extras(self):
+ return [dep for dep in self._dep_map if dep]
+
+
+class EggInfoDistribution(Distribution):
+ def _reload_version(self):
+ """
+        Packages installed by distutils (e.g. numpy or scipy) use an old
+        safe_version, so their version numbers can get mangled when
+        converted to filenames (e.g., 1.11.0.dev0+2329eae to
+        1.11.0.dev0_2329eae). Such distributions will not be parsed
+        properly downstream by Distribution and safe_version, so take an
+        extra step and try to get the version number from the metadata
+        file itself instead of the filename.
+ """
+ md_version = self._get_version()
+ if md_version:
+ self._version = md_version
+ return self
+
+
+class DistInfoDistribution(Distribution):
+ """
+ Wrap an actual or potential sys.path entry
+ w/metadata, .dist-info style.
+ """
+ PKG_INFO = 'METADATA'
+ EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+ @property
+ def _parsed_pkg_info(self):
+ """Parse and cache metadata"""
+ try:
+ return self._pkg_info
+ except AttributeError:
+ metadata = self.get_metadata(self.PKG_INFO)
+ self._pkg_info = email.parser.Parser().parsestr(metadata)
+ return self._pkg_info
+
+ @property
+ def _dep_map(self):
+ try:
+ return self.__dep_map
+ except AttributeError:
+ self.__dep_map = self._compute_dependencies()
+ return self.__dep_map
+
+ def _compute_dependencies(self):
+ """Recompute this distribution's dependencies."""
+ dm = self.__dep_map = {None: []}
+
+ reqs = []
+        # including any requirements with condition expressions (markers)
+ for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
+ reqs.extend(parse_requirements(req))
+
+ def reqs_for_extra(extra):
+ for req in reqs:
+ if not req.marker or req.marker.evaluate({'extra': extra}):
+ yield req
+
+ common = frozenset(reqs_for_extra(None))
+ dm[None].extend(common)
+
+ for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
+ s_extra = safe_extra(extra.strip())
+ dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)
+
+ return dm
+
+
+_distributionImpl = {
+ '.egg': Distribution,
+ '.egg-info': EggInfoDistribution,
+ '.dist-info': DistInfoDistribution,
+}
+
+
+def issue_warning(*args, **kw):
+ level = 1
+ g = globals()
+ try:
+ # find the first stack frame that is *not* code in
+ # the pkg_resources module, to use for the warning
+ while sys._getframe(level).f_globals is g:
+ level += 1
+ except ValueError:
+ pass
+ warnings.warn(stacklevel=level + 1, *args, **kw)
+
+
+class RequirementParseError(ValueError):
+ def __str__(self):
+ return ' '.join(self.args)
+
+
+def parse_requirements(strs):
+ """Yield ``Requirement`` objects for each specification in `strs`
+
+ `strs` must be a string, or a (possibly-nested) iterable thereof.
+ """
+ # create a steppable iterator, so we can handle \-continuations
+ lines = iter(yield_lines(strs))
+
+ for line in lines:
+ # Drop comments -- a hash without a space may be in a URL.
+ if ' #' in line:
+ line = line[:line.find(' #')]
+ # If there is a line continuation, drop it, and append the next line.
+ if line.endswith('\\'):
+ line = line[:-2].strip()
+ try:
+ line += next(lines)
+ except StopIteration:
+ return
+ yield Requirement(line)
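+
+# Example (illustrative): parse_requirements('foo>=1.0\nbar[extra1]')
+# yields two Requirement objects, for 'foo' and 'bar' respectively.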
+
+
+class Requirement(packaging.requirements.Requirement):
+ def __init__(self, requirement_string):
+ """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+ try:
+ super(Requirement, self).__init__(requirement_string)
+ except packaging.requirements.InvalidRequirement as e:
+ raise RequirementParseError(str(e))
+ self.unsafe_name = self.name
+ project_name = safe_name(self.name)
+ self.project_name, self.key = project_name, project_name.lower()
+ self.specs = [
+ (spec.operator, spec.version) for spec in self.specifier]
+ self.extras = tuple(map(safe_extra, self.extras))
+ self.hashCmp = (
+ self.key,
+ self.specifier,
+ frozenset(self.extras),
+ str(self.marker) if self.marker else None,
+ )
+ self.__hash = hash(self.hashCmp)
+
+ def __eq__(self, other):
+ return (
+ isinstance(other, Requirement) and
+ self.hashCmp == other.hashCmp
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __contains__(self, item):
+ if isinstance(item, Distribution):
+ if item.key != self.key:
+ return False
+
+ item = item.version
+
+ # Allow prereleases always in order to match the previous behavior of
+ # this method. In the future this should be smarter and follow PEP 440
+ # more accurately.
+ return self.specifier.contains(item, prereleases=True)
+
+ def __hash__(self):
+ return self.__hash
+
+ def __repr__(self):
+ return "Requirement.parse(%r)" % str(self)
+
+ @staticmethod
+ def parse(s):
+ req, = parse_requirements(s)
+ return req
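+
+    # Example (illustrative): Requirement.parse('foo[bar]>=1.2') gives a
+    # Requirement with key 'foo', extras ('bar',), and specs [('>=', '1.2')].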
+
+
+def _always_object(classes):
+ """
+ Ensure object appears in the mro even
+ for old-style classes.
+ """
+ if object not in classes:
+ return classes + (object,)
+ return classes
+
+
+def _find_adapter(registry, ob):
+ """Return an adapter factory for `ob` from `registry`"""
+ types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
+ for t in types:
+ if t in registry:
+ return registry[t]
+
+
+def ensure_directory(path):
+ """Ensure that the parent directory of `path` exists"""
+ dirname = os.path.dirname(path)
+ py31compat.makedirs(dirname, exist_ok=True)
+
+
+def _bypass_ensure_directory(path):
+ """Sandbox-bypassing version of ensure_directory()"""
+ if not WRITE_SUPPORT:
+ raise IOError('"os.mkdir" not supported on this platform.')
+ dirname, filename = split(path)
+ if dirname and filename and not isdir(dirname):
+ _bypass_ensure_directory(dirname)
+ try:
+ mkdir(dirname, 0o755)
+ except FileExistsError:
+ pass
+
+
+def split_sections(s):
+ """Split a string or iterable thereof into (section, content) pairs
+
+ Each ``section`` is a stripped version of the section header ("[section]")
+ and each ``content`` is a list of stripped lines excluding blank lines and
+ comment-only lines. If there are any such lines before the first section
+ header, they're returned in a first ``section`` of ``None``.
+ """
+ section = None
+ content = []
+ for line in yield_lines(s):
+ if line.startswith("["):
+ if line.endswith("]"):
+ if section or content:
+ yield section, content
+ section = line[1:-1].strip()
+ content = []
+ else:
+ raise ValueError("Invalid section heading", line)
+ else:
+ content.append(line)
+
+ # wrap up last segment
+ yield section, content
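+
+# Example (illustrative): split_sections(['a', '[sec]', 'b', 'c']) yields
+# (None, ['a']) and then ('sec', ['b', 'c']).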
+
+
+def _mkstemp(*args, **kw):
+ old_open = os.open
+ try:
+ # temporarily bypass sandboxing
+ os.open = os_open
+ return tempfile.mkstemp(*args, **kw)
+ finally:
+ # and then put it back
+ os.open = old_open
+
+
+# Silence the PEP440Warning by default, so that end users don't get hit by it
+# randomly just because they use pkg_resources. We want to append the rule
+# because we want earlier uses of filterwarnings to take precedence over this
+# one.
+warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
+
+
+# from jaraco.functools 1.3
+def _call_aside(f, *args, **kwargs):
+ f(*args, **kwargs)
+ return f
+
+
+@_call_aside
+def _initialize(g=globals()):
+ "Set up global resource manager (deliberately not state-saved)"
+ manager = ResourceManager()
+ g['_manager'] = manager
+ g.update(
+ (name, getattr(manager, name))
+ for name in dir(manager)
+ if not name.startswith('_')
+ )
+
+
+@_call_aside
+def _initialize_master_working_set():
+ """
+ Prepare the master working set and make the ``require()``
+ API available.
+
+ This function has explicit effects on the global state
+ of pkg_resources. It is intended to be invoked once at
+ the initialization of this module.
+
+ Invocation by other packages is unsupported and done
+ at their own risk.
+ """
+ working_set = WorkingSet._build_master()
+ _declare_state('object', working_set=working_set)
+
+ require = working_set.require
+ iter_entry_points = working_set.iter_entry_points
+ add_activation_listener = working_set.subscribe
+ run_script = working_set.run_script
+ # backward compatibility
+ run_main = run_script
+ # Activate all distributions already on sys.path with replace=False and
+ # ensure that all distributions added to the working set in the future
+ # (e.g. by calling ``require()``) will get activated as well,
+ # with higher priority (replace=True).
+ tuple(
+ dist.activate(replace=False)
+ for dist in working_set
+ )
+ add_activation_listener(
+ lambda dist: dist.activate(replace=True),
+ existing=False,
+ )
+ working_set.entries = []
+ # match order
+ list(map(working_set.add_entry, sys.path))
+ globals().update(locals())
+
+class PkgResourcesDeprecationWarning(Warning):
+ """
+ Base class for warning about deprecations in ``pkg_resources``
+
+ This class is not derived from ``DeprecationWarning``, and as such is
+ visible by default.
+ """
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..ded754dc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc
new file mode 100644
index 00000000..d351c239
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/py31compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/py31compat.py
new file mode 100644
index 00000000..a2d3007c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pkg_resources/py31compat.py
@@ -0,0 +1,23 @@
+import os
+import errno
+import sys
+
+from pip._vendor import six
+
+
+def _makedirs_31(path, exist_ok=False):
+ try:
+ os.makedirs(path)
+ except OSError as exc:
+ if not exist_ok or exc.errno != errno.EEXIST:
+ raise
+
+
+# rely on compatibility behavior until mode considerations
+# and exist_ok considerations are disentangled.
+# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
+needs_makedirs = (
+ six.PY2 or
+ (3, 4) <= sys.version_info < (3, 4, 1)
+)
+makedirs = _makedirs_31 if needs_makedirs else os.makedirs
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__init__.py
new file mode 100644
index 00000000..e434c257
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__init__.py
@@ -0,0 +1,177 @@
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import division, print_function
+
+from collections import deque
+from datetime import timedelta
+from math import ceil
+from sys import stderr
+try:
+ from time import monotonic
+except ImportError:
+ from time import time as monotonic
+
+
+__version__ = '1.5'
+
+HIDE_CURSOR = '\x1b[?25l'
+SHOW_CURSOR = '\x1b[?25h'
+
+
+class Infinite(object):
+ file = stderr
+ sma_window = 10 # Simple Moving Average window
+ check_tty = True
+ hide_cursor = True
+
+ def __init__(self, message='', **kwargs):
+ self.index = 0
+ self.start_ts = monotonic()
+ self.avg = 0
+ self._avg_update_ts = self.start_ts
+ self._ts = self.start_ts
+ self._xput = deque(maxlen=self.sma_window)
+ for key, val in kwargs.items():
+ setattr(self, key, val)
+
+ self._width = 0
+ self.message = message
+
+ if self.file and self.is_tty():
+ if self.hide_cursor:
+ print(HIDE_CURSOR, end='', file=self.file)
+ print(self.message, end='', file=self.file)
+ self.file.flush()
+
+ def __getitem__(self, key):
+ if key.startswith('_'):
+ return None
+ return getattr(self, key, None)
+
+ @property
+ def elapsed(self):
+ return int(monotonic() - self.start_ts)
+
+ @property
+ def elapsed_td(self):
+ return timedelta(seconds=self.elapsed)
+
+ def update_avg(self, n, dt):
+ if n > 0:
+ xput_len = len(self._xput)
+ self._xput.append(dt / n)
+ now = monotonic()
+ # update when we're still filling _xput, then after every second
+ if (xput_len < self.sma_window or
+ now - self._avg_update_ts > 1):
+ self.avg = sum(self._xput) / len(self._xput)
+ self._avg_update_ts = now
+
+ def update(self):
+ pass
+
+ def start(self):
+ pass
+
+ def clearln(self):
+ if self.file and self.is_tty():
+ print('\r\x1b[K', end='', file=self.file)
+
+ def write(self, s):
+ if self.file and self.is_tty():
+ line = self.message + s.ljust(self._width)
+ print('\r' + line, end='', file=self.file)
+ self._width = max(self._width, len(s))
+ self.file.flush()
+
+ def writeln(self, line):
+ if self.file and self.is_tty():
+ self.clearln()
+ print(line, end='', file=self.file)
+ self.file.flush()
+
+ def finish(self):
+ if self.file and self.is_tty():
+ print(file=self.file)
+ if self.hide_cursor:
+ print(SHOW_CURSOR, end='', file=self.file)
+
+ def is_tty(self):
+ return self.file.isatty() if self.check_tty else True
+
+ def next(self, n=1):
+ now = monotonic()
+ dt = now - self._ts
+ self.update_avg(n, dt)
+ self._ts = now
+ self.index = self.index + n
+ self.update()
+
+ def iter(self, it):
+ with self:
+ for x in it:
+ yield x
+ self.next()
+
+ def __enter__(self):
+ self.start()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.finish()
+
+
+class Progress(Infinite):
+ def __init__(self, *args, **kwargs):
+ super(Progress, self).__init__(*args, **kwargs)
+ self.max = kwargs.get('max', 100)
+
+ @property
+ def eta(self):
+ return int(ceil(self.avg * self.remaining))
+
+ @property
+ def eta_td(self):
+ return timedelta(seconds=self.eta)
+
+ @property
+ def percent(self):
+ return self.progress * 100
+
+ @property
+ def progress(self):
+ return min(1, self.index / self.max)
+
+ @property
+ def remaining(self):
+ return max(self.max - self.index, 0)
+
+ def start(self):
+ self.update()
+
+ def goto(self, index):
+ incr = index - self.index
+ self.next(incr)
+
+ def iter(self, it):
+ try:
+ self.max = len(it)
+ except TypeError:
+ pass
+
+ with self:
+ for x in it:
+ yield x
+ self.next()
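+
+# Example usage (illustrative, adapted from the progress package docs):
+#
+#     for item in Progress('Loading ').iter(items):
+#         process(item)  # process() and items are placeholders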
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..3b449b22
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-37.pyc
new file mode 100644
index 00000000..b30d33ff
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-37.pyc
new file mode 100644
index 00000000..2b67d351
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc
new file mode 100644
index 00000000..9cd44f0c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/bar.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/bar.py
new file mode 100644
index 00000000..8819efda
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/bar.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import unicode_literals
+
+import sys
+
+from . import Progress
+
+
+class Bar(Progress):
+ width = 32
+ suffix = '%(index)d/%(max)d'
+ bar_prefix = ' |'
+ bar_suffix = '| '
+ empty_fill = ' '
+ fill = '#'
+
+ def update(self):
+ filled_length = int(self.width * self.progress)
+ empty_length = self.width - filled_length
+
+ message = self.message % self
+ bar = self.fill * filled_length
+ empty = self.empty_fill * empty_length
+ suffix = self.suffix % self
+ line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix,
+ suffix])
+ self.writeln(line)
+
+
+class ChargingBar(Bar):
+ suffix = '%(percent)d%%'
+ bar_prefix = ' '
+ bar_suffix = ' '
+ empty_fill = '∙'
+ fill = '█'
+
+
+class FillingSquaresBar(ChargingBar):
+ empty_fill = '▢'
+ fill = '▣'
+
+
+class FillingCirclesBar(ChargingBar):
+ empty_fill = '◯'
+ fill = '◉'
+
+
+class IncrementalBar(Bar):
+ if sys.platform.startswith('win'):
+ phases = (u' ', u'▌', u'█')
+ else:
+ phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')
+
+ def update(self):
+ nphases = len(self.phases)
+ filled_len = self.width * self.progress
+ nfull = int(filled_len) # Number of full chars
+ phase = int((filled_len - nfull) * nphases) # Phase of last char
+ nempty = self.width - nfull # Number of empty chars
+
+ message = self.message % self
+ bar = self.phases[-1] * nfull
+ current = self.phases[phase] if phase > 0 else ''
+ empty = self.empty_fill * max(0, nempty - len(current))
+ suffix = self.suffix % self
+ line = ''.join([message, self.bar_prefix, bar, current, empty,
+ self.bar_suffix, suffix])
+ self.writeln(line)
+
+
+class PixelBar(IncrementalBar):
+ phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿')
+
+
+class ShadyBar(IncrementalBar):
+ phases = (' ', '░', '▒', '▓', '█')
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/counter.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/counter.py
new file mode 100644
index 00000000..d955ca47
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/counter.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import unicode_literals
+from . import Infinite, Progress
+
+
+class Counter(Infinite):
+ def update(self):
+ self.write(str(self.index))
+
+
+class Countdown(Progress):
+ def update(self):
+ self.write(str(self.remaining))
+
+
+class Stack(Progress):
+ phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
+
+ def update(self):
+ nphases = len(self.phases)
+ i = min(nphases - 1, int(self.progress * nphases))
+ self.write(self.phases[i])
+
+
+class Pie(Stack):
+ phases = ('○', '◔', '◑', '◕', '●')
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/spinner.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/spinner.py
new file mode 100644
index 00000000..4e100cab
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/progress/spinner.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import unicode_literals
+from . import Infinite
+
+
+class Spinner(Infinite):
+ phases = ('-', '\\', '|', '/')
+ hide_cursor = True
+
+ def update(self):
+ i = self.index % len(self.phases)
+ self.write(self.phases[i])
+
+
+class PieSpinner(Spinner):
+ phases = ['◷', '◶', '◵', '◴']
+
+
+class MoonSpinner(Spinner):
+ phases = ['◑', '◒', '◐', '◓']
+
+
+class LineSpinner(Spinner):
+ phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻']
+
+
+class PixelSpinner(Spinner):
+ phases = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽']
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pyparsing.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pyparsing.py
new file mode 100644
index 00000000..9d6a01d5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pyparsing.py
@@ -0,0 +1,6493 @@
+#-*- coding: utf-8 -*-
+# module pyparsing.py
+#
+# Copyright (c) 2003-2019 Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
+
+The pyparsing module is an alternative to the traditional lex/yacc
+approach, and to regular expressions, for creating and executing
+simple grammars. With pyparsing, you don't need to learn
+a new syntax for defining grammars or matching expressions - the parsing
+module provides a library of classes that you use to construct the
+grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form
+``"<salutation>, <addressee>!"``), built up using :class:`Word`,
+:class:`Literal`, and :class:`And` elements
+(the :class:`'+'<ParserElement.__add__>` operators create :class:`And` expressions,
+and the strings are auto-converted to :class:`Literal` expressions)::
+
+ from pip._vendor.pyparsing import Word, alphas
+
+ # define grammar of a greeting
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+ Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the
+self-explanatory class names, and the use of '+', '|' and '^' operators.
+
+The :class:`ParseResults` object returned from
+:class:`ParserElement.parseString` can be
+accessed as a nested list, a dictionary, or an object with named
+attributes.
+
+The pyparsing module handles some of the problems that are typically
+vexing when writing text parsers:
+
+ - extra or missing whitespace (the above program will also handle
+ "Hello,World!", "Hello , World !", etc.)
+ - quoted strings
+ - embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes :class:`ParserElement` and :class:`ParseResults` to
+see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+
+ - construct literal match expressions from :class:`Literal` and
+ :class:`CaselessLiteral` classes
+ - construct character word-group expressions using the :class:`Word`
+ class
+ - see how to create repetitive expressions using :class:`ZeroOrMore`
+ and :class:`OneOrMore` classes
+ - use :class:`'+'<And>`, :class:`'|'<MatchFirst>`, :class:`'^'<Or>`,
+ and :class:`'&'<Each>` operators to combine simple expressions into
+ more complex ones
+ - associate names with your parsed results using
+ :class:`ParserElement.setResultsName`
+ - find some helpful expression short-cuts like :class:`delimitedList`
+ and :class:`oneOf`
+ - find more useful common expressions in the :class:`pyparsing_common`
+ namespace class
+"""
+
+__version__ = "2.4.0"
+__versionTime__ = "07 Apr 2019 18:28 UTC"
+__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+
+try:
+ # Python 3
+ from itertools import filterfalse
+except ImportError:
+ from itertools import ifilterfalse as filterfalse
+
+try:
+ from _thread import RLock
+except ImportError:
+ from threading import RLock
+
+try:
+ # Python 3
+ from collections.abc import Iterable
+ from collections.abc import MutableMapping
+except ImportError:
+ # Python 2.7
+ from collections import Iterable
+ from collections import MutableMapping
+
+try:
+ from collections import OrderedDict as _OrderedDict
+except ImportError:
+ try:
+ from ordereddict import OrderedDict as _OrderedDict
+ except ImportError:
+ _OrderedDict = None
+
+try:
+ from types import SimpleNamespace
+except ImportError:
+ class SimpleNamespace: pass
+
+# version compatibility configuration
+__compat__ = SimpleNamespace()
+__compat__.__doc__ = """
+ A cross-version compatibility configuration for pyparsing features that will be
+ released in a future version. By setting values in this configuration to True,
+ those features can be enabled in prior versions for compatibility development
+ and testing.
+
+    - collect_all_And_tokens - flag to enable the fix for Issue #63, which corrects
+      erroneous grouping of results names when an And expression is nested within
+      an Or or MatchFirst; set to True to enable the bugfix to be released in pyparsing 2.4
+"""
+__compat__.collect_all_And_tokens = True
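+
+# Example (sketch): client code can toggle compatibility flags before
+# building any grammars:
+#
+#   from pip._vendor import pyparsing
+#   pyparsing.__compat__.collect_all_And_tokens = True   # opt in to the Issue #63 fix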
+
+
+#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
+
+__all__ = [ '__version__', '__versionTime__', '__author__', '__compat__',
+'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+'PrecededBy', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
+'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'Char',
+'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
+'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
+'CloseMatch', 'tokenMap', 'pyparsing_common', 'pyparsing_unicode', 'unicode_set',
+]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+ _MAX_INT = sys.maxsize
+ basestring = str
+ unichr = chr
+ unicode = str
+ _ustr = str
+
+    # build list of single-arg builtins that can be used as parse actions
+ singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+ _MAX_INT = sys.maxint
+ range = xrange
+
+ def _ustr(obj):
+ """Drop-in replacement for str(obj) that tries to be Unicode
+ friendly. It first tries str(obj). If that fails with
+ a UnicodeEncodeError, then it tries unicode(obj). It then
+ < returns the unicode object | encodes it with the default
+ encoding | ... >.
+ """
+ if isinstance(obj,unicode):
+ return obj
+
+ try:
+ # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+ # it won't break any existing code.
+ return str(obj)
+
+ except UnicodeEncodeError:
+ # Else encode it
+ ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+ xmlcharref = Regex(r'&#\d+;')
+ xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+ return xmlcharref.transformString(ret)
+
+ # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+ singleArgBuiltins = []
+ import __builtin__
+ for fname in "sum len sorted reversed list tuple set any all min max".split():
+ try:
+ singleArgBuiltins.append(getattr(__builtin__,fname))
+ except AttributeError:
+ continue
+
+_generatorType = type((y for y in range(1)))
+
+def _xml_escape(data):
+ """Escape &, <, >, ", ', etc. in a string of data."""
+
+ # ampersand must be replaced first
+ from_symbols = '&><"\''
+ to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
+ for from_,to_ in zip(from_symbols, to_symbols):
+ data = data.replace(from_, to_)
+ return data
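+
+# Example: with the replacements above,
+#   _xml_escape('a < b & "c"')  ->  'a &lt; b &amp; &quot;c&quot;'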
+
+alphas = string.ascii_uppercase + string.ascii_lowercase
+nums = "0123456789"
+hexnums = nums + "ABCDEFabcdef"
+alphanums = alphas + nums
+_bslash = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+class ParseBaseException(Exception):
+ """base exception class for all parsing runtime exceptions"""
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, pstr, loc=0, msg=None, elem=None ):
+ self.loc = loc
+ if msg is None:
+ self.msg = pstr
+ self.pstr = ""
+ else:
+ self.msg = msg
+ self.pstr = pstr
+ self.parserElement = elem
+ self.args = (pstr, loc, msg)
+
+ @classmethod
+ def _from_exception(cls, pe):
+ """
+ internal factory method to simplify creating one type of ParseException
+ from another - avoids having __init__ signature conflicts among subclasses
+ """
+ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+ def __getattr__( self, aname ):
+ """supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+ """
+ if( aname == "lineno" ):
+ return lineno( self.loc, self.pstr )
+ elif( aname in ("col", "column") ):
+ return col( self.loc, self.pstr )
+ elif( aname == "line" ):
+ return line( self.loc, self.pstr )
+ else:
+ raise AttributeError(aname)
+
+ def __str__( self ):
+ return "%s (at char %d), (line:%d, col:%d)" % \
+ ( self.msg, self.loc, self.lineno, self.column )
+ def __repr__( self ):
+ return _ustr(self)
+ def markInputline( self, markerString = ">!<" ):
+ """Extracts the exception line from the input string, and marks
+ the location of the exception with a special symbol.
+ """
+ line_str = self.line
+ line_column = self.column - 1
+ if markerString:
+ line_str = "".join((line_str[:line_column],
+ markerString, line_str[line_column:]))
+ return line_str.strip()
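+    # Example (sketch): for a ParseException pe with column 9 on the line
+    # "1999/12/wxyz", pe.markInputline() would return "1999/12/>!<wxyz".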
+ def __dir__(self):
+ return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+ """
+    Exception thrown when a parse expression doesn't match the input text;
+ supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+
+ Example::
+
+ try:
+ Word(nums).setName("integer").parseString("ABC")
+ except ParseException as pe:
+ print(pe)
+ print("column: {}".format(pe.col))
+
+ prints::
+
+ Expected integer (at char 0), (line:1, col:1)
+ column: 1
+
+ """
+
+ @staticmethod
+ def explain(exc, depth=16):
+ """
+ Method to take an exception and translate the Python internal traceback into a list
+ of the pyparsing expressions that caused the exception to be raised.
+
+ Parameters:
+
+ - exc - exception raised during parsing (need not be a ParseException, in support
+ of Python exceptions that might be raised in a parse action)
+ - depth (default=16) - number of levels back in the stack trace to list expression
+ and function names; if None, the full stack trace names will be listed; if 0, only
+ the failing input line, marker, and exception string will be shown
+
+ Returns a multi-line string listing the ParserElements and/or function names in the
+ exception's stack trace.
+
+ Note: the diagnostic output will include string representations of the expressions
+ that failed to parse. These representations will be more helpful if you use `setName` to
+ give identifiable names to your expressions. Otherwise they will use the default string
+ forms, which may be cryptic to read.
+
+ explain() is only supported under Python 3.
+ """
+ import inspect
+
+ if depth is None:
+ depth = sys.getrecursionlimit()
+ ret = []
+ if isinstance(exc, ParseBaseException):
+ ret.append(exc.line)
+ ret.append(' ' * (exc.col - 1) + '^')
+ ret.append("{0}: {1}".format(type(exc).__name__, exc))
+
+ if depth > 0:
+ callers = inspect.getinnerframes(exc.__traceback__, context=depth)
+ seen = set()
+ for i, ff in enumerate(callers[-depth:]):
+ frm = ff[0]
+
+ f_self = frm.f_locals.get('self', None)
+ if isinstance(f_self, ParserElement):
+ if frm.f_code.co_name not in ('parseImpl', '_parseNoCache'):
+ continue
+ if f_self in seen:
+ continue
+ seen.add(f_self)
+
+ self_type = type(f_self)
+ ret.append("{0}.{1} - {2}".format(self_type.__module__,
+ self_type.__name__,
+ f_self))
+ elif f_self is not None:
+ self_type = type(f_self)
+ ret.append("{0}.{1}".format(self_type.__module__,
+ self_type.__name__))
+ else:
+ code = frm.f_code
+ if code.co_name in ('wrapper', '<module>'):
+ continue
+
+ ret.append("{0}".format(code.co_name))
+
+ depth -= 1
+ if not depth:
+ break
+
+ return '\n'.join(ret)
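+
+    # Example (sketch): under Python 3, explain() can report the stack of
+    # failing expressions:
+    #
+    #   try:
+    #       Word(nums).setName("integer").parseString("ABC")
+    #   except ParseException as pe:
+    #       print(ParseException.explain(pe))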
+
+
+class ParseFatalException(ParseBaseException):
+ """user-throwable exception thrown when inconsistent parse content
+ is found; stops all parsing immediately"""
+ pass
+
+class ParseSyntaxException(ParseFatalException):
+ """just like :class:`ParseFatalException`, but thrown internally
+ when an :class:`ErrorStop<And._ErrorStop>` ('-' operator) indicates
+ that parsing is to stop immediately because an unbacktrackable
+ syntax error has been found.
+ """
+ pass
+
+#~ class ReparseException(ParseBaseException):
+ #~ """Experimental class - parse actions can raise this exception to cause
+ #~ pyparsing to reparse the input string:
+ #~ - with a modified input string, and/or
+ #~ - with a modified start location
+ #~ Set the values of the ReparseException in the constructor, and raise the
+ #~ exception in a parse action to cause pyparsing to use the new string/location.
+ #~ Setting the values as None causes no change to be made.
+ #~ """
    #~ def __init__( self, newstring, restartLoc ):
+ #~ self.newParseText = newstring
+ #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+ """exception thrown by :class:`ParserElement.validate` if the
+ grammar could be improperly recursive
+ """
+ def __init__( self, parseElementList ):
+ self.parseElementTrace = parseElementList
+
+ def __str__( self ):
+ return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+ def __init__(self,p1,p2):
+ self.tup = (p1,p2)
+ def __getitem__(self,i):
+ return self.tup[i]
+ def __repr__(self):
+ return repr(self.tup[0])
+ def setOffset(self,i):
+ self.tup = (self.tup[0],i)
+
+class ParseResults(object):
+ """Structured parse results, to provide multiple means of access to
+ the parsed data:
+
+ - as a list (``len(results)``)
+ - by list index (``results[0], results[1]``, etc.)
+ - by attribute (``results.<resultsName>`` - see :class:`ParserElement.setResultsName`)
+
+ Example::
+
+ integer = Word(nums)
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+ # equivalent form:
+ # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ # parseString returns a ParseResults object
+ result = date_str.parseString("1999/12/31")
+
+ def test(s, fn=repr):
+ print("%s -> %s" % (s, fn(eval(s))))
+ test("list(result)")
+ test("result[0]")
+ test("result['month']")
+ test("result.day")
+ test("'month' in result")
+ test("'minutes' in result")
+ test("result.dump()", str)
+
+ prints::
+
+ list(result) -> ['1999', '/', '12', '/', '31']
+ result[0] -> '1999'
+ result['month'] -> '12'
+ result.day -> '31'
+ 'month' in result -> True
+ 'minutes' in result -> False
+ result.dump() -> ['1999', '/', '12', '/', '31']
+ - day: 31
+ - month: 12
+ - year: 1999
+ """
+ def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
+ if isinstance(toklist, cls):
+ return toklist
+ retobj = object.__new__(cls)
+ retobj.__doinit = True
+ return retobj
+
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
+ if self.__doinit:
+ self.__doinit = False
+ self.__name = None
+ self.__parent = None
+ self.__accumNames = {}
+ self.__asList = asList
+ self.__modal = modal
+ if toklist is None:
+ toklist = []
+ if isinstance(toklist, list):
+ self.__toklist = toklist[:]
+ elif isinstance(toklist, _generatorType):
+ self.__toklist = list(toklist)
+ else:
+ self.__toklist = [toklist]
+ self.__tokdict = dict()
+
+ if name is not None and name:
+ if not modal:
+ self.__accumNames[name] = 0
+ if isinstance(name,int):
+ name = _ustr(name) # will always return a str, but use _ustr for consistency
+ self.__name = name
+ if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
+ if isinstance(toklist,basestring):
+ toklist = [ toklist ]
+ if asList:
+ if isinstance(toklist,ParseResults):
+ self[name] = _ParseResultsWithOffset(ParseResults(toklist.__toklist), 0)
+ else:
+ self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
+ self[name].__name = name
+ else:
+ try:
+ self[name] = toklist[0]
+ except (KeyError,TypeError,IndexError):
+ self[name] = toklist
+
+ def __getitem__( self, i ):
+ if isinstance( i, (int,slice) ):
+ return self.__toklist[i]
+ else:
+ if i not in self.__accumNames:
+ return self.__tokdict[i][-1][0]
+ else:
+ return ParseResults([ v[0] for v in self.__tokdict[i] ])
+
+ def __setitem__( self, k, v, isinstance=isinstance ):
+ if isinstance(v,_ParseResultsWithOffset):
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
+ sub = v[0]
+ elif isinstance(k,(int,slice)):
+ self.__toklist[k] = v
+ sub = v
+ else:
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
+ sub = v
+ if isinstance(sub,ParseResults):
+ sub.__parent = wkref(self)
+
+ def __delitem__( self, i ):
+ if isinstance(i,(int,slice)):
+ mylen = len( self.__toklist )
+ del self.__toklist[i]
+
+ # convert int to slice
+ if isinstance(i, int):
+ if i < 0:
+ i += mylen
+ i = slice(i, i+1)
+ # get removed indices
+ removed = list(range(*i.indices(mylen)))
+ removed.reverse()
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for j in removed:
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+ else:
+ del self.__tokdict[i]
+
+ def __contains__( self, k ):
+ return k in self.__tokdict
+
+ def __len__( self ): return len( self.__toklist )
+ def __bool__(self): return ( not not self.__toklist )
+ __nonzero__ = __bool__
+ def __iter__( self ): return iter( self.__toklist )
+ def __reversed__( self ): return iter( self.__toklist[::-1] )
+ def _iterkeys( self ):
+ if hasattr(self.__tokdict, "iterkeys"):
+ return self.__tokdict.iterkeys()
+ else:
+ return iter(self.__tokdict)
+
+ def _itervalues( self ):
+ return (self[k] for k in self._iterkeys())
+
+ def _iteritems( self ):
+ return ((k, self[k]) for k in self._iterkeys())
+
+ if PY_3:
+ keys = _iterkeys
+ """Returns an iterator of all named result keys."""
+
+ values = _itervalues
+ """Returns an iterator of all named result values."""
+
+ items = _iteritems
+ """Returns an iterator of all named result key-value tuples."""
+
+ else:
+ iterkeys = _iterkeys
+ """Returns an iterator of all named result keys (Python 2.x only)."""
+
+ itervalues = _itervalues
+ """Returns an iterator of all named result values (Python 2.x only)."""
+
+ iteritems = _iteritems
+ """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+ def keys( self ):
+ """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iterkeys())
+
+ def values( self ):
+ """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.itervalues())
+
+ def items( self ):
+ """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iteritems())
+
+ def haskeys( self ):
+ """Since keys() returns an iterator, this method is helpful in bypassing
+ code that looks for the existence of any defined results names."""
+ return bool(self.__tokdict)
+
+ def pop( self, *args, **kwargs):
+ """
+ Removes and returns item at specified index (default= ``last``).
+ Supports both ``list`` and ``dict`` semantics for ``pop()``. If
+ passed no argument or an integer argument, it will use ``list``
+ semantics and pop tokens from the list of parsed tokens. If passed
+ a non-integer argument (most likely a string), it will use ``dict``
+ semantics and pop the corresponding value from any defined results
+ names. A second default return value argument is supported, just as in
+ ``dict.pop()``.
+
+ Example::
+
+ def remove_first(tokens):
+ tokens.pop(0)
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+ print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+ label = Word(alphas)
+ patt = label("LABEL") + OneOrMore(Word(nums))
+ print(patt.parseString("AAB 123 321").dump())
+
+ # Use pop() in a parse action to remove named result (note that corresponding value is not
+ # removed from list form of results)
+ def remove_LABEL(tokens):
+ tokens.pop("LABEL")
+ return tokens
+ patt.addParseAction(remove_LABEL)
+ print(patt.parseString("AAB 123 321").dump())
+
+ prints::
+
+ ['AAB', '123', '321']
+ - LABEL: AAB
+
+ ['AAB', '123', '321']
+ """
+ if not args:
+ args = [-1]
+ for k,v in kwargs.items():
+ if k == 'default':
+ args = (args[0], v)
+ else:
+ raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+ if (isinstance(args[0], int) or
+ len(args) == 1 or
+ args[0] in self):
+ index = args[0]
+ ret = self[index]
+ del self[index]
+ return ret
+ else:
+ defaultvalue = args[1]
+ return defaultvalue
+
+ def get(self, key, defaultValue=None):
+ """
+ Returns named result matching the given key, or if there is no
+ such name, then returns the given ``defaultValue`` or ``None`` if no
+ ``defaultValue`` is specified.
+
+ Similar to ``dict.get()``.
+
+ Example::
+
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString("1999/12/31")
+ print(result.get("year")) # -> '1999'
+ print(result.get("hour", "not specified")) # -> 'not specified'
+ print(result.get("hour")) # -> None
+ """
+ if key in self:
+ return self[key]
+ else:
+ return defaultValue
+
+ def insert( self, index, insStr ):
+ """
+ Inserts new element at location index in the list of parsed tokens.
+
+ Similar to ``list.insert()``.
+
+ Example::
+
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to insert the parse location in the front of the parsed results
+ def insert_locn(locn, tokens):
+ tokens.insert(0, locn)
+ print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+ """
+ self.__toklist.insert(index, insStr)
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+ def append( self, item ):
+ """
+ Add single element to end of ParseResults list of elements.
+
+ Example::
+
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to compute the sum of the parsed integers, and add it to the end
+ def append_sum(tokens):
+ tokens.append(sum(map(int, tokens)))
+ print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+ """
+ self.__toklist.append(item)
+
+ def extend( self, itemseq ):
+ """
+ Add sequence of elements to end of ParseResults list of elements.
+
+ Example::
+
+ patt = OneOrMore(Word(alphas))
+
+ # use a parse action to append the reverse of the matched strings, to make a palindrome
+ def make_palindrome(tokens):
+ tokens.extend(reversed([t[::-1] for t in tokens]))
+ return ''.join(tokens)
+ print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+ """
+ if isinstance(itemseq, ParseResults):
+ self.__iadd__(itemseq)
+ else:
+ self.__toklist.extend(itemseq)
+
+ def clear( self ):
+ """
+ Clear all elements and results names.
+ """
+ del self.__toklist[:]
+ self.__tokdict.clear()
+
+ def __getattr__( self, name ):
+ try:
+ return self[name]
+ except KeyError:
+ return ""
+
+ def __add__( self, other ):
+ ret = self.copy()
+ ret += other
+ return ret
+
+ def __iadd__( self, other ):
+ if other.__tokdict:
+ offset = len(self.__toklist)
+ addoffset = lambda a: offset if a<0 else a+offset
+ otheritems = other.__tokdict.items()
+ otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
+ for (k,vlist) in otheritems for v in vlist]
+ for k,v in otherdictitems:
+ self[k] = v
+ if isinstance(v[0],ParseResults):
+ v[0].__parent = wkref(self)
+
+ self.__toklist += other.__toklist
+ self.__accumNames.update( other.__accumNames )
+ return self
+
+ def __radd__(self, other):
+ if isinstance(other,int) and other == 0:
+ # useful for merging many ParseResults using sum() builtin
+ return self.copy()
+ else:
+ # this may raise a TypeError - so be it
+ return other + self
+
+ def __repr__( self ):
+ return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+ def __str__( self ):
+ return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+ def _asStringList( self, sep='' ):
+ out = []
+ for item in self.__toklist:
+ if out and sep:
+ out.append(sep)
+ if isinstance( item, ParseResults ):
+ out += item._asStringList()
+ else:
+ out.append( _ustr(item) )
+ return out
+
+ def asList( self ):
+ """
+ Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+ Example::
+
+ patt = OneOrMore(Word(alphas))
+ result = patt.parseString("sldkj lsdkj sldkj")
+ # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+ print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+ # Use asList() to create an actual list
+ result_list = result.asList()
+ print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+ """
+ return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+ def asDict( self ):
+ """
+ Returns the named parse results as a nested dictionary.
+
+ Example::
+
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString('12/31/1999')
+ print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+ result_dict = result.asDict()
+ print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+ # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+ import json
+ print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+ print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+ """
+ if PY_3:
+ item_fn = self.items
+ else:
+ item_fn = self.iteritems
+
+ def toItem(obj):
+ if isinstance(obj, ParseResults):
+ if obj.haskeys():
+ return obj.asDict()
+ else:
+ return [toItem(v) for v in obj]
+ else:
+ return obj
+
+ return dict((k,toItem(v)) for k,v in item_fn())
+
+ def copy( self ):
+ """
+ Returns a new copy of a :class:`ParseResults` object.
+ """
+ ret = ParseResults( self.__toklist )
+ ret.__tokdict = dict(self.__tokdict.items())
+ ret.__parent = self.__parent
+ ret.__accumNames.update( self.__accumNames )
+ ret.__name = self.__name
+ return ret
+
+ def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
+ """
+ (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+ """
+ nl = "\n"
+ out = []
+ namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
+ for v in vlist)
+ nextLevelIndent = indent + " "
+
+ # collapse out indents if formatting is not desired
+ if not formatted:
+ indent = ""
+ nextLevelIndent = ""
+ nl = ""
+
+ selfTag = None
+ if doctag is not None:
+ selfTag = doctag
+ else:
+ if self.__name:
+ selfTag = self.__name
+
+ if not selfTag:
+ if namedItemsOnly:
+ return ""
+ else:
+ selfTag = "ITEM"
+
+ out += [ nl, indent, "<", selfTag, ">" ]
+
+ for i,res in enumerate(self.__toklist):
+ if isinstance(res,ParseResults):
+ if i in namedItems:
+ out += [ res.asXML(namedItems[i],
+ namedItemsOnly and doctag is None,
+ nextLevelIndent,
+ formatted)]
+ else:
+ out += [ res.asXML(None,
+ namedItemsOnly and doctag is None,
+ nextLevelIndent,
+ formatted)]
+ else:
+ # individual token, see if there is a name for it
+ resTag = None
+ if i in namedItems:
+ resTag = namedItems[i]
+ if not resTag:
+ if namedItemsOnly:
+ continue
+ else:
+ resTag = "ITEM"
+ xmlBodyText = _xml_escape(_ustr(res))
+ out += [ nl, nextLevelIndent, "<", resTag, ">",
+ xmlBodyText,
+ "</", resTag, ">" ]
+
+ out += [ nl, indent, "</", selfTag, ">" ]
+ return "".join(out)
+
+ def __lookup(self,sub):
+ for k,vlist in self.__tokdict.items():
+ for v,loc in vlist:
+ if sub is v:
+ return k
+ return None
+
+ def getName(self):
+ r"""
+ Returns the results name for this token expression. Useful when several
+ different expressions might match at a particular location.
+
+ Example::
+
+ integer = Word(nums)
+ ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+ house_number_expr = Suppress('#') + Word(nums, alphanums)
+ user_data = (Group(house_number_expr)("house_number")
+ | Group(ssn_expr)("ssn")
+ | Group(integer)("age"))
+ user_info = OneOrMore(user_data)
+
+ result = user_info.parseString("22 111-22-3333 #221B")
+ for item in result:
+ print(item.getName(), ':', item[0])
+
+ prints::
+
+ age : 22
+ ssn : 111-22-3333
+ house_number : 221B
+ """
+ if self.__name:
+ return self.__name
+ elif self.__parent:
+ par = self.__parent()
+ if par:
+ return par.__lookup(self)
+ else:
+ return None
+ elif (len(self) == 1 and
+ len(self.__tokdict) == 1 and
+ next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+ return next(iter(self.__tokdict.keys()))
+ else:
+ return None
+
+ def dump(self, indent='', depth=0, full=True):
+ """
+ Diagnostic method for listing out the contents of
+ a :class:`ParseResults`. Accepts an optional ``indent`` argument so
+ that this string can be embedded in a nested display of other data.
+
+ Example::
+
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString('12/31/1999')
+ print(result.dump())
+
+ prints::
+
+ ['12', '/', '31', '/', '1999']
+ - day: 1999
+ - month: 31
+ - year: 12
+ """
+ out = []
+ NL = '\n'
+ out.append( indent+_ustr(self.asList()) )
+ if full:
+ if self.haskeys():
+ items = sorted((str(k), v) for k,v in self.items())
+ for k,v in items:
+ if out:
+ out.append(NL)
+ out.append( "%s%s- %s: " % (indent,(' '*depth), k) )
+ if isinstance(v,ParseResults):
+ if v:
+ out.append( v.dump(indent,depth+1) )
+ else:
+ out.append(_ustr(v))
+ else:
+ out.append(repr(v))
+ elif any(isinstance(vv,ParseResults) for vv in self):
+ v = self
+ for i,vv in enumerate(v):
+ if isinstance(vv,ParseResults):
+ out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))
+ else:
+ out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))
+
+ return "".join(out)
+
+ def pprint(self, *args, **kwargs):
+ """
+ Pretty-printer for parsed results as a list, using the
+ `pprint <https://docs.python.org/3/library/pprint.html>`_ module.
+ Accepts additional positional or keyword args as defined for
+ `pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .
+
+ Example::
+
+ ident = Word(alphas, alphanums)
+ num = Word(nums)
+ func = Forward()
+ term = ident | num | Group('(' + func + ')')
+ func <<= ident + Group(Optional(delimitedList(term)))
+ result = func.parseString("fna a,b,(fnb c,d,200),100")
+ result.pprint(width=40)
+
+ prints::
+
+ ['fna',
+ ['a',
+ 'b',
+ ['(', 'fnb', ['c', 'd', '200'], ')'],
+ '100']]
+ """
+ pprint.pprint(self.asList(), *args, **kwargs)
+
+ # add support for pickle protocol
+ def __getstate__(self):
+ return ( self.__toklist,
+ ( self.__tokdict.copy(),
+ self.__parent is not None and self.__parent() or None,
+ self.__accumNames,
+ self.__name ) )
+
+ def __setstate__(self,state):
+ self.__toklist = state[0]
+ (self.__tokdict,
+ par,
+ inAccumNames,
+ self.__name) = state[1]
+ self.__accumNames = {}
+ self.__accumNames.update(inAccumNames)
+ if par is not None:
+ self.__parent = wkref(par)
+ else:
+ self.__parent = None
+
+ def __getnewargs__(self):
+ return self.__toklist, self.__name, self.__asList, self.__modal
+
+ def __dir__(self):
+ return (dir(type(self)) + list(self.keys()))
+
+MutableMapping.register(ParseResults)
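+
+# Example (sketch): ParseResults round-trips through the pickle protocol,
+# preserving tokens and results names (date_str as in the docstring examples):
+#
+#   import pickle
+#   result = date_str.parseString("1999/12/31")
+#   restored = pickle.loads(pickle.dumps(result))
+#   assert restored.asDict() == result.asDict()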
+
+def col (loc,strg):
+ """Returns current column within a string, counting newlines as line separators.
+ The first column is number 1.
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See
+ :class:`ParserElement.parseString` for more
+ information on parsing strings containing ``<TAB>`` s, and suggested
+ methods to maintain a consistent view of the parsed string, the parse
+ location, and line and column positions within the parsed string.
+ """
+ s = strg
+ return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc,strg):
+ """Returns current line number within a string, counting newlines as line separators.
+ The first line is number 1.
+
+ Note - the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See :class:`ParserElement.parseString`
+ for more information on parsing strings containing ``<TAB>`` s, and
+ suggested methods to maintain a consistent view of the parsed string, the
+ parse location, and line and column positions within the parsed string.
+ """
+ return strg.count("\n",0,loc) + 1
+
+def line( loc, strg ):
+ """Returns the line of text containing loc within a string, counting newlines as line separators.
+ """
+ lastCR = strg.rfind("\n", 0, loc)
+ nextCR = strg.find("\n", loc)
+ if nextCR >= 0:
+ return strg[lastCR+1:nextCR]
+ else:
+ return strg[lastCR+1:]
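+
+# Worked example for the three helpers above: with s = "abc\ndef" and
+# loc = 5 (pointing at the "e"):
+#   lineno(5, s) -> 2
+#   col(5, s)    -> 2
+#   line(5, s)   -> "def"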
+
+def _defaultStartDebugAction( instring, loc, expr ):
+ print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
+
+def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
+ print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction( instring, loc, expr, exc ):
+ print ("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+ """'Do-nothing' debug action, to suppress debugging output during parsing."""
+ pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+ #~ if func in singleArgBuiltins:
+ #~ return lambda s,l,t: func(t)
+ #~ limit = 0
+ #~ foundArity = False
+ #~ def wrapper(*args):
+ #~ nonlocal limit,foundArity
+ #~ while 1:
+ #~ try:
+ #~ ret = func(*args[limit:])
+ #~ foundArity = True
+ #~ return ret
+ #~ except TypeError:
+ #~ if limit == maxargs or foundArity:
+ #~ raise
+ #~ limit += 1
+ #~ continue
+ #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+'decorator to trim function calls to match the arity of the target'
+def _trim_arity(func, maxargs=2):
+ if func in singleArgBuiltins:
+ return lambda s,l,t: func(t)
+ limit = [0]
+ foundArity = [False]
+
+ # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+ if system_version[:2] >= (3,5):
+ def extract_stack(limit=0):
+ # special handling for Python 3.5.0 - extra deep call stack by 1
+ offset = -3 if system_version == (3,5,0) else -2
+ frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
+ return [frame_summary[:2]]
+ def extract_tb(tb, limit=0):
+ frames = traceback.extract_tb(tb, limit=limit)
+ frame_summary = frames[-1]
+ return [frame_summary[:2]]
+ else:
+ extract_stack = traceback.extract_stack
+ extract_tb = traceback.extract_tb
+
+ # synthesize what would be returned by traceback.extract_stack at the call to
+ # user's parse action 'func', so that we don't incur call penalty at parse time
+
+ LINE_DIFF = 6
+ # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND
+ # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+ this_line = extract_stack(limit=2)[-1]
+ pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
+
+ def wrapper(*args):
+ while 1:
+ try:
+ ret = func(*args[limit[0]:])
+ foundArity[0] = True
+ return ret
+ except TypeError:
+ # re-raise TypeErrors if they did not come from our arity testing
+ if foundArity[0]:
+ raise
+ else:
+ try:
+ tb = sys.exc_info()[-1]
+ if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+ raise
+ finally:
+ del tb
+
+ if limit[0] <= maxargs:
+ limit[0] += 1
+ continue
+ raise
+
+ # copy func name to wrapper for sensible debug output
+ func_name = "<parse action>"
+ try:
+ func_name = getattr(func, '__name__',
+ getattr(func, '__class__').__name__)
+ except Exception:
+ func_name = str(func)
+ wrapper.__name__ = func_name
+
+ return wrapper
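+
+# Example: _trim_arity lets parse actions declare only the arguments they
+# need; given integer = Word(nums), all of the following are accepted:
+#
+#   integer.setParseAction(lambda t: int(t[0]))          # fn(toks)
+#   integer.setParseAction(lambda l, t: int(t[0]))       # fn(loc, toks)
+#   integer.setParseAction(lambda s, l, t: int(t[0]))    # fn(s, loc, toks)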
+
+class ParserElement(object):
+ """Abstract base level parser element class."""
+ DEFAULT_WHITE_CHARS = " \n\t\r"
+ verbose_stacktrace = False
+
+ @staticmethod
+ def setDefaultWhitespaceChars( chars ):
+ r"""
+ Overrides the default whitespace chars
+
+ Example::
+
+ # default whitespace chars are space, <TAB> and newline
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl']
+
+ # change to just treat newline as significant
+ ParserElement.setDefaultWhitespaceChars(" \t")
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def']
+ """
+ ParserElement.DEFAULT_WHITE_CHARS = chars
+
+ @staticmethod
+ def inlineLiteralsUsing(cls):
+ """
+ Set class to be used for inclusion of string literals into a parser.
+
+ Example::
+
+ # default literal class used is Literal
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+
+ # change to Suppress
+ ParserElement.inlineLiteralsUsing(Suppress)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '12', '31']
+ """
+ ParserElement._literalStringClass = cls
+
+ def __init__( self, savelist=False ):
+ self.parseAction = list()
+ self.failAction = None
+ #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall
+ self.strRepr = None
+ self.resultsName = None
+ self.saveAsList = savelist
+ self.skipWhitespace = True
+ self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS)
+ self.copyDefaultWhiteChars = True
+ self.mayReturnEmpty = False # used when checking for left-recursion
+ self.keepTabs = False
+ self.ignoreExprs = list()
+ self.debug = False
+ self.streamlined = False
+ self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+ self.errmsg = ""
+ self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+ self.debugActions = ( None, None, None ) #custom debug actions
+ self.re = None
+ self.callPreparse = True # used to avoid redundant calls to preParse
+ self.callDuringTry = False
+
+ def copy( self ):
+ """
+ Make a copy of this :class:`ParserElement`. Useful for defining
+ different parse actions for the same parsing pattern, using copies of
+ the original parse element.
+
+ Example::
+
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
+ integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+
+ print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+
+ prints::
+
+ [5120, 100, 655360, 268435456]
+
+ Equivalent form of ``expr.copy()`` is just ``expr()``::
+
+ integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+ """
+ cpy = copy.copy( self )
+ cpy.parseAction = self.parseAction[:]
+ cpy.ignoreExprs = self.ignoreExprs[:]
+ if self.copyDefaultWhiteChars:
+ cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+ return cpy
+
+ def setName( self, name ):
+ """
+ Define name for this expression, makes debugging and exception messages clearer.
+
+ Example::
+
+ Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+ Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1)
+ """
+ self.name = name
+ self.errmsg = "Expected " + self.name
+ if hasattr(self,"exception"):
+ self.exception.msg = self.errmsg
+ return self
+
+ def setResultsName( self, name, listAllMatches=False ):
+ """
+ Define name for referencing matching tokens as a nested attribute
+ of the returned parse results.
+ NOTE: this returns a *copy* of the original :class:`ParserElement` object;
+ this is so that the client can define a basic element, such as an
+ integer, and reference it in multiple places with different names.
+
+ You can also set results names using the abbreviated syntax,
+ ``expr("name")`` in place of ``expr.setResultsName("name")``
+ - see :class:`__call__`.
+
+ Example::
+
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+
+ # equivalent form:
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+ """
+ newself = self.copy()
+ if name.endswith("*"):
+ name = name[:-1]
+ listAllMatches=True
+ newself.resultsName = name
+ newself.modalResults = not listAllMatches
+ return newself
+
+ def setBreak(self,breakFlag = True):
+ """Method to invoke the Python pdb debugger when this element is
+ about to be parsed. Set ``breakFlag`` to True to enable, False to
+ disable.
+ """
+ if breakFlag:
+ _parseMethod = self._parse
+ def breaker(instring, loc, doActions=True, callPreParse=True):
+ import pdb
+ pdb.set_trace()
+ return _parseMethod( instring, loc, doActions, callPreParse )
+ breaker._originalParseMethod = _parseMethod
+ self._parse = breaker
+ else:
+ if hasattr(self._parse,"_originalParseMethod"):
+ self._parse = self._parse._originalParseMethod
+ return self
+
+ def setParseAction( self, *fns, **kwargs ):
+ """
+ Define one or more actions to perform when successfully matching parse element definition.
+ Parse action fn is a callable method with 0-3 arguments, called as ``fn(s,loc,toks)`` ,
+ ``fn(loc,toks)`` , ``fn(toks)`` , or just ``fn()`` , where:
+
+ - s = the original string being parsed (see note below)
+ - loc = the location of the matching substring
+ - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object
+
+ If the functions in fns modify the tokens, they can return them as the return
+ value from fn, and the modified list of tokens will replace the original.
+ Otherwise, fn does not need to return any value.
+
+ Optional keyword arguments:
+ - callDuringTry = (default= ``False`` ) indicate if parse action should be run during lookaheads and alternate testing
+
+ Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process. See :class:`parseString` for more
+ information on parsing strings containing ``<TAB>`` s, and suggested
+ methods to maintain a consistent view of the parsed string, the parse
+ location, and line and column positions within the parsed string.
+
+ Example::
+
+ integer = Word(nums)
+ date_str = integer + '/' + integer + '/' + integer
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+ # use parse action to convert to ints at parse time
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ date_str = integer + '/' + integer + '/' + integer
+
+ # note that integer fields are now ints, not strings
+ date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31]
+ """
+ self.parseAction = list(map(_trim_arity, list(fns)))
+ self.callDuringTry = kwargs.get("callDuringTry", False)
+ return self
+
+ def addParseAction( self, *fns, **kwargs ):
+ """
+ Add one or more parse actions to expression's list of parse actions. See :class:`setParseAction`.
+
+ See examples in :class:`copy`.
+ """
+ self.parseAction += list(map(_trim_arity, list(fns)))
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def addCondition(self, *fns, **kwargs):
+ """Add a boolean predicate function to expression's list of parse actions. See
+ :class:`setParseAction` for function call signatures. Unlike ``setParseAction``,
+ functions passed to ``addCondition`` need to return boolean success/fail of the condition.
+
+ Optional keyword arguments:
+ - message = define a custom message to be used in the raised exception
+ - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+ Example::
+
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ year_int = integer.copy()
+ year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+ date_str = year_int + '/' + integer + '/' + integer
+
+ result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+ """
+ msg = kwargs.get("message", "failed user-defined condition")
+ exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+ for fn in fns:
+ fn = _trim_arity(fn)
+            def pa(s,l,t, fn=fn):  # default-arg binding keeps each condition's fn (avoids late binding across the loop)
+ if not bool(fn(s,l,t)):
+ raise exc_type(s,l,msg)
+ self.parseAction.append(pa)
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def setFailAction( self, fn ):
+ """Define action to perform if parsing fails at this expression.
+        Fail action fn is a callable function that takes the arguments
+ ``fn(s,loc,expr,err)`` where:
+ - s = string being parsed
+ - loc = location where expression match was attempted and failed
+ - expr = the parse expression that failed
+ - err = the exception thrown
+ The function returns no value. It may throw :class:`ParseFatalException`
+ if it is desired to stop parsing immediately."""
+ self.failAction = fn
+ return self
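+
+    # Example (sketch, with a hypothetical handler name):
+    #
+    #   def report_failure(s, loc, expr, err):
+    #       print("failed to match %s at loc %d" % (expr, loc))
+    #   integer.setFailAction(report_failure)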
+
+ def _skipIgnorables( self, instring, loc ):
+ exprsFound = True
+ while exprsFound:
+ exprsFound = False
+ for e in self.ignoreExprs:
+ try:
+ while 1:
+ loc,dummy = e._parse( instring, loc )
+ exprsFound = True
+ except ParseException:
+ pass
+ return loc
+
+ def preParse( self, instring, loc ):
+ if self.ignoreExprs:
+ loc = self._skipIgnorables( instring, loc )
+
+ if self.skipWhitespace:
+ wt = self.whiteChars
+ instrlen = len(instring)
+ while loc < instrlen and instring[loc] in wt:
+ loc += 1
+
+ return loc
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ return loc, []
+
+ def postParse( self, instring, loc, tokenlist ):
+ return tokenlist
+
+ #~ @profile
+ def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
+ debugging = ( self.debug ) #and doActions )
+
+ if debugging or self.failAction:
+ #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
+ if (self.debugActions[0] ):
+ self.debugActions[0]( instring, loc, self )
+ if callPreParse and self.callPreparse:
+ preloc = self.preParse( instring, loc )
+ else:
+ preloc = loc
+ tokensStart = preloc
+ try:
+ try:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+ except IndexError:
+ raise ParseException( instring, len(instring), self.errmsg, self )
+ except ParseBaseException as err:
+ #~ print ("Exception raised:", err)
+ if self.debugActions[2]:
+ self.debugActions[2]( instring, tokensStart, self, err )
+ if self.failAction:
+ self.failAction( instring, tokensStart, self, err )
+ raise
+ else:
+ if callPreParse and self.callPreparse:
+ preloc = self.preParse( instring, loc )
+ else:
+ preloc = loc
+ tokensStart = preloc
+ if self.mayIndexError or preloc >= len(instring):
+ try:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+ except IndexError:
+ raise ParseException( instring, len(instring), self.errmsg, self )
+ else:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+
+ tokens = self.postParse( instring, loc, tokens )
+
+ retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
+ if self.parseAction and (doActions or self.callDuringTry):
+ if debugging:
+ try:
+ for fn in self.parseAction:
+ try:
+ tokens = fn( instring, tokensStart, retTokens )
+ except IndexError as parse_action_exc:
+ exc = ParseException("exception raised in parse action")
+ exc.__cause__ = parse_action_exc
+ raise exc
+
+ if tokens is not None and tokens is not retTokens:
+ retTokens = ParseResults( tokens,
+ self.resultsName,
+ asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+ modal=self.modalResults )
+ except ParseBaseException as err:
+ #~ print "Exception raised in user parse action:", err
+ if (self.debugActions[2] ):
+ self.debugActions[2]( instring, tokensStart, self, err )
+ raise
+ else:
+ for fn in self.parseAction:
+ try:
+ tokens = fn( instring, tokensStart, retTokens )
+ except IndexError as parse_action_exc:
+ exc = ParseException("exception raised in parse action")
+ exc.__cause__ = parse_action_exc
+ raise exc
+
+ if tokens is not None and tokens is not retTokens:
+ retTokens = ParseResults( tokens,
+ self.resultsName,
+ asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+ modal=self.modalResults )
+ if debugging:
+ #~ print ("Matched",self,"->",retTokens.asList())
+ if (self.debugActions[1] ):
+ self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
+
+ return loc, retTokens
+
+ def tryParse( self, instring, loc ):
+ try:
+ return self._parse( instring, loc, doActions=False )[0]
+ except ParseFatalException:
+ raise ParseException( instring, loc, self.errmsg, self)
+
+ def canParseNext(self, instring, loc):
+ try:
+ self.tryParse(instring, loc)
+ except (ParseException, IndexError):
+ return False
+ else:
+ return True
+
+ class _UnboundedCache(object):
+ def __init__(self):
+ cache = {}
+ self.not_in_cache = not_in_cache = object()
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+
+ def clear(self):
+ cache.clear()
+
+ def cache_len(self):
+ return len(cache)
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
+
+ if _OrderedDict is not None:
+ class _FifoCache(object):
+ def __init__(self, size):
+ self.not_in_cache = not_in_cache = object()
+
+ cache = _OrderedDict()
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+ while len(cache) > size:
+ try:
+ cache.popitem(False)
+ except KeyError:
+ pass
+
+ def clear(self):
+ cache.clear()
+
+ def cache_len(self):
+ return len(cache)
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
+
+ else:
+ class _FifoCache(object):
+ def __init__(self, size):
+ self.not_in_cache = not_in_cache = object()
+
+ cache = {}
+ key_fifo = collections.deque([], size)
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+ while len(key_fifo) > size:
+ cache.pop(key_fifo.popleft(), None)
+ key_fifo.append(key)
+
+ def clear(self):
+ cache.clear()
+ key_fifo.clear()
+
+ def cache_len(self):
+ return len(cache)
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
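+
+    # Example (sketch): both cache classes expose the same get/set/clear
+    # interface; with the OrderedDict-based FIFO cache, the oldest entry
+    # is evicted first:
+    #
+    #   c = ParserElement._FifoCache(2)
+    #   c.set("a", 1); c.set("b", 2); c.set("c", 3)   # "a" is evicted
+    #   c.get("a") is c.not_in_cache                  # -> True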
+
+ # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+ packrat_cache_lock = RLock()
+ packrat_cache_stats = [0, 0]
+
+ # this method gets repeatedly called during backtracking with the same arguments -
+ # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+ def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
+ HIT, MISS = 0, 1
+ lookup = (self, instring, loc, callPreParse, doActions)
+ with ParserElement.packrat_cache_lock:
+ cache = ParserElement.packrat_cache
+ value = cache.get(lookup)
+ if value is cache.not_in_cache:
+ ParserElement.packrat_cache_stats[MISS] += 1
+ try:
+ value = self._parseNoCache(instring, loc, doActions, callPreParse)
+ except ParseBaseException as pe:
+ # cache a copy of the exception, without the traceback
+ cache.set(lookup, pe.__class__(*pe.args))
+ raise
+ else:
+ cache.set(lookup, (value[0], value[1].copy()))
+ return value
+ else:
+ ParserElement.packrat_cache_stats[HIT] += 1
+ if isinstance(value, Exception):
+ raise value
+ return (value[0], value[1].copy())
+
+ _parse = _parseNoCache
+
+ @staticmethod
+ def resetCache():
+ ParserElement.packrat_cache.clear()
+ ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+ _packratEnabled = False
+ @staticmethod
+ def enablePackrat(cache_size_limit=128):
+ """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+ Repeated parse attempts at the same string location (which happens
+ often in many complex grammars) can immediately return a cached value,
+        instead of re-executing parsing/validating code. Both valid results
+        and parsing exceptions are memoized.
+
+ Parameters:
+
+ - cache_size_limit - (default= ``128``) - if an integer value is provided
+ will limit the size of the packrat cache; if None is passed, then
+ the cache size will be unbounded; if 0 is passed, the cache will
+ be effectively disabled.
+
+ This speedup may break existing programs that use parse actions that
+ have side-effects. For this reason, packrat parsing is disabled when
+ you first import pyparsing. To activate the packrat feature, your
+ program must call the class method :class:`ParserElement.enablePackrat`.
+ For best results, call ``enablePackrat()`` immediately after
+ importing pyparsing.
+
+ Example::
+
+ from pip._vendor import pyparsing
+ pyparsing.ParserElement.enablePackrat()
+ """
+ if not ParserElement._packratEnabled:
+ ParserElement._packratEnabled = True
+ if cache_size_limit is None:
+ ParserElement.packrat_cache = ParserElement._UnboundedCache()
+ else:
+ ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+ ParserElement._parse = ParserElement._parseCache
+
+ def parseString( self, instring, parseAll=False ):
+ """
+ Execute the parse expression with the given string.
+ This is the main interface to the client code, once the complete
+ expression has been built.
+
+ If you want the grammar to require that the entire input string be
+ successfully parsed, then set ``parseAll`` to True (equivalent to ending
+ the grammar with ``StringEnd()``).
+
+ Note: ``parseString`` implicitly calls ``expandtabs()`` on the input string,
+ in order to report proper column numbers in parse actions.
+ If the input string contains tabs and
+ the grammar uses parse actions that use the ``loc`` argument to index into the
+ string being parsed, you can ensure you have a consistent view of the input
+ string by:
+
+ - calling ``parseWithTabs`` on your grammar before calling ``parseString``
+ (see :class:`parseWithTabs`)
+ - define your parse action using the full ``(s,loc,toks)`` signature, and
+ reference the input string using the parse action's ``s`` argument
+        - explicitly expand the tabs in your input string before calling
+ ``parseString``
+
+ Example::
+
+ Word('a').parseString('aaaaabaaa') # -> ['aaaaa']
+ Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text
+ """
+ ParserElement.resetCache()
+ if not self.streamlined:
+ self.streamline()
+ #~ self.saveAsList = True
+ for e in self.ignoreExprs:
+ e.streamline()
+ if not self.keepTabs:
+ instring = instring.expandtabs()
+ try:
+ loc, tokens = self._parse( instring, 0 )
+ if parseAll:
+ loc = self.preParse( instring, loc )
+ se = Empty() + StringEnd()
+ se._parse( instring, loc )
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+ else:
+ return tokens
+
+ def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+ """
+ Scan the input string for expression matches. Each match will return the
+ matching tokens, start location, and end location. May be called with optional
+ ``maxMatches`` argument, to clip scanning after 'n' matches are found. If
+ ``overlap`` is specified, then overlapping matches will be reported.
+
+ Note that the start and end locations are reported relative to the string
+ being parsed. See :class:`parseString` for more information on parsing
+ strings with embedded tabs.
+
+ Example::
+
+ source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+ print(source)
+ for tokens,start,end in Word(alphas).scanString(source):
+ print(' '*start + '^'*(end-start))
+ print(' '*start + tokens[0])
+
+ prints::
+
+ sldjf123lsdjjkf345sldkjf879lkjsfd987
+ ^^^^^
+ sldjf
+ ^^^^^^^
+ lsdjjkf
+ ^^^^^^
+ sldkjf
+ ^^^^^^
+ lkjsfd
+ """
+ if not self.streamlined:
+ self.streamline()
+ for e in self.ignoreExprs:
+ e.streamline()
+
+ if not self.keepTabs:
+ instring = _ustr(instring).expandtabs()
+ instrlen = len(instring)
+ loc = 0
+ preparseFn = self.preParse
+ parseFn = self._parse
+ ParserElement.resetCache()
+ matches = 0
+ try:
+ while loc <= instrlen and matches < maxMatches:
+ try:
+ preloc = preparseFn( instring, loc )
+ nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
+ except ParseException:
+ loc = preloc+1
+ else:
+ if nextLoc > loc:
+ matches += 1
+ yield tokens, preloc, nextLoc
+ if overlap:
+ nextloc = preparseFn( instring, loc )
+ if nextloc > loc:
+ loc = nextLoc
+ else:
+ loc += 1
+ else:
+ loc = nextLoc
+ else:
+ loc = preloc+1
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def transformString( self, instring ):
+ """
+ Extension to :class:`scanString`, which replaces the matched text with the modified
+ tokens returned from a parse action. To use ``transformString``, define a grammar and
+ attach a parse action to it that modifies the returned token list.
+ Invoking ``transformString()`` on a target string will then scan for matches,
+ and replace the matched text patterns according to the logic in the parse
+ action. ``transformString()`` returns the resulting transformed string.
+
+ Example::
+
+ wd = Word(alphas)
+ wd.setParseAction(lambda toks: toks[0].title())
+
+ print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+
+ prints::
+
+ Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+ """
+ out = []
+ lastE = 0
+ # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
+ # keep string locs straight between transformString and scanString
+ self.keepTabs = True
+ try:
+ for t,s,e in self.scanString( instring ):
+ out.append( instring[lastE:s] )
+ if t:
+ if isinstance(t,ParseResults):
+ out += t.asList()
+ elif isinstance(t,list):
+ out += t
+ else:
+ out.append(t)
+ lastE = e
+ out.append(instring[lastE:])
+ out = [o for o in out if o]
+ return "".join(map(_ustr,_flatten(out)))
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def searchString( self, instring, maxMatches=_MAX_INT ):
+ """
+ Another extension to :class:`scanString`, simplifying the access to the tokens found
+ to match the given parse expression. May be called with optional
+ ``maxMatches`` argument, to clip searching after 'n' matches are found.
+
+ Example::
+
+ # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+ cap_word = Word(alphas.upper(), alphas.lower())
+
+ print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+ # the sum() builtin can be used to merge results into a single ParseResults object
+ print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
+
+ prints::
+
+ [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+ ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
+ """
+ try:
+ return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+ """
+ Generator method to split a string using the given expression as a separator.
+ May be called with optional ``maxsplit`` argument, to limit the number of splits;
+ and the optional ``includeSeparators`` argument (default= ``False``), indicating
+ whether the separating matched text should be included in the split results.
+
+ Example::
+
+ punc = oneOf(list(".,;:/-!?"))
+ print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+
+ prints::
+
+ ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+ """
+ splits = 0
+ last = 0
+ for t,s,e in self.scanString(instring, maxMatches=maxsplit):
+ yield instring[last:s]
+ if includeSeparators:
+ yield t[0]
+ last = e
+ yield instring[last:]
+
+ def __add__(self, other ):
+ """
+ Implementation of + operator - returns :class:`And`. Adding strings to a ParserElement
+ converts them to :class:`Literal`s by default.
+
+ Example::
+
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+
+ prints::
+
+ Hello, World! -> ['Hello', ',', 'World', '!']
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return And( [ self, other ] )
+
+ def __radd__(self, other ):
+ """
+ Implementation of + operator when left operand is not a :class:`ParserElement`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other + self
+
+ def __sub__(self, other):
+ """
+ Implementation of - operator, returns :class:`And` with error stop
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return self + And._ErrorStop() + other
+
+ def __rsub__(self, other ):
+ """
+ Implementation of - operator when left operand is not a :class:`ParserElement`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other - self
+
+ def __mul__(self,other):
+ """
+ Implementation of * operator, allows use of ``expr * 3`` in place of
+ ``expr + expr + expr``. Expressions may also be multiplied by a 2-integer
+ tuple, similar to ``{min,max}`` multipliers in regular expressions. Tuples
+ may also include ``None`` as in:
+ - ``expr*(n,None)`` or ``expr*(n,)`` is equivalent
+ to ``expr*n + ZeroOrMore(expr)``
+ (read as "at least n instances of ``expr``")
+ - ``expr*(None,n)`` is equivalent to ``expr*(0,n)``
+ (read as "0 to n instances of ``expr``")
+ - ``expr*(None,None)`` is equivalent to ``ZeroOrMore(expr)``
+ - ``expr*(1,None)`` is equivalent to ``OneOrMore(expr)``
+
+ Note that ``expr*(None,n)`` does not raise an exception if
+ more than n exprs exist in the input stream; that is,
+ ``expr*(None,n)`` does not enforce a maximum number of expr
+ occurrences. If this behavior is desired, then write
+ ``expr*(None,n) + ~expr``
+ """
+ if isinstance(other,int):
+ minElements, optElements = other,0
+ elif isinstance(other,tuple):
+ other = (other + (None, None))[:2]
+ if other[0] is None:
+ other = (0, other[1])
+ if isinstance(other[0],int) and other[1] is None:
+ if other[0] == 0:
+ return ZeroOrMore(self)
+ if other[0] == 1:
+ return OneOrMore(self)
+ else:
+ return self*other[0] + ZeroOrMore(self)
+ elif isinstance(other[0],int) and isinstance(other[1],int):
+ minElements, optElements = other
+ optElements -= minElements
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and ('%s', '%s') objects" % (type(other[0]), type(other[1])))
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))
+
+ if minElements < 0:
+ raise ValueError("cannot multiply ParserElement by negative value")
+ if optElements < 0:
+ raise ValueError("second tuple value must be greater than or equal to first tuple value")
+ if minElements == optElements == 0:
+ raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+ if (optElements):
+ def makeOptionalList(n):
+ if n>1:
+ return Optional(self + makeOptionalList(n-1))
+ else:
+ return Optional(self)
+ if minElements:
+ if minElements == 1:
+ ret = self + makeOptionalList(optElements)
+ else:
+ ret = And([self]*minElements) + makeOptionalList(optElements)
+ else:
+ ret = makeOptionalList(optElements)
+ else:
+ if minElements == 1:
+ ret = self
+ else:
+ ret = And([self]*minElements)
+ return ret
+
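+ # Illustrative sketch (not part of the original source) of the tuple
+ # multiplication semantics documented in ``__mul__`` above:
+ #
+ #   digit = Word(nums)
+ #   (digit * 3).parseString("1 2 3")           # exactly 3  -> ['1', '2', '3']
+ #   (digit * (2, None)).parseString("1 2 3")   # at least 2 -> ['1', '2', '3']
+ #   (digit * (None, 2)).parseString("1 2 3")   # at most 2  -> ['1', '2']
+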
+ def __rmul__(self, other):
+ return self.__mul__(other)
+
+ def __or__(self, other ):
+ """
+ Implementation of | operator - returns :class:`MatchFirst`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return MatchFirst( [ self, other ] )
+
+ def __ror__(self, other ):
+ """
+ Implementation of | operator when left operand is not a :class:`ParserElement`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other | self
+
+ def __xor__(self, other ):
+ """
+ Implementation of ^ operator - returns :class:`Or`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return Or( [ self, other ] )
+
+ def __rxor__(self, other ):
+ """
+ Implementation of ^ operator when left operand is not a :class:`ParserElement`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other ^ self
+
+ def __and__(self, other ):
+ """
+ Implementation of & operator - returns :class:`Each`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return Each( [ self, other ] )
+
+ def __rand__(self, other ):
+ """
+ Implementation of & operator when left operand is not a :class:`ParserElement`
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other & self
+
+ def __invert__( self ):
+ """
+ Implementation of ~ operator - returns :class:`NotAny`
+ """
+ return NotAny( self )
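+
+ # Illustrative sketch (not part of the original source) of the operator
+ # overloads defined above:
+ #
+ #   ident = ~Keyword("end") + Word(alphas)   # '~' -> NotAny lookahead
+ #   value = Word(nums) | Word(alphas)        # '|' -> MatchFirst
+ #   flags = Keyword("a") & Keyword("b")      # '&' -> Each (match in any order)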
+
+ def __call__(self, name=None):
+ """
+ Shortcut for :class:`setResultsName`, with ``listAllMatches=False``.
+
+ If ``name`` is given with a trailing ``'*'`` character, then ``listAllMatches`` will be
+ passed as ``True``.
+
+ If ``name`` is omitted, same as calling :class:`copy`.
+
+ Example::
+
+ # these are equivalent
+ userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
+ userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
+ """
+ if name is not None:
+ return self.setResultsName(name)
+ else:
+ return self.copy()
+
+ def suppress( self ):
+ """
+ Suppresses the output of this :class:`ParserElement`; useful to keep punctuation from
+ cluttering up returned output.
+ """
+ return Suppress( self )
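+
+ # Illustrative sketch (not part of the original source): suppressing
+ # punctuation so it does not clutter the returned tokens:
+ #
+ #   pair = Word(alphas) + Literal(",").suppress() + Word(alphas)
+ #   pair.parseString("red, green")   # -> ['red', 'green']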
+
+ def leaveWhitespace( self ):
+ """
+ Disables the skipping of whitespace before matching the characters in the
+ :class:`ParserElement`'s defined pattern. This is normally only used internally by
+ the pyparsing module, but may be needed in some whitespace-sensitive grammars.
+ """
+ self.skipWhitespace = False
+ return self
+
+ def setWhitespaceChars( self, chars ):
+ """
+ Overrides the default whitespace chars
+ """
+ self.skipWhitespace = True
+ self.whiteChars = chars
+ self.copyDefaultWhiteChars = False
+ return self
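+
+ # Illustrative sketch (not part of the original source): restricting the
+ # skippable whitespace so that newlines become significant:
+ #
+ #   wd = Word(alphas).setWhitespaceChars(" \t")
+ #   OneOrMore(wd).parseString("abc def")    # -> ['abc', 'def']
+ #   OneOrMore(wd).parseString("abc\ndef")   # stops at the newline -> ['abc']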
+
+ def parseWithTabs( self ):
+ """
+ Overrides the default behavior of expanding ``<TAB>``s to spaces before parsing,
+ preserving tabs in the input string. Must be called before ``parseString`` when
+ the input grammar contains elements that match ``<TAB>`` characters.
+ """
+ self.keepTabs = True
+ return self
+
+ def ignore( self, other ):
+ """
+ Define expression to be ignored (e.g., comments) while doing pattern
+ matching; may be called repeatedly, to define multiple comment or other
+ ignorable patterns.
+
+ Example::
+
+ patt = OneOrMore(Word(alphas))
+ patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+
+ patt.ignore(cStyleComment)
+ patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+ """
+ if isinstance(other, basestring):
+ other = Suppress(other)
+
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ self.ignoreExprs.append(other)
+ else:
+ self.ignoreExprs.append( Suppress( other.copy() ) )
+ return self
+
+ def setDebugActions( self, startAction, successAction, exceptionAction ):
+ """
+ Enable display of debugging messages while doing pattern matching,
+ using the given start, success, and exception debug functions.
+ """
+ self.debugActions = (startAction or _defaultStartDebugAction,
+ successAction or _defaultSuccessDebugAction,
+ exceptionAction or _defaultExceptionDebugAction)
+ self.debug = True
+ return self
+
+ def setDebug( self, flag=True ):
+ """
+ Enable display of debugging messages while doing pattern matching.
+ Set ``flag`` to True to enable, False to disable.
+
+ Example::
+
+ wd = Word(alphas).setName("alphaword")
+ integer = Word(nums).setName("numword")
+ term = wd | integer
+
+ # turn on debugging for wd
+ wd.setDebug()
+
+ OneOrMore(term).parseString("abc 123 xyz 890")
+
+ prints::
+
+ Match alphaword at loc 0(1,1)
+ Matched alphaword -> ['abc']
+ Match alphaword at loc 3(1,4)
+ Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+ Match alphaword at loc 7(1,8)
+ Matched alphaword -> ['xyz']
+ Match alphaword at loc 11(1,12)
+ Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+ Match alphaword at loc 15(1,16)
+ Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+ The output shown is that produced by the default debug actions - custom debug actions can be
+ specified using :class:`setDebugActions`. Prior to attempting
+ to match the ``wd`` expression, the debugging message ``"Match <exprname> at loc <n>(<line>,<col>)"``
+ is shown. Then if the parse succeeds, a ``"Matched"`` message is shown, or an ``"Exception raised"``
+ message is shown. Also note the use of :class:`setName` to assign a human-readable name to the expression,
+ which makes debugging and exception messages easier to understand - for instance, the default
+ name created for the :class:`Word` expression without calling ``setName`` is ``"W:(ABCD...)"``.
+ """
+ if flag:
+ self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
+ else:
+ self.debug = False
+ return self
+
+ def __str__( self ):
+ return self.name
+
+ def __repr__( self ):
+ return _ustr(self)
+
+ def streamline( self ):
+ self.streamlined = True
+ self.strRepr = None
+ return self
+
+ def checkRecursion( self, parseElementList ):
+ pass
+
+ def validate( self, validateTrace=[] ):
+ """
+ Check defined expressions for valid structure, check for infinite recursive definitions.
+ """
+ self.checkRecursion( [] )
+
+ def parseFile( self, file_or_filename, parseAll=False ):
+ """
+ Execute the parse expression on the given file or filename.
+ If a filename is specified (instead of a file object),
+ the entire file is opened, read, and closed before parsing.
+ """
+ try:
+ file_contents = file_or_filename.read()
+ except AttributeError:
+ with open(file_or_filename, "r") as f:
+ file_contents = f.read()
+ try:
+ return self.parseString(file_contents, parseAll)
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
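+
+ # Illustrative sketch (not part of the original source); "words.txt" is a
+ # hypothetical file name:
+ #
+ #   grammar = OneOrMore(Word(alphas))
+ #   results = grammar.parseFile("words.txt", parseAll=True)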
+
+ def __eq__(self,other):
+ if isinstance(other, ParserElement):
+ return self is other or vars(self) == vars(other)
+ elif isinstance(other, basestring):
+ return self.matches(other)
+ else:
+ return False  # not comparable to non-ParserElement, non-string objects
+
+ def __ne__(self,other):
+ return not (self == other)
+
+ def __hash__(self):
+ return hash(id(self))
+
+ def __req__(self,other):
+ return self == other
+
+ def __rne__(self,other):
+ return not (self == other)
+
+ def matches(self, testString, parseAll=True):
+ """
+ Method for quick testing of a parser against a test string. Good for simple
+ inline microtests of sub-expressions while building up a larger parser.
+
+ Parameters:
+ - testString - to test against this expression for a match
+ - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+
+ Example::
+
+ expr = Word(nums)
+ assert expr.matches("100")
+ """
+ try:
+ self.parseString(_ustr(testString), parseAll=parseAll)
+ return True
+ except ParseBaseException:
+ return False
+
+ def runTests(self, tests, parseAll=True, comment='#',
+ fullDump=True, printResults=True, failureTests=False, postParse=None):
+ """
+ Execute the parse expression on a series of test strings, showing each
+ test, the parsed results or where the parse failed. Quick and easy way to
+ run a parse expression against a list of sample strings.
+
+ Parameters:
+ - tests - a list of separate test strings, or a multiline string of test strings
+ - parseAll - (default= ``True``) - flag to pass to :class:`parseString` when running tests
+ - comment - (default= ``'#'``) - expression for indicating embedded comments in the test
+ string; pass None to disable comment filtering
+ - fullDump - (default= ``True``) - dump results as list followed by results names in nested outline;
+ if False, only dump nested list
+ - printResults - (default= ``True``) prints test output to stdout
+ - failureTests - (default= ``False``) indicates if these tests are expected to fail parsing
+ - postParse - (default= ``None``) optional callback for successful parse results; called as
+ `fn(test_string, parse_results)` and returns a string to be added to the test output
+
+ Returns: a (success, results) tuple, where success indicates that all tests succeeded
+ (or failed if ``failureTests`` is True), and results is a list of
+ (test string, result) tuples, where each result is the ParseResults from a
+ successful parse, or the exception raised on failure
+
+ Example::
+
+ number_expr = pyparsing_common.number.copy()
+
+ result = number_expr.runTests('''
+ # unsigned integer
+ 100
+ # negative integer
+ -100
+ # float with scientific notation
+ 6.02e23
+ # integer with scientific notation
+ 1e-12
+ ''')
+ print("Success" if result[0] else "Failed!")
+
+ result = number_expr.runTests('''
+ # stray character
+ 100Z
+ # missing leading digit before '.'
+ -.100
+ # too many '.'
+ 3.14.159
+ ''', failureTests=True)
+ print("Success" if result[0] else "Failed!")
+
+ prints::
+
+ # unsigned integer
+ 100
+ [100]
+
+ # negative integer
+ -100
+ [-100]
+
+ # float with scientific notation
+ 6.02e23
+ [6.02e+23]
+
+ # integer with scientific notation
+ 1e-12
+ [1e-12]
+
+ Success
+
+ # stray character
+ 100Z
+ ^
+ FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+ # missing leading digit before '.'
+ -.100
+ ^
+ FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+ # too many '.'
+ 3.14.159
+ ^
+ FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+ Success
+
+ Each test string must be on a single line. If you want to test a string that spans multiple
+ lines, create a test like this::
+
+ expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+ (Note that this is a raw string literal; you must include the leading 'r'.)
+ """
+ if isinstance(tests, basestring):
+ tests = list(map(str.strip, tests.rstrip().splitlines()))
+ if isinstance(comment, basestring):
+ comment = Literal(comment)
+ allResults = []
+ comments = []
+ success = True
+ for t in tests:
+ if (comment is not None and comment.matches(t, False)) or (comments and not t):
+ comments.append(t)
+ continue
+ if not t:
+ continue
+ out = ['\n'.join(comments), t]
+ comments = []
+ try:
+ # convert newline marks to actual newlines, and strip leading BOM if present
+ NL = Literal(r'\n').addParseAction(replaceWith('\n')).ignore(quotedString)
+ BOM = '\ufeff'
+ t = NL.transformString(t.lstrip(BOM))
+ result = self.parseString(t, parseAll=parseAll)
+ out.append(result.dump(full=fullDump))
+ success = success and not failureTests
+ if postParse is not None:
+ try:
+ pp_value = postParse(t, result)
+ if pp_value is not None:
+ out.append(str(pp_value))
+ except Exception as e:
+ out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e))
+ except ParseBaseException as pe:
+ fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+ if '\n' in t:
+ out.append(line(pe.loc, t))
+ out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
+ else:
+ out.append(' '*pe.loc + '^' + fatal)
+ out.append("FAIL: " + str(pe))
+ success = success and failureTests
+ result = pe
+ except Exception as exc:
+ out.append("FAIL-EXCEPTION: " + str(exc))
+ success = success and failureTests
+ result = exc
+
+ if printResults:
+ if fullDump:
+ out.append('')
+ print('\n'.join(out))
+
+ allResults.append((t, result))
+
+ return success, allResults
+
+
+class Token(ParserElement):
+ """Abstract :class:`ParserElement` subclass, for defining atomic
+ matching patterns.
+ """
+ def __init__( self ):
+ super(Token,self).__init__( savelist=False )
+
+
+class Empty(Token):
+ """An empty token, will always match.
+ """
+ def __init__( self ):
+ super(Empty,self).__init__()
+ self.name = "Empty"
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+
+
+class NoMatch(Token):
+ """A token that will never match.
+ """
+ def __init__( self ):
+ super(NoMatch,self).__init__()
+ self.name = "NoMatch"
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+ self.errmsg = "Unmatchable token"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Literal(Token):
+ """Token to exactly match a specified string.
+
+ Example::
+
+ Literal('blah').parseString('blah') # -> ['blah']
+ Literal('blah').parseString('blahfooblah') # -> ['blah']
+ Literal('blah').parseString('bla') # -> Exception: Expected "blah"
+
+ For case-insensitive matching, use :class:`CaselessLiteral`.
+
+ For keyword matching (force word break before and after the matched string),
+ use :class:`Keyword` or :class:`CaselessKeyword`.
+ """
+ def __init__( self, matchString ):
+ super(Literal,self).__init__()
+ self.match = matchString
+ self.matchLen = len(matchString)
+ try:
+ self.firstMatchChar = matchString[0]
+ except IndexError:
+ warnings.warn("null string passed to Literal; use Empty() instead",
+ SyntaxWarning, stacklevel=2)
+ self.__class__ = Empty
+ self.name = '"%s"' % _ustr(self.match)
+ self.errmsg = "Expected " + self.name
+ self.mayReturnEmpty = False
+ self.mayIndexError = False
+
+ # Performance tuning: this routine gets called a *lot*
+ # if this is a single character match string and the first character matches,
+ # short-circuit as quickly as possible, and avoid calling startswith
+ #~ @profile
+ def parseImpl( self, instring, loc, doActions=True ):
+ if (instring[loc] == self.firstMatchChar and
+ (self.matchLen==1 or instring.startswith(self.match,loc)) ):
+ return loc+self.matchLen, self.match
+ raise ParseException(instring, loc, self.errmsg, self)
+_L = Literal
+ParserElement._literalStringClass = Literal
+
+class Keyword(Token):
+ """Token to exactly match a specified string as a keyword, that is,
+ it must be immediately followed by a non-keyword character. Compare
+ with :class:`Literal`:
+
+ - ``Literal("if")`` will match the leading ``'if'`` in
+ ``'ifAndOnlyIf'``.
+ - ``Keyword("if")`` will not; it will only match the leading
+ ``'if'`` in ``'if x=1'``, or ``'if(y==2)'``
+
+ Accepts two optional constructor arguments in addition to the
+ keyword string:
+
+ - ``identChars`` is a string of characters that would be valid
+ identifier characters, defaulting to all alphanumerics + "_" and
+ "$"
+ - ``caseless`` allows case-insensitive matching, default is ``False``.
+
+ Example::
+
+ Keyword("start").parseString("start") # -> ['start']
+ Keyword("start").parseString("starting") # -> Exception
+
+ For case-insensitive matching, use :class:`CaselessKeyword`.
+ """
+ DEFAULT_KEYWORD_CHARS = alphanums+"_$"
+
+ def __init__( self, matchString, identChars=None, caseless=False ):
+ super(Keyword,self).__init__()
+ if identChars is None:
+ identChars = Keyword.DEFAULT_KEYWORD_CHARS
+ self.match = matchString
+ self.matchLen = len(matchString)
+ try:
+ self.firstMatchChar = matchString[0]
+ except IndexError:
+ warnings.warn("null string passed to Keyword; use Empty() instead",
+ SyntaxWarning, stacklevel=2)
+ self.name = '"%s"' % self.match
+ self.errmsg = "Expected " + self.name
+ self.mayReturnEmpty = False
+ self.mayIndexError = False
+ self.caseless = caseless
+ if caseless:
+ self.caselessmatch = matchString.upper()
+ identChars = identChars.upper()
+ self.identChars = set(identChars)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.caseless:
+ if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+ (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
+ (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
+ return loc+self.matchLen, self.match
+ else:
+ if (instring[loc] == self.firstMatchChar and
+ (self.matchLen==1 or instring.startswith(self.match,loc)) and
+ (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
+ (loc == 0 or instring[loc-1] not in self.identChars) ):
+ return loc+self.matchLen, self.match
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ def copy(self):
+ c = super(Keyword,self).copy()
+ c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
+ return c
+
+ @staticmethod
+ def setDefaultKeywordChars( chars ):
+ """Overrides the default Keyword chars
+ """
+ Keyword.DEFAULT_KEYWORD_CHARS = chars
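+
+ # Illustrative sketch (not part of the original source): widening the default
+ # keyword characters so that Keywords created afterwards treat '-' as part of
+ # a word, e.g. Keyword("end") will no longer match inside "end-of-file":
+ #
+ #   Keyword.setDefaultKeywordChars(alphanums + "_$-")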
+
+class CaselessLiteral(Literal):
+ """Token to match a specified string, ignoring case of letters.
+ Note: the matched results will always be in the case of the given
+ match string, NOT the case of the input text.
+
+ Example::
+
+ OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
+
+ (Contrast with example for :class:`CaselessKeyword`.)
+ """
+ def __init__( self, matchString ):
+ super(CaselessLiteral,self).__init__( matchString.upper() )
+ # Preserve the defining literal.
+ self.returnString = matchString
+ self.name = "'%s'" % self.returnString
+ self.errmsg = "Expected " + self.name
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if instring[ loc:loc+self.matchLen ].upper() == self.match:
+ return loc+self.matchLen, self.returnString
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class CaselessKeyword(Keyword):
+ """
+ Caseless version of :class:`Keyword`.
+
+ Example::
+
+ OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
+
+ (Contrast with example for :class:`CaselessLiteral`.)
+ """
+ def __init__( self, matchString, identChars=None ):
+ super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
+
+class CloseMatch(Token):
+ """A variation on :class:`Literal` which matches "close" matches,
+ that is, strings with at most 'n' mismatching characters.
+ :class:`CloseMatch` takes parameters:
+
+ - ``match_string`` - string to be matched
+ - ``maxMismatches`` - (``default=1``) maximum number of
+ mismatches allowed to count as a match
+
+ The results from a successful parse will contain the matched text
+ from the input string and the following named results:
+
+ - ``mismatches`` - a list of the positions within the
+ match_string where mismatches were found
+ - ``original`` - the original match_string used to compare
+ against the input string
+
+ If ``mismatches`` is an empty list, then the match was an exact
+ match.
+
+ Example::
+
+ patt = CloseMatch("ATCATCGAATGGA")
+ patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+ patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+ # exact match
+ patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+ # close match allowing up to 2 mismatches
+ patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+ patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+ """
+ def __init__(self, match_string, maxMismatches=1):
+ super(CloseMatch,self).__init__()
+ self.name = match_string
+ self.match_string = match_string
+ self.maxMismatches = maxMismatches
+ self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+ self.mayIndexError = False
+ self.mayReturnEmpty = False
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ start = loc
+ instrlen = len(instring)
+ maxloc = start + len(self.match_string)
+
+ if maxloc <= instrlen:
+ match_string = self.match_string
+ match_stringloc = 0
+ mismatches = []
+ maxMismatches = self.maxMismatches
+
+ for match_stringloc, (src, mat) in enumerate(zip(instring[loc:maxloc], self.match_string)):
+ if src != mat:
+ mismatches.append(match_stringloc)
+ if len(mismatches) > maxMismatches:
+ break
+ else:
+ loc = match_stringloc + 1
+ results = ParseResults([instring[start:loc]])
+ results['original'] = self.match_string
+ results['mismatches'] = mismatches
+ return loc, results
+
+ raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Word(Token):
+ """Token for matching words composed of allowed character sets.
+ Defined with a string containing all allowed initial characters, an
+ optional string containing allowed body characters (if omitted,
+ defaults to the initial character set), and an optional minimum,
+ maximum, and/or exact length. The default value for ``min`` is
+ 1 (a minimum value < 1 is not valid); the default values for
+ ``max`` and ``exact`` are 0, meaning no maximum or exact
+ length restriction. An optional ``excludeChars`` parameter can
+ list characters that might be found in the input ``bodyChars``
+ string; useful to define a word of all printables except for one or
+ two characters, for instance.
+
+ :class:`srange` is useful for defining custom character set strings
+ for defining ``Word`` expressions, using range notation from
+ regular expression character sets.
+
+ A common mistake is to use :class:`Word` to match a specific literal
+ string, as in ``Word("Address")``. Remember that :class:`Word`
+ uses the string argument to define *sets* of matchable characters.
+ This expression would match "Add", "AAA", "dAred", or any other word
+ made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an
+ exact literal string, use :class:`Literal` or :class:`Keyword`.
+
+ pyparsing includes helper strings for building Words:
+
+ - :class:`alphas`
+ - :class:`nums`
+ - :class:`alphanums`
+ - :class:`hexnums`
+ - :class:`alphas8bit` (alphabetic characters in the Latin-1 range 128-255
+ - accented, tilded, umlauted, etc.)
+ - :class:`punc8bit` (non-alphabetic characters in the Latin-1 range
+ 128-255 - currency, symbols, superscripts, diacriticals, etc.)
+ - :class:`printables` (any non-whitespace character)
+
+ Example::
+
+ # a word composed of digits
+ integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+
+ # a word with a leading capital, and zero or more lowercase
+ capital_word = Word(alphas.upper(), alphas.lower())
+
+ # hostnames are alphanumeric, with leading alpha, and '-'
+ hostname = Word(alphas, alphanums+'-')
+
+ # roman numeral (not a strict parser, accepts invalid mix of characters)
+ roman = Word("IVXLCDM")
+
+ # any string of non-whitespace characters, except for ','
+ csv_value = Word(printables, excludeChars=",")
+ """
+ def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
+ super(Word,self).__init__()
+ if excludeChars:
+ excludeChars = set(excludeChars)
+ initChars = ''.join(c for c in initChars if c not in excludeChars)
+ if bodyChars:
+ bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+ self.initCharsOrig = initChars
+ self.initChars = set(initChars)
+ if bodyChars :
+ self.bodyCharsOrig = bodyChars
+ self.bodyChars = set(bodyChars)
+ else:
+ self.bodyCharsOrig = initChars
+ self.bodyChars = set(initChars)
+
+ self.maxSpecified = max > 0
+
+ if min < 1:
+ raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+ self.minLen = min
+
+ if max > 0:
+ self.maxLen = max
+ else:
+ self.maxLen = _MAX_INT
+
+ if exact > 0:
+ self.maxLen = exact
+ self.minLen = exact
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayIndexError = False
+ self.asKeyword = asKeyword
+
+ if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
+ if self.bodyCharsOrig == self.initCharsOrig:
+ self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+ elif len(self.initCharsOrig) == 1:
+ self.reString = "%s[%s]*" % \
+ (re.escape(self.initCharsOrig),
+ _escapeRegexRangeChars(self.bodyCharsOrig),)
+ else:
+ self.reString = "[%s][%s]*" % \
+ (_escapeRegexRangeChars(self.initCharsOrig),
+ _escapeRegexRangeChars(self.bodyCharsOrig),)
+ if self.asKeyword:
+ self.reString = r"\b"+self.reString+r"\b"
+ try:
+ self.re = re.compile( self.reString )
+ except Exception:
+ self.re = None
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.re:
+ result = self.re.match(instring,loc)
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ return loc, result.group()
+
+ if instring[loc] not in self.initChars:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ start = loc
+ loc += 1
+ instrlen = len(instring)
+ bodychars = self.bodyChars
+ maxloc = start + self.maxLen
+ maxloc = min( maxloc, instrlen )
+ while loc < maxloc and instring[loc] in bodychars:
+ loc += 1
+
+ throwException = False
+ if loc - start < self.minLen:
+ throwException = True
+ elif self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+ throwException = True
+ elif self.asKeyword:
+ if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
+ throwException = True
+
+ if throwException:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ return loc, instring[start:loc]
+
+ def __str__( self ):
+ try:
+ return super(Word,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+
+ def charsAsStr(s):
+ if len(s)>4:
+ return s[:4]+"..."
+ else:
+ return s
+
+ if ( self.initCharsOrig != self.bodyCharsOrig ):
+ self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
+ else:
+ self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+ return self.strRepr
+
+
+class Char(Word):
+ """A short-cut class for defining ``Word(characters, exact=1)``,
+ when defining a match of any single character in a string of
+ characters.
+ """
+ def __init__(self, charset, asKeyword=False, excludeChars=None):
+ super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars)
+ self.reString = "[%s]" % _escapeRegexRangeChars(self.initCharsOrig)
+ self.re = re.compile( self.reString )
+
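+# Illustrative sketch (not part of the original source):
+#
+#   hexdigit = Char(hexnums)
+#   OneOrMore(hexdigit).parseString("1f A")   # -> ['1', 'f', 'A']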
+
+class Regex(Token):
+ r"""Token for matching strings that match a given regular
+ expression. Defined with a string specifying the regular expression in
+ a form recognized by the stdlib Python `re module <https://docs.python.org/3/library/re.html>`_.
+ If the given regex contains named groups (defined using ``(?P<name>...)``),
+ these will be preserved as named parse results.
+
+ Example::
+
+ realnum = Regex(r"[+-]?\d+\.\d*")
+ date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+ # ref: https://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+ roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+ """
+ compiledREtype = type(re.compile("[A-Z]"))
+ def __init__( self, pattern, flags=0, asGroupList=False, asMatch=False):
+ """The parameters ``pattern`` and ``flags`` are passed
+ to the ``re.compile()`` function as-is. See the Python
+ `re module <https://docs.python.org/3/library/re.html>`_ documentation for an
+ explanation of the acceptable patterns and flags.
+ """
+ super(Regex,self).__init__()
+
+ if isinstance(pattern, basestring):
+ if not pattern:
+ warnings.warn("null string passed to Regex; use Empty() instead",
+ SyntaxWarning, stacklevel=2)
+
+ self.pattern = pattern
+ self.flags = flags
+
+ try:
+ self.re = re.compile(self.pattern, self.flags)
+ self.reString = self.pattern
+ except sre_constants.error:
+ warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+ SyntaxWarning, stacklevel=2)
+ raise
+
+ elif isinstance(pattern, Regex.compiledREtype):
+ self.re = pattern
+ self.pattern = \
+ self.reString = str(pattern)
+ self.flags = flags
+
+ else:
+ raise ValueError("Regex may only be constructed with a string or a compiled RE object")
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayIndexError = False
+ self.mayReturnEmpty = True
+ self.asGroupList = asGroupList
+ self.asMatch = asMatch
+ if self.asGroupList:
+ self.parseImpl = self.parseImplAsGroupList
+ if self.asMatch:
+ self.parseImpl = self.parseImplAsMatch
+
+ def parseImpl(self, instring, loc, doActions=True):
+ result = self.re.match(instring,loc)
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ ret = ParseResults(result.group())
+ d = result.groupdict()
+ if d:
+ for k, v in d.items():
+ ret[k] = v
+ return loc, ret
+
+ def parseImplAsGroupList(self, instring, loc, doActions=True):
+ result = self.re.match(instring,loc)
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ ret = result.groups()
+ return loc, ret
+
+ def parseImplAsMatch(self, instring, loc, doActions=True):
+ result = self.re.match(instring,loc)
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ ret = result
+ return loc, ret
+
+ def __str__( self ):
+ try:
+ return super(Regex,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+ return self.strRepr
+
+ def sub(self, repl):
+ r"""
+ Return Regex with an attached parse action to transform the parsed
+ result as if called using `re.sub(expr, repl, string) <https://docs.python.org/3/library/re.html#re.sub>`_.
+
+ Example::
+
+ make_html = Regex(r"(\w+):(.*?):").sub(r"<\1>\2</\1>")
+ print(make_html.transformString("h1:main title:"))
+ # prints "<h1>main title</h1>"
+ """
+ if self.asGroupList:
+ warnings.warn("cannot use sub() with Regex(asGroupList=True)",
+ SyntaxWarning, stacklevel=2)
+ raise SyntaxError()
+
+ if self.asMatch and callable(repl):
+ warnings.warn("cannot use sub() with a callable with Regex(asMatch=True)",
+ SyntaxWarning, stacklevel=2)
+ raise SyntaxError()
+
+ if self.asMatch:
+ def pa(tokens):
+ return tokens[0].expand(repl)
+ else:
+ def pa(tokens):
+ return self.re.sub(repl, tokens[0])
+ return self.addParseAction(pa)
+
+class QuotedString(Token):
+ r"""
+ Token for matching strings that are delimited by quoting characters.
+
+ Defined with the following parameters:
+
+ - quoteChar - string of one or more characters defining the
+ quote delimiting string
+ - escChar - character to escape quotes, typically backslash
+ (default= ``None`` )
+ - escQuote - special quote sequence to escape an embedded quote
+ string (such as SQL's ``""`` to escape an embedded ``"``)
+ (default= ``None`` )
+ - multiline - boolean indicating whether quotes can span
+ multiple lines (default= ``False`` )
+ - unquoteResults - boolean indicating whether the matched text
+ should be unquoted (default= ``True`` )
+ - endQuoteChar - string of one or more characters defining the
+ end of the quote delimited string (default= ``None`` => same as
+ quoteChar)
+ - convertWhitespaceEscapes - convert escaped whitespace
+ (``'\t'``, ``'\n'``, etc.) to actual whitespace
+ (default= ``True`` )
+
+ Example::
+
+ qs = QuotedString('"')
+ print(qs.searchString('lsjdf "This is the quote" sldjf'))
+ complex_qs = QuotedString('{{', endQuoteChar='}}')
+ print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
+ sql_qs = QuotedString('"', escQuote='""')
+ print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
+
+ prints::
+
+ [['This is the quote']]
+ [['This is the "quote"']]
+ [['This is the quote with "embedded" quotes']]
+ """
+ def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
+ super(QuotedString,self).__init__()
+
+ # remove whitespace from quote chars - won't work anyway
+ quoteChar = quoteChar.strip()
+ if not quoteChar:
+ warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+ raise SyntaxError()
+
+ if endQuoteChar is None:
+ endQuoteChar = quoteChar
+ else:
+ endQuoteChar = endQuoteChar.strip()
+ if not endQuoteChar:
+ warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+ raise SyntaxError()
+
+ self.quoteChar = quoteChar
+ self.quoteCharLen = len(quoteChar)
+ self.firstQuoteChar = quoteChar[0]
+ self.endQuoteChar = endQuoteChar
+ self.endQuoteCharLen = len(endQuoteChar)
+ self.escChar = escChar
+ self.escQuote = escQuote
+ self.unquoteResults = unquoteResults
+ self.convertWhitespaceEscapes = convertWhitespaceEscapes
+
+ if multiline:
+ self.flags = re.MULTILINE | re.DOTALL
+ self.pattern = r'%s(?:[^%s%s]' % \
+ ( re.escape(self.quoteChar),
+ _escapeRegexRangeChars(self.endQuoteChar[0]),
+ (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+ else:
+ self.flags = 0
+ self.pattern = r'%s(?:[^%s\n\r%s]' % \
+ ( re.escape(self.quoteChar),
+ _escapeRegexRangeChars(self.endQuoteChar[0]),
+ (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+ if len(self.endQuoteChar) > 1:
+ self.pattern += (
+ '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+ _escapeRegexRangeChars(self.endQuoteChar[i]))
+ for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
+ )
+ if escQuote:
+ self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+ if escChar:
+ self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+ self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
+ self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+ try:
+ self.re = re.compile(self.pattern, self.flags)
+ self.reString = self.pattern
+ except sre_constants.error:
+ warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+ SyntaxWarning, stacklevel=2)
+ raise
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayIndexError = False
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
+ if not result:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ loc = result.end()
+ ret = result.group()
+
+ if self.unquoteResults:
+
+ # strip off quotes
+ ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
+
+ if isinstance(ret,basestring):
+ # replace escaped whitespace
+ if '\\' in ret and self.convertWhitespaceEscapes:
+ ws_map = {
+ r'\t' : '\t',
+ r'\n' : '\n',
+ r'\f' : '\f',
+ r'\r' : '\r',
+ }
+ for wslit,wschar in ws_map.items():
+ ret = ret.replace(wslit, wschar)
+
+ # replace escaped characters
+ if self.escChar:
+ ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
+
+ # replace escaped quotes
+ if self.escQuote:
+ ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+ return loc, ret
+
+ def __str__( self ):
+ try:
+ return super(QuotedString,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+ return self.strRepr
+
+
+class CharsNotIn(Token):
+ """Token for matching words composed of characters *not* in a given
+ set (will include whitespace in matched characters if not listed in
+ the provided exclusion set - see example). Defined with a string
+ containing all disallowed characters, and an optional minimum,
+ maximum, and/or exact length. The default value for ``min`` is
+ 1 (a minimum value < 1 is not valid); the default values for
+ ``max`` and ``exact`` are 0, meaning no maximum or exact
+ length restriction.
+
+ Example::
+
+ # define a comma-separated-value as anything that is not a ','
+ csv_value = CharsNotIn(',')
+ print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
+
+ prints::
+
+ ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
+ """
+ def __init__( self, notChars, min=1, max=0, exact=0 ):
+ super(CharsNotIn,self).__init__()
+ self.skipWhitespace = False
+ self.notChars = notChars
+
+ if min < 1:
+ raise ValueError(
+ "cannot specify a minimum length < 1; use " +
+ "Optional(CharsNotIn()) if zero-length char group is permitted")
+
+ self.minLen = min
+
+ if max > 0:
+ self.maxLen = max
+ else:
+ self.maxLen = _MAX_INT
+
+ if exact > 0:
+ self.maxLen = exact
+ self.minLen = exact
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayReturnEmpty = ( self.minLen == 0 )
+ self.mayIndexError = False
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if instring[loc] in self.notChars:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ start = loc
+ loc += 1
+ notchars = self.notChars
+ maxlen = min( start+self.maxLen, len(instring) )
+ while loc < maxlen and \
+ (instring[loc] not in notchars):
+ loc += 1
+
+ if loc - start < self.minLen:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ return loc, instring[start:loc]
+
+ def __str__( self ):
+ try:
+ return super(CharsNotIn, self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ if len(self.notChars) > 4:
+ self.strRepr = "!W:(%s...)" % self.notChars[:4]
+ else:
+ self.strRepr = "!W:(%s)" % self.notChars
+
+ return self.strRepr
+
+class White(Token):
+ """Special matching class for matching whitespace. Normally,
+ whitespace is ignored by pyparsing grammars. This class is included
+ when some whitespace structures are significant. Define with
+ a string containing the whitespace characters to be matched; default
+ is ``" \\t\\r\\n"``. Also takes optional ``min``,
+ ``max``, and ``exact`` arguments, as defined for the
+ :class:`Word` class.
+ """
+ whiteStrs = {
+ ' ' : '<SP>',
+ '\t': '<TAB>',
+ '\n': '<LF>',
+ '\r': '<CR>',
+ '\f': '<FF>',
+ u'\u00A0': '<NBSP>',
+ u'\u1680': '<OGHAM_SPACE_MARK>',
+ u'\u180E': '<MONGOLIAN_VOWEL_SEPARATOR>',
+ u'\u2000': '<EN_QUAD>',
+ u'\u2001': '<EM_QUAD>',
+ u'\u2002': '<EN_SPACE>',
+ u'\u2003': '<EM_SPACE>',
+ u'\u2004': '<THREE-PER-EM_SPACE>',
+ u'\u2005': '<FOUR-PER-EM_SPACE>',
+ u'\u2006': '<SIX-PER-EM_SPACE>',
+ u'\u2007': '<FIGURE_SPACE>',
+ u'\u2008': '<PUNCTUATION_SPACE>',
+ u'\u2009': '<THIN_SPACE>',
+ u'\u200A': '<HAIR_SPACE>',
+ u'\u200B': '<ZERO_WIDTH_SPACE>',
+ u'\u202F': '<NNBSP>',
+ u'\u205F': '<MMSP>',
+ u'\u3000': '<IDEOGRAPHIC_SPACE>',
+ }
+ def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
+ super(White,self).__init__()
+ self.matchWhite = ws
+ self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
+ #~ self.leaveWhitespace()
+ self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
+ self.mayReturnEmpty = True
+ self.errmsg = "Expected " + self.name
+
+ self.minLen = min
+
+ if max > 0:
+ self.maxLen = max
+ else:
+ self.maxLen = _MAX_INT
+
+ if exact > 0:
+ self.maxLen = exact
+ self.minLen = exact
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if instring[loc] not in self.matchWhite:
+ raise ParseException(instring, loc, self.errmsg, self)
+ start = loc
+ loc += 1
+ maxloc = start + self.maxLen
+ maxloc = min( maxloc, len(instring) )
+ while loc < maxloc and instring[loc] in self.matchWhite:
+ loc += 1
+
+ if loc - start < self.minLen:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ return loc, instring[start:loc]
+
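+# Illustrative sketch (not part of the original source): matching significant
+# indentation with White, which pyparsing grammars normally skip:
+#
+#   indent = White(" ", exact=4)
+#   line = (indent + Word(alphas)).leaveWhitespace()
+#   line.parseString("    body")   # -> ['    ', 'body']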
+
+class _PositionToken(Token):
+ def __init__( self ):
+ super(_PositionToken,self).__init__()
+ self.name=self.__class__.__name__
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+
+class GoToColumn(_PositionToken):
+ """Token to advance to a specific column of input text; useful for
+ tabular report scraping.
+ """
+ def __init__( self, colno ):
+ super(GoToColumn,self).__init__()
+ self.col = colno
+
+ def preParse( self, instring, loc ):
+ if col(loc,instring) != self.col:
+ instrlen = len(instring)
+ if self.ignoreExprs:
+ loc = self._skipIgnorables( instring, loc )
+ while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
+ loc += 1
+ return loc
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ thiscol = col( loc, instring )
+ if thiscol > self.col:
+ raise ParseException( instring, loc, "Text not in expected column", self )
+ newloc = loc + self.col - thiscol
+ ret = instring[ loc: newloc ]
+ return newloc, ret
+
+
+class LineStart(_PositionToken):
+ r"""Matches if current position is at the beginning of a line within
+ the parse string
+
+ Example::
+
+ test = '''\
+ AAA this line
+ AAA and this line
+ AAA but not this one
+ B AAA and definitely not this one
+ '''
+
+ for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+ print(t)
+
+ prints::
+
+ ['AAA', ' this line']
+ ['AAA', ' and this line']
+
+ """
+ def __init__( self ):
+ super(LineStart,self).__init__()
+ self.errmsg = "Expected start of line"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if col(loc, instring) == 1:
+ return loc, []
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class LineEnd(_PositionToken):
+ """Matches if current position is at the end of a line within the
+ parse string
+ """
+ def __init__( self ):
+ super(LineEnd,self).__init__()
+ self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
+ self.errmsg = "Expected end of line"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if loc<len(instring):
+ if instring[loc] == "\n":
+ return loc+1, "\n"
+ else:
+ raise ParseException(instring, loc, self.errmsg, self)
+ elif loc == len(instring):
+ return loc+1, []
+ else:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class StringStart(_PositionToken):
+ """Matches if current position is at the beginning of the parse
+ string
+ """
+ def __init__( self ):
+ super(StringStart,self).__init__()
+ self.errmsg = "Expected start of text"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if loc != 0:
+ # see if entire string up to here is just whitespace and ignoreables
+ if loc != self.preParse( instring, 0 ):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+class StringEnd(_PositionToken):
+ """Matches if current position is at the end of the parse string
+ """
+ def __init__( self ):
+ super(StringEnd,self).__init__()
+ self.errmsg = "Expected end of text"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if loc < len(instring):
+ raise ParseException(instring, loc, self.errmsg, self)
+ elif loc == len(instring):
+ return loc+1, []
+ elif loc > len(instring):
+ return loc, []
+ else:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class WordStart(_PositionToken):
+ """Matches if the current position is at the beginning of a Word,
+ and is not preceded by any character in a given set of
+ ``wordChars`` (default= ``printables``). To emulate the
+ ``\b`` behavior of regular expressions, use
+ ``WordStart(alphanums)``. ``WordStart`` will also match at
+ the beginning of the string being parsed, or at the beginning of
+ a line.
+ """
+ def __init__(self, wordChars = printables):
+ super(WordStart,self).__init__()
+ self.wordChars = set(wordChars)
+ self.errmsg = "Not at the start of a word"
+
+ def parseImpl(self, instring, loc, doActions=True ):
+ if loc != 0:
+ if (instring[loc-1] in self.wordChars or
+ instring[loc] not in self.wordChars):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+class WordEnd(_PositionToken):
+ """Matches if the current position is at the end of a Word, and is
+ not followed by any character in a given set of ``wordChars``
+ (default= ``printables``). To emulate the ``\b`` behavior of
+ regular expressions, use ``WordEnd(alphanums)``. ``WordEnd``
+ will also match at the end of the string being parsed, or at the end
+ of a line.
+ """
+ def __init__(self, wordChars = printables):
+ super(WordEnd,self).__init__()
+ self.wordChars = set(wordChars)
+ self.skipWhitespace = False
+ self.errmsg = "Not at the end of a word"
+
+ def parseImpl(self, instring, loc, doActions=True ):
+ instrlen = len(instring)
+ if instrlen>0 and loc<instrlen:
+ if (instring[loc] in self.wordChars or
+ instring[loc-1] not in self.wordChars):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
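+# Illustrative sketch (not part of the original source): WordEnd keeps a
+# Literal from matching a leading fragment of a longer word:
+#
+#   kw = Literal("end") + WordEnd(alphanums)
+#   kw.parseString("end if")   # -> ['end']
+#   kw.parseString("endif")    # raises ParseException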
+
+class ParseExpression(ParserElement):
+ """Abstract subclass of ParserElement, for combining and
+ post-processing parsed tokens.
+ """
+ def __init__( self, exprs, savelist = False ):
+ super(ParseExpression,self).__init__(savelist)
+ if isinstance( exprs, _generatorType ):
+ exprs = list(exprs)
+
+ if isinstance( exprs, basestring ):
+ self.exprs = [ ParserElement._literalStringClass( exprs ) ]
+ elif isinstance( exprs, Iterable ):
+ exprs = list(exprs)
+ # if sequence of strings provided, wrap with Literal
+ if all(isinstance(expr, basestring) for expr in exprs):
+ exprs = map(ParserElement._literalStringClass, exprs)
+ self.exprs = list(exprs)
+ else:
+ try:
+ self.exprs = list( exprs )
+ except TypeError:
+ self.exprs = [ exprs ]
+ self.callPreparse = False
+
+ def __getitem__( self, i ):
+ return self.exprs[i]
+
+ def append( self, other ):
+ self.exprs.append( other )
+ self.strRepr = None
+ return self
+
+ def leaveWhitespace( self ):
+ """Extends ``leaveWhitespace`` defined in base class, and also invokes ``leaveWhitespace`` on
+ all contained expressions."""
+ self.skipWhitespace = False
+ self.exprs = [ e.copy() for e in self.exprs ]
+ for e in self.exprs:
+ e.leaveWhitespace()
+ return self
+
+ def ignore( self, other ):
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ super( ParseExpression, self).ignore( other )
+ for e in self.exprs:
+ e.ignore( self.ignoreExprs[-1] )
+ else:
+ super( ParseExpression, self).ignore( other )
+ for e in self.exprs:
+ e.ignore( self.ignoreExprs[-1] )
+ return self
+
+ def __str__( self ):
+ try:
+ return super(ParseExpression,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
+ return self.strRepr
+
+ def streamline( self ):
+ super(ParseExpression,self).streamline()
+
+ for e in self.exprs:
+ e.streamline()
+
+ # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
+ # but only if there are no parse actions or resultsNames on the nested And's
+ # (likewise for Or's and MatchFirst's)
+ if ( len(self.exprs) == 2 ):
+ other = self.exprs[0]
+ if ( isinstance( other, self.__class__ ) and
+ not(other.parseAction) and
+ other.resultsName is None and
+ not other.debug ):
+ self.exprs = other.exprs[:] + [ self.exprs[1] ]
+ self.strRepr = None
+ self.mayReturnEmpty |= other.mayReturnEmpty
+ self.mayIndexError |= other.mayIndexError
+
+ other = self.exprs[-1]
+ if ( isinstance( other, self.__class__ ) and
+ not(other.parseAction) and
+ other.resultsName is None and
+ not other.debug ):
+ self.exprs = self.exprs[:-1] + other.exprs[:]
+ self.strRepr = None
+ self.mayReturnEmpty |= other.mayReturnEmpty
+ self.mayIndexError |= other.mayIndexError
+
+ self.errmsg = "Expected " + _ustr(self)
+
+ return self
+
+ def validate( self, validateTrace=[] ):
+ tmp = validateTrace[:]+[self]
+ for e in self.exprs:
+ e.validate(tmp)
+ self.checkRecursion( [] )
+
+ def copy(self):
+ ret = super(ParseExpression,self).copy()
+ ret.exprs = [e.copy() for e in self.exprs]
+ return ret
+
+class And(ParseExpression):
+ """
+ Requires all given :class:`ParseExpression` s to be found in the given order.
+ Expressions may be separated by whitespace.
+ May be constructed using the ``'+'`` operator.
+ May also be constructed using the ``'-'`` operator, which will
+ suppress backtracking.
+
+ Example::
+
+ integer = Word(nums)
+ name_expr = OneOrMore(Word(alphas))
+
+ expr = And([integer("id"),name_expr("name"),integer("age")])
+ # more easily written as:
+ expr = integer("id") + name_expr("name") + integer("age")
+ """
+
+ class _ErrorStop(Empty):
+ def __init__(self, *args, **kwargs):
+ super(And._ErrorStop,self).__init__(*args, **kwargs)
+ self.name = '-'
+ self.leaveWhitespace()
+
+ def __init__( self, exprs, savelist = True ):
+ super(And,self).__init__(exprs, savelist)
+ self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+ self.setWhitespaceChars( self.exprs[0].whiteChars )
+ self.skipWhitespace = self.exprs[0].skipWhitespace
+ self.callPreparse = True
+
+ def streamline(self):
+ super(And, self).streamline()
+ self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+ return self
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ # pass False as last arg to _parse for first element, since we already
+ # pre-parsed the string as part of our And pre-parsing
+ loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
+ errorStop = False
+ for e in self.exprs[1:]:
+ if isinstance(e, And._ErrorStop):
+ errorStop = True
+ continue
+ if errorStop:
+ try:
+ loc, exprtokens = e._parse( instring, loc, doActions )
+ except ParseSyntaxException:
+ raise
+ except ParseBaseException as pe:
+ pe.__traceback__ = None
+ raise ParseSyntaxException._from_exception(pe)
+ except IndexError:
+ raise ParseSyntaxException(instring, len(instring), self.errmsg, self)
+ else:
+ loc, exprtokens = e._parse( instring, loc, doActions )
+ if exprtokens or exprtokens.haskeys():
+ resultlist += exprtokens
+ return loc, resultlist
+
+ def __iadd__(self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ return self.append( other ) #And( [ self, other ] )
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+ if not e.mayReturnEmpty:
+ break
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+
+class Or(ParseExpression):
+ """Requires that at least one :class:`ParseExpression` is found. If
+ two expressions match, the expression that matches the longest
+ string will be used. May be constructed using the ``'^'``
+ operator.
+
+ Example::
+
+ # construct Or using '^' operator
+
+ number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
+ print(number.searchString("123 3.1416 789"))
+
+ prints::
+
+ [['123'], ['3.1416'], ['789']]
+ """
+ def __init__( self, exprs, savelist = False ):
+ super(Or,self).__init__(exprs, savelist)
+ if self.exprs:
+ self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+ else:
+ self.mayReturnEmpty = True
+
+ def streamline(self):
+ super(Or, self).streamline()
+ if __compat__.collect_all_And_tokens:
+ self.saveAsList = any(e.saveAsList for e in self.exprs)
+ return self
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ maxExcLoc = -1
+ maxException = None
+ matches = []
+ for e in self.exprs:
+ try:
+ loc2 = e.tryParse( instring, loc )
+ except ParseException as err:
+ err.__traceback__ = None
+ if err.loc > maxExcLoc:
+ maxException = err
+ maxExcLoc = err.loc
+ except IndexError:
+ if len(instring) > maxExcLoc:
+ maxException = ParseException(instring,len(instring),e.errmsg,self)
+ maxExcLoc = len(instring)
+ else:
+ # save match among all matches, to retry longest to shortest
+ matches.append((loc2, e))
+
+ if matches:
+ matches.sort(key=lambda x: -x[0])
+ for _,e in matches:
+ try:
+ return e._parse( instring, loc, doActions )
+ except ParseException as err:
+ err.__traceback__ = None
+ if err.loc > maxExcLoc:
+ maxException = err
+ maxExcLoc = err.loc
+
+ if maxException is not None:
+ maxException.msg = self.errmsg
+ raise maxException
+ else:
+ raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+
+ def __ixor__(self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ return self.append( other ) #Or( [ self, other ] )
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+
+
+class MatchFirst(ParseExpression):
+ """Requires that at least one :class:`ParseExpression` is found. If
+ two expressions match, the first one listed is the one that will
+ match. May be constructed using the ``'|'`` operator.
+
+ Example::
+
+ # construct MatchFirst using '|' operator
+
+ # watch the order of expressions to match
+ number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
+ print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']]
+
+ # put more selective expression first
+ number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
+ print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']]
+ """
+ def __init__( self, exprs, savelist = False ):
+ super(MatchFirst,self).__init__(exprs, savelist)
+ if self.exprs:
+ self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+ else:
+ self.mayReturnEmpty = True
+
+ def streamline(self):
+ super(MatchFirst, self).streamline()
+ if __compat__.collect_all_And_tokens:
+ self.saveAsList = any(e.saveAsList for e in self.exprs)
+ return self
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ maxExcLoc = -1
+ maxException = None
+ for e in self.exprs:
+ try:
+ ret = e._parse( instring, loc, doActions )
+ return ret
+ except ParseException as err:
+ if err.loc > maxExcLoc:
+ maxException = err
+ maxExcLoc = err.loc
+ except IndexError:
+ if len(instring) > maxExcLoc:
+ maxException = ParseException(instring,len(instring),e.errmsg,self)
+ maxExcLoc = len(instring)
+
+ # only got here if no expression matched, raise exception for match that made it the furthest
+ else:
+ if maxException is not None:
+ maxException.msg = self.errmsg
+ raise maxException
+ else:
+ raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+ def __ior__(self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ return self.append( other ) #MatchFirst( [ self, other ] )
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+
+
+class Each(ParseExpression):
+ """Requires all given :class:`ParseExpression` s to be found, but in
+ any order. Expressions may be separated by whitespace.
+
+ May be constructed using the ``'&'`` operator.
+
+ Example::
+
+ color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+ shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+ integer = Word(nums)
+ shape_attr = "shape:" + shape_type("shape")
+ posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+ color_attr = "color:" + color("color")
+ size_attr = "size:" + integer("size")
+
+ # use Each (using operator '&') to accept attributes in any order
+ # (shape and posn are required, color and size are optional)
+ shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+ shape_spec.runTests('''
+ shape: SQUARE color: BLACK posn: 100, 120
+ shape: CIRCLE size: 50 color: BLUE posn: 50,80
+ color:GREEN size:20 shape:TRIANGLE posn:20,40
+ '''
+ )
+
+ prints::
+
+ shape: SQUARE color: BLACK posn: 100, 120
+ ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+ - color: BLACK
+ - posn: ['100', ',', '120']
+ - x: 100
+ - y: 120
+ - shape: SQUARE
+
+
+ shape: CIRCLE size: 50 color: BLUE posn: 50,80
+ ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+ - color: BLUE
+ - posn: ['50', ',', '80']
+ - x: 50
+ - y: 80
+ - shape: CIRCLE
+ - size: 50
+
+
+ color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+ ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+ - color: GREEN
+ - posn: ['20', ',', '40']
+ - x: 20
+ - y: 40
+ - shape: TRIANGLE
+ - size: 20
+ """
+ def __init__( self, exprs, savelist = True ):
+ super(Each,self).__init__(exprs, savelist)
+ self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+ self.skipWhitespace = True
+ self.initExprGroups = True
+ self.saveAsList = True
+
+ def streamline(self):
+ super(Each, self).streamline()
+ self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+ return self
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.initExprGroups:
+ self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
+ opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
+ opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
+ self.optionals = opt1 + opt2
+ self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
+ self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
+ self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
+ self.required += self.multirequired
+ self.initExprGroups = False
+ tmpLoc = loc
+ tmpReqd = self.required[:]
+ tmpOpt = self.optionals[:]
+ matchOrder = []
+
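+ # round-robin matching: on each pass, try every remaining expression
+ # against the input, removing matched required/optional exprs, until a
+ # full pass produces no new matches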
+ keepMatching = True
+ while keepMatching:
+ tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+ failed = []
+ for e in tmpExprs:
+ try:
+ tmpLoc = e.tryParse( instring, tmpLoc )
+ except ParseException:
+ failed.append(e)
+ else:
+ matchOrder.append(self.opt1map.get(id(e),e))
+ if e in tmpReqd:
+ tmpReqd.remove(e)
+ elif e in tmpOpt:
+ tmpOpt.remove(e)
+ if len(failed) == len(tmpExprs):
+ keepMatching = False
+
+ if tmpReqd:
+ missing = ", ".join(_ustr(e) for e in tmpReqd)
+ raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
+
+ # add any unmatched Optionals, in case they have default values defined
+ matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
+
+ resultlist = []
+ for e in matchOrder:
+ loc,results = e._parse(instring,loc,doActions)
+ resultlist.append(results)
+
+ finalResults = sum(resultlist, ParseResults([]))
+ return loc, finalResults
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+
+
+class ParseElementEnhance(ParserElement):
+ """Abstract subclass of :class:`ParserElement`, for combining and
+ post-processing parsed tokens.
+ """
+ def __init__( self, expr, savelist=False ):
+ super(ParseElementEnhance,self).__init__(savelist)
+ if isinstance( expr, basestring ):
+ if issubclass(ParserElement._literalStringClass, Token):
+ expr = ParserElement._literalStringClass(expr)
+ else:
+ expr = ParserElement._literalStringClass(Literal(expr))
+ self.expr = expr
+ self.strRepr = None
+ if expr is not None:
+ self.mayIndexError = expr.mayIndexError
+ self.mayReturnEmpty = expr.mayReturnEmpty
+ self.setWhitespaceChars( expr.whiteChars )
+ self.skipWhitespace = expr.skipWhitespace
+ self.saveAsList = expr.saveAsList
+ self.callPreparse = expr.callPreparse
+ self.ignoreExprs.extend(expr.ignoreExprs)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr is not None:
+ return self.expr._parse( instring, loc, doActions, callPreParse=False )
+ else:
+ raise ParseException("",loc,self.errmsg,self)
+
+ def leaveWhitespace( self ):
+ self.skipWhitespace = False
+ self.expr = self.expr.copy()
+ if self.expr is not None:
+ self.expr.leaveWhitespace()
+ return self
+
+ def ignore( self, other ):
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ else:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ return self
+
+ def streamline( self ):
+ super(ParseElementEnhance,self).streamline()
+ if self.expr is not None:
+ self.expr.streamline()
+ return self
+
+ def checkRecursion( self, parseElementList ):
+ if self in parseElementList:
+ raise RecursiveGrammarException( parseElementList+[self] )
+ subRecCheckList = parseElementList[:] + [ self ]
+ if self.expr is not None:
+ self.expr.checkRecursion( subRecCheckList )
+
+ def validate( self, validateTrace=[] ):
+ tmp = validateTrace[:]+[self]
+ if self.expr is not None:
+ self.expr.validate(tmp)
+ self.checkRecursion( [] )
+
+ def __str__( self ):
+ try:
+ return super(ParseElementEnhance,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None and self.expr is not None:
+ self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
+ return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+ """Lookahead matching of the given parse expression.
+ ``FollowedBy`` does *not* advance the parsing position within
+ the input string; it only verifies that the specified parse
+ expression matches at the current position. ``FollowedBy``
+ always returns a null token list. If any results names are defined
+ in the lookahead expression, those *will* be returned for access by
+ name.
+
+ Example::
+
+ # use FollowedBy to match a label only if it is followed by a ':'
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+ OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+
+ prints::
+
+ [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+ """
+ def __init__( self, expr ):
+ super(FollowedBy,self).__init__(expr)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ _, ret = self.expr._parse(instring, loc, doActions=doActions)
+ del ret[:]
+ return loc, ret
+
+
+class PrecededBy(ParseElementEnhance):
+ """Lookbehind matching of the given parse expression.
+ ``PrecededBy`` does not advance the parsing position within the
+ input string; it only verifies that the specified parse expression
+ matches prior to the current position. ``PrecededBy`` always
+ returns a null token list, but if a results name is defined on the
+ given expression, it is returned.
+
+ Parameters:
+
+ - expr - expression that must match prior to the current parse
+ location
+ - retreat - (default= ``None``) - (int) maximum number of characters
+ to lookbehind prior to the current parse location
+
+ If the lookbehind expression is a string, Literal, Keyword, or
+ a Word or CharsNotIn with a specified exact or maximum length, then
+ the retreat parameter is not required. Otherwise, retreat must be
+ specified to give a maximum number of characters to look back from
+ the current parse position for a lookbehind match.
+
+ Example::
+
+ # VB-style variable names with type prefixes
+ int_var = PrecededBy("#") + pyparsing_common.identifier
+ str_var = PrecededBy("$") + pyparsing_common.identifier
+
+ """
+ def __init__(self, expr, retreat=None):
+ super(PrecededBy, self).__init__(expr)
+ self.expr = self.expr().leaveWhitespace()
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+ self.exact = False
+ if isinstance(expr, str):
+ retreat = len(expr)
+ self.exact = True
+ elif isinstance(expr, (Literal, Keyword)):
+ retreat = expr.matchLen
+ self.exact = True
+ elif isinstance(expr, (Word, CharsNotIn)) and expr.maxLen != _MAX_INT:
+ retreat = expr.maxLen
+ self.exact = True
+ elif isinstance(expr, _PositionToken):
+ retreat = 0
+ self.exact = True
+ self.retreat = retreat
+ self.errmsg = "not preceded by " + str(expr)
+ self.skipWhitespace = False
+
+ def parseImpl(self, instring, loc=0, doActions=True):
+ if self.exact:
+ if loc < self.retreat:
+ raise ParseException(instring, loc, self.errmsg)
+ start = loc - self.retreat
+ _, ret = self.expr._parse(instring, start)
+ else:
+ # retreat specified a maximum lookbehind window, iterate
+ test_expr = self.expr + StringEnd()
+ instring_slice = instring[:loc]
+ last_expr = ParseException(instring, loc, self.errmsg)
+ # try lookbehind windows from 1 up to min(loc, retreat) characters,
+ # inclusive, so a match starting at the beginning of the string is found
+ for offset in range(1, min(loc, self.retreat) + 1):
+ try:
+ _, ret = test_expr._parse(instring_slice, loc-offset)
+ except ParseBaseException as pbe:
+ last_expr = pbe
+ else:
+ break
+ else:
+ raise last_expr
+ # return empty list of tokens, but preserve any defined results names
+ del ret[:]
+ return loc, ret
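+
+# A possible use of the PrecededBy docstring example above (an editorial
+# sketch, not library code; pyparsing_common is defined later in this module):
+#
+#   int_var = PrecededBy("#") + pyparsing_common.identifier
+#   print(int_var.searchString("#count $name"))   # -> [['count']]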
+
+
+class NotAny(ParseElementEnhance):
+ """Lookahead to disallow matching with the given parse expression.
+ ``NotAny`` does *not* advance the parsing position within the
+ input string; it only verifies that the specified parse expression
+ does *not* match at the current position. Also, ``NotAny`` does
+ *not* skip over leading whitespace. ``NotAny`` always returns
+ a null token list. May be constructed using the ``'~'`` operator.
+
+ Example::
+
+ AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split())
+
+ # take care not to mistake keywords for identifiers
+ ident = ~(AND | OR | NOT) + Word(alphas)
+ boolean_term = Optional(NOT) + ident
+
+ # very crude boolean expression - to support parenthesis groups and
+ # operation hierarchy, use infixNotation
+ boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term)
+
+ # integers that are followed by "." are actually floats
+ integer = Word(nums) + ~Char(".")
+ """
+ def __init__( self, expr ):
+ super(NotAny,self).__init__(expr)
+ #~ self.leaveWhitespace()
+ self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+ self.mayReturnEmpty = True
+ self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr.canParseNext(instring, loc):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+ return self.strRepr
+
+class _MultipleMatch(ParseElementEnhance):
+ def __init__( self, expr, stopOn=None):
+ super(_MultipleMatch, self).__init__(expr)
+ self.saveAsList = True
+ ender = stopOn
+ if isinstance(ender, basestring):
+ ender = ParserElement._literalStringClass(ender)
+ self.not_ender = ~ender if ender is not None else None
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ self_expr_parse = self.expr._parse
+ self_skip_ignorables = self._skipIgnorables
+ check_ender = self.not_ender is not None
+ if check_ender:
+ try_not_ender = self.not_ender.tryParse
+
+ # must be at least one (but first see if we are the stopOn sentinel;
+ # if so, fail)
+ if check_ender:
+ try_not_ender(instring, loc)
+ loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+ try:
+ hasIgnoreExprs = bool(self.ignoreExprs)
+ while 1:
+ if check_ender:
+ try_not_ender(instring, loc)
+ if hasIgnoreExprs:
+ preloc = self_skip_ignorables( instring, loc )
+ else:
+ preloc = loc
+ loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+ if tmptokens or tmptokens.haskeys():
+ tokens += tmptokens
+ except (ParseException,IndexError):
+ pass
+
+ return loc, tokens
+
+class OneOrMore(_MultipleMatch):
+ """Repetition of one or more of the given expression.
+
+ Parameters:
+ - expr - expression that must match one or more times
+ - stopOn - (default= ``None``) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example::
+
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+ text = "shape: SQUARE posn: upper left color: BLACK"
+ OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'posn' as data instead of next label -> [['shape', 'SQUARE posn']]
+
+ # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+ OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+
+ # could also be written as
+ (attr_expr * (1,)).parseString(text).pprint()
+ """
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+ return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+ """Optional repetition of zero or more of the given expression.
+
+ Parameters:
+ - expr - expression that may match zero or more times
+ - stopOn - (default= ``None``) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example: similar to :class:`OneOrMore`
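+
+ A minimal illustrative sketch (an editorial addition, using only
+ names defined in this module)::
+
+ integers = ZeroOrMore(Word(nums))
+ print(integers.parseString("1 2 3")) # -> ['1', '2', '3']
+ print(integers.parseString("")) # -> []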
+ """
+ def __init__( self, expr, stopOn=None):
+ super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ try:
+ return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+ except (ParseException,IndexError):
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+ return self.strRepr
+
+class _NullToken(object):
+ def __bool__(self):
+ return False
+ __nonzero__ = __bool__
+ def __str__(self):
+ return ""
+
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+ """Optional matching of the given expression.
+
+ Parameters:
+ - expr - expression that may be matched zero or one time
+ - default (optional) - value to be returned if the optional expression is not found.
+
+ Example::
+
+ # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+ zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+ zip.runTests('''
+ # traditional ZIP code
+ 12345
+
+ # ZIP+4 form
+ 12101-0001
+
+ # invalid ZIP
+ 98765-
+ ''')
+
+ prints::
+
+ # traditional ZIP code
+ 12345
+ ['12345']
+
+ # ZIP+4 form
+ 12101-0001
+ ['12101-0001']
+
+ # invalid ZIP
+ 98765-
+ ^
+ FAIL: Expected end of text (at char 5), (line:1, col:6)
+ """
+ def __init__( self, expr, default=_optionalNotMatched ):
+ super(Optional,self).__init__( expr, savelist=False )
+ self.saveAsList = self.expr.saveAsList
+ self.defaultValue = default
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ try:
+ loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+ except (ParseException,IndexError):
+ if self.defaultValue is not _optionalNotMatched:
+ if self.expr.resultsName:
+ tokens = ParseResults([ self.defaultValue ])
+ tokens[self.expr.resultsName] = self.defaultValue
+ else:
+ tokens = [ self.defaultValue ]
+ else:
+ tokens = []
+ return loc, tokens
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "[" + _ustr(self.expr) + "]"
+
+ return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+ """Token for skipping over all undefined text until the matched
+ expression is found.
+
+ Parameters:
+ - expr - target expression marking the end of the data to be skipped
+ - include - (default= ``False``) if True, the target expression is also parsed
+ (the skipped text and target expression are returned as a 2-element list).
+ - ignore - (default= ``None``) used to define grammars (typically quoted strings and
+ comments) that might contain false matches to the target expression
+ - failOn - (default= ``None``) define expressions that are not allowed to be
+ included in the skipped text; if found before the target expression is found,
+ the SkipTo is not a match
+
+ Example::
+
+ report = '''
+ Outstanding Issues Report - 1 Jan 2000
+
+ # | Severity | Description | Days Open
+ -----+----------+-------------------------------------------+-----------
+ 101 | Critical | Intermittent system crash | 6
+ 94 | Cosmetic | Spelling error on Login ('log|n') | 14
+ 79 | Minor | System slow when running too many reports | 47
+ '''
+ integer = Word(nums)
+ SEP = Suppress('|')
+ # use SkipTo to simply match everything up until the next SEP
+ # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+ # - parse action will call token.strip() for each matched token, i.e., the description body
+ string_data = SkipTo(SEP, ignore=quotedString)
+ string_data.setParseAction(tokenMap(str.strip))
+ ticket_expr = (integer("issue_num") + SEP
+ + string_data("sev") + SEP
+ + string_data("desc") + SEP
+ + integer("days_open"))
+
+ for tkt in ticket_expr.searchString(report):
+ print(tkt.dump())
+
+ prints::
+
+ ['101', 'Critical', 'Intermittent system crash', '6']
+ - days_open: 6
+ - desc: Intermittent system crash
+ - issue_num: 101
+ - sev: Critical
+ ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+ - days_open: 14
+ - desc: Spelling error on Login ('log|n')
+ - issue_num: 94
+ - sev: Cosmetic
+ ['79', 'Minor', 'System slow when running too many reports', '47']
+ - days_open: 47
+ - desc: System slow when running too many reports
+ - issue_num: 79
+ - sev: Minor
+ """
+ def __init__( self, other, include=False, ignore=None, failOn=None ):
+ super( SkipTo, self ).__init__( other )
+ self.ignoreExpr = ignore
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+ self.includeMatch = include
+ self.saveAsList = False
+ if isinstance(failOn, basestring):
+ self.failOn = ParserElement._literalStringClass(failOn)
+ else:
+ self.failOn = failOn
+ self.errmsg = "No match found for "+_ustr(self.expr)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ startloc = loc
+ instrlen = len(instring)
+ expr = self.expr
+ expr_parse = self.expr._parse
+ self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+ self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+
+ tmploc = loc
+ while tmploc <= instrlen:
+ if self_failOn_canParseNext is not None:
+ # break if failOn expression matches
+ if self_failOn_canParseNext(instring, tmploc):
+ break
+
+ if self_ignoreExpr_tryParse is not None:
+ # advance past ignore expressions
+ while 1:
+ try:
+ tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+ except ParseBaseException:
+ break
+
+ try:
+ expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+ except (ParseException, IndexError):
+ # no match, advance loc in string
+ tmploc += 1
+ else:
+ # matched skipto expr, done
+ break
+
+ else:
+ # ran off the end of the input string without matching skipto expr, fail
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ # build up return values
+ loc = tmploc
+ skiptext = instring[startloc:loc]
+ skipresult = ParseResults(skiptext)
+
+ if self.includeMatch:
+ loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
+ skipresult += mat
+
+ return loc, skipresult
+
+class Forward(ParseElementEnhance):
+ """Forward declaration of an expression to be defined later -
+ used for recursive grammars, such as algebraic infix notation.
+ When the expression is known, it is assigned to the ``Forward``
+ variable using the ``'<<'`` operator.
+
+ Note: take care when assigning to ``Forward`` not to overlook
+ precedence of operators.
+
+ Specifically, ``'|'`` has a lower precedence than ``'<<'``, so that::
+
+ fwdExpr << a | b | c
+
+ will actually be evaluated as::
+
+ (fwdExpr << a) | b | c
+
+ thereby leaving b and c out as parseable alternatives. It is recommended that you
+ explicitly group the values inserted into the ``Forward``::
+
+ fwdExpr << (a | b | c)
+
+ Converting to use the ``'<<='`` operator instead will avoid this problem.
+
+ See :class:`ParseResults.pprint` for an example of a recursive
+ parser created using ``Forward``.
+ """
+ def __init__( self, other=None ):
+ super(Forward,self).__init__( other, savelist=False )
+
+ def __lshift__( self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass(other)
+ self.expr = other
+ self.strRepr = None
+ self.mayIndexError = self.expr.mayIndexError
+ self.mayReturnEmpty = self.expr.mayReturnEmpty
+ self.setWhitespaceChars( self.expr.whiteChars )
+ self.skipWhitespace = self.expr.skipWhitespace
+ self.saveAsList = self.expr.saveAsList
+ self.ignoreExprs.extend(self.expr.ignoreExprs)
+ return self
+
+ def __ilshift__(self, other):
+ return self << other
+
+ def leaveWhitespace( self ):
+ self.skipWhitespace = False
+ return self
+
+ def streamline( self ):
+ if not self.streamlined:
+ self.streamlined = True
+ if self.expr is not None:
+ self.expr.streamline()
+ return self
+
+ def validate( self, validateTrace=[] ):
+ if self not in validateTrace:
+ tmp = validateTrace[:]+[self]
+ if self.expr is not None:
+ self.expr.validate(tmp)
+ self.checkRecursion([])
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ # Avoid infinite recursion by setting a temporary name
+ self.name = self.__class__.__name__ + ": ..."
+
+ # Use the string representation of main expression.
+ try:
+ if self.expr is not None:
+ retString = _ustr(self.expr)
+ else:
+ retString = "None"
+ finally:
+ del self.name
+ return self.__class__.__name__ + ": " + retString
+
+ def copy(self):
+ if self.expr is not None:
+ return super(Forward,self).copy()
+ else:
+ ret = Forward()
+ ret <<= self
+ return ret
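+
+# Illustrative sketch of a recursive grammar built with Forward (an
+# editorial addition, not part of the library):
+#
+#   expr = Forward()
+#   atom = Word(nums) | Suppress('(') + expr + Suppress(')')
+#   expr <<= atom + ZeroOrMore('+' + atom)
+#   print(expr.parseString("(1+2)+3"))   # -> ['1', '+', '2', '+', '3']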
+
+class TokenConverter(ParseElementEnhance):
+ """
+ Abstract subclass of :class:`ParseElementEnhance`, for converting parsed results.
+ """
+ def __init__( self, expr, savelist=False ):
+ super(TokenConverter,self).__init__( expr )#, savelist )
+ self.saveAsList = False
+
+class Combine(TokenConverter):
+ """Converter to concatenate all matching tokens to a single string.
+ By default, the matching patterns must also be contiguous in the
+ input string; this can be disabled by specifying
+ ``adjacent=False`` in the constructor.
+
+ Example::
+
+ real = Word(nums) + '.' + Word(nums)
+ print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+ # will also erroneously match the following
+ print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+ real = Combine(Word(nums) + '.' + Word(nums))
+ print(real.parseString('3.1416')) # -> ['3.1416']
+ # no match when there are internal spaces
+ print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+ """
+ def __init__( self, expr, joinString="", adjacent=True ):
+ super(Combine,self).__init__( expr )
+ # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+ if adjacent:
+ self.leaveWhitespace()
+ self.adjacent = adjacent
+ self.skipWhitespace = True
+ self.joinString = joinString
+ self.callPreparse = True
+
+ def ignore( self, other ):
+ if self.adjacent:
+ ParserElement.ignore(self, other)
+ else:
+ super( Combine, self).ignore( other )
+ return self
+
+ def postParse( self, instring, loc, tokenlist ):
+ retToks = tokenlist.copy()
+ del retToks[:]
+ retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+ if self.resultsName and retToks.haskeys():
+ return [ retToks ]
+ else:
+ return retToks
+
+class Group(TokenConverter):
+ """Converter to return the matched tokens as a list - useful for
+ returning tokens of :class:`ZeroOrMore` and :class:`OneOrMore` expressions.
+
+ Example::
+
+ ident = Word(alphas)
+ num = Word(nums)
+ term = ident | num
+ func = ident + Optional(delimitedList(term))
+ print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100']
+
+ func = ident + Group(Optional(delimitedList(term)))
+ print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']]
+ """
+ def __init__( self, expr ):
+ super(Group,self).__init__( expr )
+ self.saveAsList = True
+
+ def postParse( self, instring, loc, tokenlist ):
+ return [ tokenlist ]
+
+class Dict(TokenConverter):
+ """Converter to return a repetitive expression as a list, but also
+ as a dictionary. Each element can also be referenced using the first
+ token in the expression as its key. Useful for tabular report
+ scraping when the first column can be used as an item key.
+
+ Example::
+
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+ text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+ attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+ # print attributes as plain groups
+ print(OneOrMore(attr_expr).parseString(text).dump())
+
+ # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+ result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+ print(result.dump())
+
+ # access named fields as dict entries, or output as dict
+ print(result['shape'])
+ print(result.asDict())
+
+ prints::
+
+ ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+ [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+ - color: light blue
+ - posn: upper left
+ - shape: SQUARE
+ - texture: burlap
+ SQUARE
+ {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+
+ See more examples at :class:`ParseResults` of accessing fields by results name.
+ """
+ def __init__( self, expr ):
+ super(Dict,self).__init__( expr )
+ self.saveAsList = True
+
+ def postParse( self, instring, loc, tokenlist ):
+ for i,tok in enumerate(tokenlist):
+ if len(tok) == 0:
+ continue
+ ikey = tok[0]
+ if isinstance(ikey,int):
+ ikey = _ustr(tok[0]).strip()
+ if len(tok)==1:
+ tokenlist[ikey] = _ParseResultsWithOffset("",i)
+ elif len(tok)==2 and not isinstance(tok[1],ParseResults):
+ tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
+ else:
+ dictvalue = tok.copy() #ParseResults(i)
+ del dictvalue[0]
+ if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
+ tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
+ else:
+ tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
+
+ if self.resultsName:
+ return [ tokenlist ]
+ else:
+ return tokenlist
+
+
+class Suppress(TokenConverter):
+ """Converter for ignoring the results of a parsed expression.
+
+ Example::
+
+ source = "a, b, c,d"
+ wd = Word(alphas)
+ wd_list1 = wd + ZeroOrMore(',' + wd)
+ print(wd_list1.parseString(source))
+
+ # often, delimiters that are useful during parsing are just in the
+ # way afterward - use Suppress to keep them out of the parsed output
+ wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+ print(wd_list2.parseString(source))
+
+ prints::
+
+ ['a', ',', 'b', ',', 'c', ',', 'd']
+ ['a', 'b', 'c', 'd']
+
+ (See also :class:`delimitedList`.)
+ """
+ def postParse( self, instring, loc, tokenlist ):
+ return []
+
+ def suppress( self ):
+ return self
+
+
+class OnlyOnce(object):
+ """Wrapper for parse actions, to ensure they are only called once.
+ """
+ def __init__(self, methodCall):
+ self.callable = _trim_arity(methodCall)
+ self.called = False
+ def __call__(self,s,l,t):
+ if not self.called:
+ results = self.callable(s,l,t)
+ self.called = True
+ return results
+ raise ParseException(s,l,"")
+ def reset(self):
+ self.called = False
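+
+# Illustrative use of OnlyOnce (an editorial sketch, not library code):
+#
+#   wd = Word(alphas).setParseAction(OnlyOnce(lambda t: t[0].upper()))
+#   # the wrapped action runs only for the first match; later matches
+#   # raise ParseException until reset() is called on the wrapper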
+
+def traceParseAction(f):
+ """Decorator for debugging parse actions.
+
+ When the parse action is called, this decorator will print
+ ``">> entering method-name(line:<current_source_line>, <parse_location>, <matched_tokens>)"``.
+ When the parse action completes, the decorator will print
+ ``"<<"`` followed by the returned value, or any exception that the parse action raised.
+
+ Example::
+
+ wd = Word(alphas)
+
+ @traceParseAction
+ def remove_duplicate_chars(tokens):
+ return ''.join(sorted(set(''.join(tokens))))
+
+ wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+ print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+
+ prints::
+
+ >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+ <<leaving remove_duplicate_chars (ret: 'dfjkls')
+ ['dfjkls']
+ """
+ f = _trim_arity(f)
+ def z(*paArgs):
+ thisFunc = f.__name__
+ s,l,t = paArgs[-3:]
+ if len(paArgs)>3:
+ thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+ sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
+ try:
+ ret = f(*paArgs)
+ except Exception as exc:
+ sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
+ raise
+ sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
+ return ret
+ try:
+ z.__name__ = f.__name__
+ except AttributeError:
+ pass
+ return z
+
+#
+# global helpers
+#
+def delimitedList( expr, delim=",", combine=False ):
+ """Helper to define a delimited list of expressions - the delimiter
+ defaults to ','. By default, the list elements and delimiters can
+ have intervening whitespace and comments, but this can be
+ overridden by passing ``combine=True``. If
+ ``combine`` is set to ``True``, the matching tokens are
+ returned as a single token string, with the delimiters included;
+ otherwise, the matching tokens are returned as a list of tokens,
+ with the delimiters suppressed.
+
+ Example::
+
+ delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
+ delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+ """
+ dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
+ if combine:
+ return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
+ else:
+ return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
+
+def countedArray( expr, intExpr=None ):
+ """Helper to define a counted list of expressions.
+
+ This helper defines a pattern of the form::
+
+ integer expr expr expr...
+
+ where the leading integer tells how many expr expressions follow.
+ The matched expr tokens are returned as a list - the
+ leading count token is suppressed.
+
+ If ``intExpr`` is specified, it should be a pyparsing expression
+ that produces an integer value.
+
+ Example::
+
+ countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd']
+
+ # in this parser, the leading integer value is given in binary,
+ # '10' indicating that 2 values are in the array
+ binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+ countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd']
+ """
+ arrayExpr = Forward()
+ def countFieldParseAction(s,l,t):
+ n = t[0]
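+ # redefine the Forward on the fly: expect exactly n copies of expr,
+ # or an empty match when the count is 0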
+ arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
+ return []
+ if intExpr is None:
+ intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
+ else:
+ intExpr = intExpr.copy()
+ intExpr.setName("arrayLen")
+ intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+ return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
+ ret = []
+ for i in L:
+ if isinstance(i,list):
+ ret.extend(_flatten(i))
+ else:
+ ret.append(i)
+ return ret
+
+def matchPreviousLiteral(expr):
+ """Helper to define an expression that is indirectly defined from
+ the tokens matched in a previous expression, that is, it looks for
+ a 'repeat' of a previous expression. For example::
+
+ first = Word(nums)
+ second = matchPreviousLiteral(first)
+ matchExpr = first + ":" + second
+
+ will match ``"1:1"``, but not ``"1:2"``. Because this
+ matches a previous literal, it will also match the leading
+ ``"1:1"`` in ``"1:10"``. If this is not desired, use
+ :class:`matchPreviousExpr`. Do *not* use with packrat parsing
+ enabled.
+ """
+ rep = Forward()
+ def copyTokenToRepeater(s,l,t):
+ if t:
+ if len(t) == 1:
+ rep << t[0]
+ else:
+ # flatten t tokens
+ tflat = _flatten(t.asList())
+ rep << And(Literal(tt) for tt in tflat)
+ else:
+ rep << Empty()
+ expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+ rep.setName('(prev) ' + _ustr(expr))
+ return rep
+
+def matchPreviousExpr(expr):
+ """Helper to define an expression that is indirectly defined from
+ the tokens matched in a previous expression, that is, it looks for
+ a 'repeat' of a previous expression. For example::
+
+ first = Word(nums)
+ second = matchPreviousExpr(first)
+ matchExpr = first + ":" + second
+
+ will match ``"1:1"``, but not ``"1:2"``. Because this
+ matches by expressions, it will *not* match the leading ``"1:1"``
+ in ``"1:10"``; the expressions are evaluated first, and then
+ compared, so ``"1"`` is compared with ``"10"``. Do *not* use
+ with packrat parsing enabled.
+ """
+ rep = Forward()
+ e2 = expr.copy()
+ rep <<= e2
+ def copyTokenToRepeater(s,l,t):
+ matchTokens = _flatten(t.asList())
+ def mustMatchTheseTokens(s,l,t):
+ theseTokens = _flatten(t.asList())
+ if theseTokens != matchTokens:
+ raise ParseException("",0,"")
+ rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
+ expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+ rep.setName('(prev) ' + _ustr(expr))
+ return rep
+
+def _escapeRegexRangeChars(s):
+ #~ escape these chars: ^-]
+ for c in r"\^-]":
+ s = s.replace(c,_bslash+c)
+ s = s.replace("\n",r"\n")
+ s = s.replace("\t",r"\t")
+ return _ustr(s)
+
+def oneOf( strs, caseless=False, useRegex=True ):
+ """Helper to quickly define a set of alternative Literals, and makes
+ sure to do longest-first testing when there is a conflict,
+ regardless of the input order, but returns
+ a :class:`MatchFirst` for best performance.
+
+ Parameters:
+
+ - strs - a string of space-delimited literals, or a collection of
+ string literals
+ - caseless - (default= ``False``) - treat all literals as
+ caseless
+ - useRegex - (default= ``True``) - as an optimization, will
+ generate a Regex object; otherwise, will generate
+ a :class:`MatchFirst` object (if ``caseless=True``, or if
+ creating a :class:`Regex` raises an exception)
+
+ Example::
+
+ comp_oper = oneOf("< = > <= >= !=")
+ var = Word(alphas)
+ number = Word(nums)
+ term = var | number
+ comparison_expr = term + comp_oper + term
+ print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))
+
+ prints::
+
+ [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
+ """
+ if caseless:
+ isequal = ( lambda a,b: a.upper() == b.upper() )
+ masks = ( lambda a,b: b.upper().startswith(a.upper()) )
+ parseElementClass = CaselessLiteral
+ else:
+ isequal = ( lambda a,b: a == b )
+ masks = ( lambda a,b: b.startswith(a) )
+ parseElementClass = Literal
+
+ symbols = []
+ if isinstance(strs,basestring):
+ symbols = strs.split()
+ elif isinstance(strs, Iterable):
+ symbols = list(strs)
+ else:
+ warnings.warn("Invalid argument to oneOf, expected string or iterable",
+ SyntaxWarning, stacklevel=2)
+ if not symbols:
+ return NoMatch()
+
+ i = 0
+ while i < len(symbols)-1:
+ cur = symbols[i]
+ for j,other in enumerate(symbols[i+1:]):
+ if ( isequal(other, cur) ):
+ del symbols[i+j+1]
+ break
+ elif ( masks(cur, other) ):
+ del symbols[i+j+1]
+ symbols.insert(i,other)
+ cur = other
+ break
+ else:
+ i += 1
+
+ if not caseless and useRegex:
+ #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
+ try:
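+ # if every symbol is a single character, combine them into one
+ # regex character class; otherwise build a regex alternation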
+ if len(symbols)==len("".join(symbols)):
+ return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
+ else:
+ return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
+ except Exception:
+ warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
+ SyntaxWarning, stacklevel=2)
+
+
+ # last resort, just use MatchFirst
+ return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
+
+def dictOf( key, value ):
+ """Helper to easily and clearly define a dictionary by specifying
+ the respective patterns for the key and value. Takes care of
+ defining the :class:`Dict`, :class:`ZeroOrMore`, and
+ :class:`Group` tokens in the proper order. The key pattern
+ can include delimiting markers or punctuation, as long as they are
+ suppressed, thereby leaving the significant key text. The value
+ pattern can include named results, so that the :class:`Dict` results
+ can include named token fields.
+
+ Example::
+
+ text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+ attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+ print(OneOrMore(attr_expr).parseString(text).dump())
+
+ attr_label = label
+ attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+ # similar to Dict, but simpler call format
+ result = dictOf(attr_label, attr_value).parseString(text)
+ print(result.dump())
+ print(result['shape'])
+ print(result.shape) # object attribute access works too
+ print(result.asDict())
+
+ prints::
+
+ [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+ - color: light blue
+ - posn: upper left
+ - shape: SQUARE
+ - texture: burlap
+ SQUARE
+ SQUARE
+ {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+ """
+ return Dict(OneOrMore(Group(key + value)))
+
+def originalTextFor(expr, asString=True):
+ """Helper to return the original, untokenized text for a given
+ expression. Useful to restore the parsed fields of an HTML start
+ tag into the raw tag text itself, or to revert separate tokens with
+ intervening whitespace back to the original matching input text. By
+ default, returns a string containing the original parsed text.
+
+ If the optional ``asString`` argument is passed as
+ ``False``, then the return value is
+ a :class:`ParseResults` containing any results names that
+ were originally matched, and a single token containing the original
+ matched text from the input string. So if the expression passed to
+ :class:`originalTextFor` contains expressions with defined
+ results names, you must set ``asString`` to ``False`` if you
+ want to preserve those results name values.
+
+ Example::
+
+ src = "this is test <b> bold <i>text</i> </b> normal text "
+ for tag in ("b","i"):
+ opener,closer = makeHTMLTags(tag)
+ patt = originalTextFor(opener + SkipTo(closer) + closer)
+ print(patt.searchString(src)[0])
+
+ prints::
+
+ ['<b> bold <i>text</i> </b>']
+ ['<i>text</i>']
+ """
+ locMarker = Empty().setParseAction(lambda s,loc,t: loc)
+ endlocMarker = locMarker.copy()
+ endlocMarker.callPreparse = False
+ matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+ if asString:
+ extractText = lambda s,l,t: s[t._original_start:t._original_end]
+ else:
+ def extractText(s,l,t):
+ t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+ matchExpr.setParseAction(extractText)
+ matchExpr.ignoreExprs = expr.ignoreExprs
+ return matchExpr
+
+def ungroup(expr):
+ """Helper to undo pyparsing's default grouping of And expressions,
+ even if all but one are non-empty.
+ """
+ return TokenConverter(expr).addParseAction(lambda t:t[0])
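+
+# Illustrative sketch for ungroup (an editorial addition):
+#
+#   grouped = Group(Word(alphas))
+#   print(grouped.parseString("abc"))            # -> [['abc']]
+#   print(ungroup(grouped).parseString("abc"))   # -> ['abc']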
+
+def locatedExpr(expr):
+ """Helper to decorate a returned token with its starting and ending
+ locations in the input string.
+
+ This helper adds the following results names:
+
+ - locn_start = location where matched expression begins
+ - locn_end = location where matched expression ends
+ - value = the actual parsed results
+
+ Be careful if the input text contains ``<TAB>`` characters; you
+ may want to call :class:`ParserElement.parseWithTabs`.
+
+ Example::
+
+ wd = Word(alphas)
+ for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
+ print(match)
+
+ prints::
+
+ [[0, 'ljsdf', 5]]
+ [[8, 'lksdjjf', 15]]
+ [[18, 'lkkjj', 23]]
+ """
+ locator = Empty().setParseAction(lambda s,l,t: l)
+ return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
+
+
+# convenience constants for positional expressions
+empty = Empty().setName("empty")
+lineStart = LineStart().setName("lineStart")
+lineEnd = LineEnd().setName("lineEnd")
+stringStart = StringStart().setName("stringStart")
+stringEnd = StringEnd().setName("stringEnd")
+
+_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
+_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
+_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
+_charRange = Group(_singleChar + Suppress("-") + _singleChar)
+_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
+
+def srange(s):
+ r"""Helper to easily define string ranges for use in Word
+ construction. Borrows syntax from regexp '[]' string range
+ definitions::
+
+ srange("[0-9]") -> "0123456789"
+ srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
+ srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
+
+ The input string must be enclosed in []'s, and the returned string
+ is the expanded character set joined into a single string. The
+ values enclosed in the []'s may be:
+
+ - a single character
+ - an escaped character with a leading backslash (such as ``\-``
+ or ``\]``)
+ - an escaped hex character with a leading ``'\x'``
+ (``\x21``, which is a ``'!'`` character) (``\0x##``
+ is also supported for backwards compatibility)
+ - an escaped octal character with a leading ``'\0'``
+ (``\041``, which is a ``'!'`` character)
+ - a range of any of the above, separated by a dash (``'a-z'``,
+ etc.)
+ - any combination of the above (``'aeiouy'``,
+ ``'a-zA-Z0-9_$'``, etc.)
+ """
+ _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
+ try:
+ return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
+ except Exception:
+ return ""
+
+def matchOnlyAtCol(n):
+ """Helper method for defining parse actions that require matching at
+ a specific column in the input text.
+ """
+ def verifyCol(strg,locn,toks):
+ if col(locn,strg) != n:
+ raise ParseException(strg,locn,"matched token not at column %d" % n)
+ return verifyCol
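+
+# Illustrative sketch for matchOnlyAtCol (an editorial addition):
+#
+#   # accept an integer only when it begins in column 5 of its line
+#   col5_int = Word(nums).setParseAction(matchOnlyAtCol(5))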
+
+def replaceWith(replStr):
+ """Helper method for common parse actions that simply return
+ a literal value. Especially useful when used with
+ :class:`transformString<ParserElement.transformString>` ().
+
+ Example::
+
+ num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+ term = na | num
+
+ OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+ """
+ return lambda s,l,t: [replStr]
+
+def removeQuotes(s,l,t):
+ """Helper parse action for removing quotation marks from parsed
+ quoted strings.
+
+ Example::
+
+ # by default, quotation marks are included in parsed results
+ quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+ # use removeQuotes to strip quotation marks from parsed results
+ quotedString.setParseAction(removeQuotes)
+ quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+ """
+ return t[0][1:-1]
+
+def tokenMap(func, *args):
+ """Helper to define a parse action by mapping a function to all
+ elements of a ParseResults list. If any additional args are passed,
+ they are forwarded to the given function as additional arguments
+ after the token, as in
+ ``hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))``,
+ which will convert the parsed data to an integer using base 16.
+
+ Example (compare the last example to the one in :class:`ParserElement.transformString`)::
+
+ hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+ hex_ints.runTests('''
+ 00 11 22 aa FF 0a 0d 1a
+ ''')
+
+ upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+ OneOrMore(upperword).runTests('''
+ my kingdom for a horse
+ ''')
+
+ wd = Word(alphas).setParseAction(tokenMap(str.title))
+ OneOrMore(wd).setParseAction(' '.join).runTests('''
+ now is the winter of our discontent made glorious summer by this sun of york
+ ''')
+
+ prints::
+
+ 00 11 22 aa FF 0a 0d 1a
+ [0, 17, 34, 170, 255, 10, 13, 26]
+
+ my kingdom for a horse
+ ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+ now is the winter of our discontent made glorious summer by this sun of york
+ ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+ """
+ def pa(s,l,t):
+ return [func(tokn, *args) for tokn in t]
+
+ try:
+ func_name = getattr(func, '__name__',
+ getattr(func, '__class__').__name__)
+ except Exception:
+ func_name = str(func)
+ pa.__name__ = func_name
+
+ return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case.
+Deprecated in favor of :class:`pyparsing_common.upcaseTokens`"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case.
+Deprecated in favor of :class:`pyparsing_common.downcaseTokens`"""
+
+def _makeTags(tagStr, xml,
+ suppress_LT=Suppress("<"),
+ suppress_GT=Suppress(">")):
+ """Internal helper to construct opening and closing tag expressions, given a tag name"""
+ if isinstance(tagStr,basestring):
+ resname = tagStr
+ tagStr = Keyword(tagStr, caseless=not xml)
+ else:
+ resname = tagStr.name
+
+ tagAttrName = Word(alphas,alphanums+"_-:")
+ if (xml):
+ tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
+ openTag = (suppress_LT
+ + tagStr("tag")
+ + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue )))
+ + Optional("/", default=[False])("empty").setParseAction(lambda s,l,t:t[0]=='/')
+ + suppress_GT)
+ else:
+ tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printables, excludeChars=">")
+ openTag = (suppress_LT
+ + tagStr("tag")
+ + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens)
+ + Optional(Suppress("=") + tagAttrValue))))
+ + Optional("/",default=[False])("empty").setParseAction(lambda s,l,t:t[0]=='/')
+ + suppress_GT)
+ closeTag = Combine(_L("</") + tagStr + ">", adjacent=False)
+
+ openTag.setName("<%s>" % resname)
+ # add start<tagname> results name in parse action now that ungrouped names are not reported at two levels
+ openTag.addParseAction(lambda t: t.__setitem__("start"+"".join(resname.replace(":"," ").title().split()), t.copy()))
+ closeTag = closeTag("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
+ openTag.tag = resname
+ closeTag.tag = resname
+ openTag.tag_body = SkipTo(closeTag())
+ return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+ """Helper to construct opening and closing tag expressions for HTML,
+ given a tag name. Matches tags in either upper or lower case,
+ attributes with namespaces and with quoted or unquoted values.
+
+ Example::
+
+ text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
+ # makeHTMLTags returns pyparsing expressions for the opening and
+ # closing tags as a 2-tuple
+ a,a_end = makeHTMLTags("A")
+ link_expr = a + SkipTo(a_end)("link_text") + a_end
+
+ for link in link_expr.searchString(text):
+ # attributes in the <A> tag (like "href" shown here) are
+ # also accessible as named results
+ print(link.link_text, '->', link.href)
+
+ prints::
+
+ pyparsing -> https://github.com/pyparsing/pyparsing/wiki
+ """
+ return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+ """Helper to construct opening and closing tag expressions for XML,
+ given a tag name. Matches tags only in the given upper/lower case.
+
+ Example: similar to :class:`makeHTMLTags`
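+
+ A brief illustrative sketch (an editorial addition)::
+
+ text = '<h1 class="heading">Heading text</h1>'
+ h1, h1_end = makeXMLTags("h1")
+ # XML mode matches the tag case-sensitively and requires quoted
+ # attribute values
+ heading = h1 + SkipTo(h1_end)("text") + h1_end
+ print(heading.searchString(text)[0].text) # -> Heading text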
+ """
+ return _makeTags( tagStr, True )
+
+def withAttribute(*args,**attrDict):
+ """Helper to create a validating parse action to be used with start
+ tags created with :class:`makeXMLTags` or
+ :class:`makeHTMLTags`. Use ``withAttribute`` to qualify
+ a starting tag with a required attribute value, to avoid false
+ matches on common tags such as ``<TD>`` or ``<DIV>``.
+
+    Call ``withAttribute`` with a series of attribute names and
+    values. Specify the list of filter attribute names and values as:
+
+ - keyword arguments, as in ``(align="right")``, or
+ - as an explicit dict with ``**`` operator, when an attribute
+ name is also a Python reserved word, as in ``**{"class":"Customer", "align":"right"}``
+ - a list of name-value tuples, as in ``(("ns1:class", "Customer"), ("ns2:align","right"))``
+
+    Attribute names with a namespace prefix cannot be given as keyword
+    arguments; use the dict or tuple form instead. Attribute names are
+    matched case-insensitively.
+
+ If just testing for ``class`` (with or without a namespace), use
+ :class:`withClass`.
+
+ To verify that the attribute exists, but without specifying a value,
+ pass ``withAttribute.ANY_VALUE`` as the value.
+
+ Example::
+
+ html = '''
+ <div>
+ Some text
+ <div type="grid">1 4 0 1 0</div>
+ <div type="graph">1,3 2,3 1,1</div>
+ <div>this has no type</div>
+ </div>
+
+ '''
+ div,div_end = makeHTMLTags("div")
+
+ # only match div tag having a type attribute with value "grid"
+ div_grid = div().setParseAction(withAttribute(type="grid"))
+ grid_expr = div_grid + SkipTo(div | div_end)("body")
+ for grid_header in grid_expr.searchString(html):
+ print(grid_header.body)
+
+ # construct a match with any div tag having a type attribute, regardless of the value
+ div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
+ div_expr = div_any_type + SkipTo(div | div_end)("body")
+ for div_header in div_expr.searchString(html):
+ print(div_header.body)
+
+ prints::
+
+ 1 4 0 1 0
+
+ 1 4 0 1 0
+ 1,3 2,3 1,1
+ """
+ if args:
+ attrs = args[:]
+ else:
+ attrs = attrDict.items()
+ attrs = [(k,v) for k,v in attrs]
+ def pa(s,l,tokens):
+ for attrName,attrValue in attrs:
+ if attrName not in tokens:
+ raise ParseException(s,l,"no matching attribute " + attrName)
+ if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
+ raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
+ (attrName, tokens[attrName], attrValue))
+ return pa
+withAttribute.ANY_VALUE = object()
+
+def withClass(classname, namespace=''):
+ """Simplified version of :class:`withAttribute` when
+ matching on a div class - made difficult because ``class`` is
+ a reserved word in Python.
+
+ Example::
+
+ html = '''
+ <div>
+ Some text
+ <div class="grid">1 4 0 1 0</div>
+ <div class="graph">1,3 2,3 1,1</div>
+ <div>this &lt;div&gt; has no class</div>
+ </div>
+
+ '''
+ div,div_end = makeHTMLTags("div")
+ div_grid = div().setParseAction(withClass("grid"))
+
+ grid_expr = div_grid + SkipTo(div | div_end)("body")
+ for grid_header in grid_expr.searchString(html):
+ print(grid_header.body)
+
+ div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
+ div_expr = div_any_type + SkipTo(div | div_end)("body")
+ for div_header in div_expr.searchString(html):
+ print(div_header.body)
+
+ prints::
+
+ 1 4 0 1 0
+
+ 1 4 0 1 0
+ 1,3 2,3 1,1
+ """
+ classattr = "%s:class" % namespace if namespace else "class"
+ return withAttribute(**{classattr : classname})
+
+opAssoc = SimpleNamespace()
+opAssoc.LEFT = object()
+opAssoc.RIGHT = object()
+
+def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
+ """Helper method for constructing grammars of expressions made up of
+ operators working in a precedence hierarchy. Operators may be unary
+ or binary, left- or right-associative. Parse actions can also be
+ attached to operator expressions. The generated parser will also
+ recognize the use of parentheses to override operator precedences
+ (see example below).
+
+ Note: if you define a deep operator list, you may see performance
+ issues when using infixNotation. See
+ :class:`ParserElement.enablePackrat` for a mechanism to potentially
+ improve your parser performance.
+
+ Parameters:
+    - baseExpr - expression representing the most basic operand element
+      of the nested expression
+ - opList - list of tuples, one for each operator precedence level
+ in the expression grammar; each tuple is of the form ``(opExpr,
+ numTerms, rightLeftAssoc, parseAction)``, where:
+
+ - opExpr is the pyparsing expression for the operator; may also
+ be a string, which will be converted to a Literal; if numTerms
+ is 3, opExpr is a tuple of two expressions, for the two
+ operators separating the 3 terms
+ - numTerms is the number of terms for this operator (must be 1,
+ 2, or 3)
+    - rightLeftAssoc indicates whether the operator is right- or
+      left-associative, using the pyparsing-defined constants
+      ``opAssoc.RIGHT`` and ``opAssoc.LEFT``.
+ - parseAction is the parse action to be associated with
+ expressions matching this operator expression (the parse action
+ tuple member may be omitted); if the parse action is passed
+ a tuple or list of functions, this is equivalent to calling
+ ``setParseAction(*fn)``
+ (:class:`ParserElement.setParseAction`)
+ - lpar - expression for matching left-parentheses
+ (default= ``Suppress('(')``)
+ - rpar - expression for matching right-parentheses
+ (default= ``Suppress(')')``)
+
+ Example::
+
+ # simple example of four-function arithmetic with ints and
+ # variable names
+ integer = pyparsing_common.signed_integer
+ varname = pyparsing_common.identifier
+
+ arith_expr = infixNotation(integer | varname,
+ [
+ ('-', 1, opAssoc.RIGHT),
+ (oneOf('* /'), 2, opAssoc.LEFT),
+ (oneOf('+ -'), 2, opAssoc.LEFT),
+ ])
+
+ arith_expr.runTests('''
+ 5+3*6
+ (5+3)*6
+ -2--11
+ ''', fullDump=False)
+
+ prints::
+
+ 5+3*6
+ [[5, '+', [3, '*', 6]]]
+
+ (5+3)*6
+ [[[5, '+', 3], '*', 6]]
+
+ -2--11
+ [[['-', 2], '-', ['-', 11]]]
+ """
+ # captive version of FollowedBy that does not do parse actions or capture results names
+ class _FB(FollowedBy):
+ def parseImpl(self, instring, loc, doActions=True):
+ self.expr.tryParse(instring, loc)
+ return loc, []
+
+ ret = Forward()
+ lastExpr = baseExpr | ( lpar + ret + rpar )
+ for i,operDef in enumerate(opList):
+ opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
+ termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
+ if arity == 3:
+ if opExpr is None or len(opExpr) != 2:
+ raise ValueError(
+ "if numterms=3, opExpr must be a tuple or list of two expressions")
+ opExpr1, opExpr2 = opExpr
+ thisExpr = Forward().setName(termName)
+ if rightLeftAssoc == opAssoc.LEFT:
+ if arity == 1:
+ matchExpr = _FB(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
+ elif arity == 2:
+ if opExpr is not None:
+ matchExpr = _FB(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
+ else:
+ matchExpr = _FB(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
+ elif arity == 3:
+ matchExpr = _FB(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
+ Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
+ else:
+ raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+ elif rightLeftAssoc == opAssoc.RIGHT:
+ if arity == 1:
+ # try to avoid LR with this extra test
+ if not isinstance(opExpr, Optional):
+ opExpr = Optional(opExpr)
+ matchExpr = _FB(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
+ elif arity == 2:
+ if opExpr is not None:
+ matchExpr = _FB(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
+ else:
+ matchExpr = _FB(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
+ elif arity == 3:
+ matchExpr = _FB(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
+ Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
+ else:
+ raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+ else:
+ raise ValueError("operator must indicate right or left associativity")
+ if pa:
+ if isinstance(pa, (tuple, list)):
+ matchExpr.setParseAction(*pa)
+ else:
+ matchExpr.setParseAction(pa)
+ thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
+ lastExpr = thisExpr
+ ret <<= lastExpr
+ return ret
+
+operatorPrecedence = infixNotation
+"""(Deprecated) Former name of :class:`infixNotation`, will be
+dropped in a future release."""
+
+dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes")
+sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes")
+quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'|
+ Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes")
+unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")
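+
+# A minimal usage sketch: these expressions match the quote characters as
+# part of the token; pair them with ``removeQuotes`` to strip them:
+#
+#     qs = quotedString.copy().setParseAction(removeQuotes)
+#     qs.parseString('"hello"')    # -> ['hello']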
+
+def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
+ """Helper method for defining nested lists enclosed in opening and
+ closing delimiters ("(" and ")" are the default).
+
+ Parameters:
+ - opener - opening character for a nested list
+ (default= ``"("``); can also be a pyparsing expression
+ - closer - closing character for a nested list
+ (default= ``")"``); can also be a pyparsing expression
+ - content - expression for items within the nested lists
+ (default= ``None``)
+ - ignoreExpr - expression for ignoring opening and closing
+ delimiters (default= :class:`quotedString`)
+
+ If an expression is not provided for the content argument, the
+ nested expression will capture all whitespace-delimited content
+ between delimiters as a list of separate values.
+
+ Use the ``ignoreExpr`` argument to define expressions that may
+ contain opening or closing characters that should not be treated as
+ opening or closing characters for nesting, such as quotedString or
+ a comment expression. Specify multiple expressions using an
+ :class:`Or` or :class:`MatchFirst`. The default is
+ :class:`quotedString`, but if no expressions are to be ignored, then
+ pass ``None`` for this argument.
+
+ Example::
+
+ data_type = oneOf("void int short long char float double")
+ decl_data_type = Combine(data_type + Optional(Word('*')))
+ ident = Word(alphas+'_', alphanums+'_')
+ number = pyparsing_common.number
+ arg = Group(decl_data_type + ident)
+ LPAR,RPAR = map(Suppress, "()")
+
+ code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))
+
+ c_function = (decl_data_type("type")
+ + ident("name")
+ + LPAR + Optional(delimitedList(arg), [])("args") + RPAR
+ + code_body("body"))
+ c_function.ignore(cStyleComment)
+
+ source_code = '''
+ int is_odd(int x) {
+ return (x%2);
+ }
+
+ int dec_to_hex(char hchar) {
+ if (hchar >= '0' && hchar <= '9') {
+ return (ord(hchar)-ord('0'));
+ } else {
+ return (10+ord(hchar)-ord('A'));
+ }
+ }
+ '''
+ for func in c_function.searchString(source_code):
+ print("%(name)s (%(type)s) args: %(args)s" % func)
+
+
+ prints::
+
+ is_odd (int) args: [['int', 'x']]
+ dec_to_hex (int) args: [['char', 'hchar']]
+ """
+ if opener == closer:
+ raise ValueError("opening and closing strings cannot be the same")
+ if content is None:
+ if isinstance(opener,basestring) and isinstance(closer,basestring):
+ if len(opener) == 1 and len(closer)==1:
+ if ignoreExpr is not None:
+ content = (Combine(OneOrMore(~ignoreExpr +
+ CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ if ignoreExpr is not None:
+ content = (Combine(OneOrMore(~ignoreExpr +
+ ~Literal(opener) + ~Literal(closer) +
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ raise ValueError("opening and closing arguments must be strings if no content expression is given")
+ ret = Forward()
+ if ignoreExpr is not None:
+ ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
+ else:
+ ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
+ ret.setName('nested %s%s expression' % (opener,closer))
+ return ret
+
+def indentedBlock(blockStatementExpr, indentStack, indent=True):
+ """Helper method for defining space-delimited indentation blocks,
+ such as those used to define block statements in Python source code.
+
+ Parameters:
+
+ - blockStatementExpr - expression defining syntax of statement that
+ is repeated within the indented block
+    - indentStack - list created by caller to manage indentation stack
+      (multiple ``indentedBlock`` expressions within a single grammar
+      should share a common indentStack)
+    - indent - boolean indicating whether block must be indented beyond
+      the current level; set to False for a block of left-most
+      statements (default= ``True``)
+
+ A valid block must contain at least one ``blockStatement``.
+
+ Example::
+
+ data = '''
+ def A(z):
+ A1
+ B = 100
+ G = A2
+ A2
+ A3
+ B
+ def BB(a,b,c):
+ BB1
+ def BBA():
+ bba1
+ bba2
+ bba3
+ C
+ D
+ def spam(x,y):
+ def eggs(z):
+ pass
+ '''
+
+
+ indentStack = [1]
+ stmt = Forward()
+
+ identifier = Word(alphas, alphanums)
+ funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")
+ func_body = indentedBlock(stmt, indentStack)
+ funcDef = Group( funcDecl + func_body )
+
+ rvalue = Forward()
+ funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
+ rvalue << (funcCall | identifier | Word(nums))
+ assignment = Group(identifier + "=" + rvalue)
+ stmt << ( funcDef | assignment | identifier )
+
+ module_body = OneOrMore(stmt)
+
+ parseTree = module_body.parseString(data)
+ parseTree.pprint()
+
+ prints::
+
+ [['def',
+ 'A',
+ ['(', 'z', ')'],
+ ':',
+ [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
+ 'B',
+ ['def',
+ 'BB',
+ ['(', 'a', 'b', 'c', ')'],
+ ':',
+ [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
+ 'C',
+ 'D',
+ ['def',
+ 'spam',
+ ['(', 'x', 'y', ')'],
+ ':',
+ [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
+ """
+ backup_stack = indentStack[:]
+
+ def reset_stack():
+ indentStack[:] = backup_stack
+
+ def checkPeerIndent(s,l,t):
+ if l >= len(s): return
+ curCol = col(l,s)
+ if curCol != indentStack[-1]:
+ if curCol > indentStack[-1]:
+ raise ParseException(s,l,"illegal nesting")
+ raise ParseException(s,l,"not a peer entry")
+
+ def checkSubIndent(s,l,t):
+ curCol = col(l,s)
+ if curCol > indentStack[-1]:
+ indentStack.append( curCol )
+ else:
+ raise ParseException(s,l,"not a subentry")
+
+ def checkUnindent(s,l,t):
+ if l >= len(s): return
+ curCol = col(l,s)
+ if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
+ raise ParseException(s,l,"not an unindent")
+ indentStack.pop()
+
+ NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
+ INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')
+ PEER = Empty().setParseAction(checkPeerIndent).setName('')
+ UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')
+ if indent:
+ smExpr = Group( Optional(NL) +
+ #~ FollowedBy(blockStatementExpr) +
+ INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
+ else:
+ smExpr = Group( Optional(NL) +
+ (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
+ smExpr.setFailAction(lambda a, b, c, d: reset_stack())
+ blockStatementExpr.ignore(_bslash + LineEnd())
+ return smExpr.setName('indented block')
+
+alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
+punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
+
+anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))
+_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))
+commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
+def replaceHTMLEntity(t):
+    """Helper parse action to replace common HTML entities with their special characters"""
+ return _htmlEntityMap.get(t.entity)
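+
+# A minimal usage sketch: pair commonHTMLEntity with replaceHTMLEntity to
+# decode entities via transformString (on a copy, so the shared module-level
+# expression is left untouched):
+#
+#     decoder = commonHTMLEntity.copy().setParseAction(replaceHTMLEntity)
+#     decoder.transformString("a &lt; b &amp; c")    # -> 'a < b & c'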
+
+# it's easy to get these comment structures wrong - they're very common, so may as well make them available
+cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment")
+"Comment of the form ``/* ... */``"
+
+htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
+"Comment of the form ``<!-- ... -->``"
+
+restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
+dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")
+"Comment of the form ``// ... (to end of line)``"
+
+cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")
+"Comment of either form :class:`cStyleComment` or :class:`dblSlashComment`"
+
+javaStyleComment = cppStyleComment
+"Same as :class:`cppStyleComment`"
+
+pythonStyleComment = Regex(r"#.*").setName("Python style comment")
+"Comment of the form ``# ... (to end of line)``"
+
+_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
+ Optional( Word(" \t") +
+ ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
+commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
+"""(Deprecated) Predefined expression of 1 or more printable words or
+quoted strings, separated by commas.
+
+This expression is deprecated in favor of :class:`pyparsing_common.comma_separated_list`.
+"""
+
+# some other useful expressions - using lower-case class name since we are really using this as a namespace
+class pyparsing_common:
+ """Here are some common low-level expressions that may be useful in
+ jump-starting parser development:
+
+ - numeric forms (:class:`integers<integer>`, :class:`reals<real>`,
+ :class:`scientific notation<sci_real>`)
+ - common :class:`programming identifiers<identifier>`
+ - network addresses (:class:`MAC<mac_address>`,
+ :class:`IPv4<ipv4_address>`, :class:`IPv6<ipv6_address>`)
+ - ISO8601 :class:`dates<iso8601_date>` and
+ :class:`datetime<iso8601_datetime>`
+ - :class:`UUID<uuid>`
+ - :class:`comma-separated list<comma_separated_list>`
+
+ Parse actions:
+
+ - :class:`convertToInteger`
+ - :class:`convertToFloat`
+ - :class:`convertToDate`
+ - :class:`convertToDatetime`
+ - :class:`stripHTMLTags`
+ - :class:`upcaseTokens`
+ - :class:`downcaseTokens`
+
+ Example::
+
+ pyparsing_common.number.runTests('''
+ # any int or real number, returned as the appropriate type
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.fnumber.runTests('''
+ # any int or real number, returned as float
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.hex_integer.runTests('''
+ # hex numbers
+ 100
+ FF
+ ''')
+
+ pyparsing_common.fraction.runTests('''
+ # fractions
+ 1/2
+ -3/4
+ ''')
+
+ pyparsing_common.mixed_integer.runTests('''
+ # mixed fractions
+ 1
+ 1/2
+ -3/4
+ 1-3/4
+ ''')
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests('''
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ ''')
+
+ prints::
+
+ # any int or real number, returned as the appropriate type
+ 100
+ [100]
+
+ -100
+ [-100]
+
+ +100
+ [100]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # any int or real number, returned as float
+ 100
+ [100.0]
+
+ -100
+ [-100.0]
+
+ +100
+ [100.0]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # hex numbers
+ 100
+ [256]
+
+ FF
+ [255]
+
+ # fractions
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ # mixed fractions
+ 1
+ [1]
+
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ 1-3/4
+ [1.75]
+
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ [UUID('12345678-1234-5678-1234-567812345678')]
+ """
+
+ convertToInteger = tokenMap(int)
+ """
+ Parse action for converting parsed integers to Python int
+ """
+
+ convertToFloat = tokenMap(float)
+ """
+ Parse action for converting parsed numbers to Python float
+ """
+
+ integer = Word(nums).setName("integer").setParseAction(convertToInteger)
+ """expression that parses an unsigned integer, returns an int"""
+
+ hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))
+ """expression that parses a hexadecimal integer, returns an int"""
+
+ signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
+ """expression that parses an integer with optional leading sign, returns an int"""
+
+ fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
+ """fractional expression of an integer divided by an integer, returns a float"""
+ fraction.addParseAction(lambda t: t[0]/t[-1])
+
+ mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
+ """mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
+ mixed_integer.addParseAction(sum)
+
+ real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)
+ """expression that parses a floating point number and returns a float"""
+
+ sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
+ """expression that parses a floating point number with optional
+ scientific notation and returns a float"""
+
+ # streamlining this expression makes the docs nicer-looking
+ number = (sci_real | real | signed_integer).streamline()
+ """any numeric expression, returns the corresponding Python type"""
+
+ fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)
+ """any int or real number, returned as float"""
+
+ identifier = Word(alphas+'_', alphanums+'_').setName("identifier")
+ """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""
+
+ ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")
+ "IPv4 address (``0.0.0.0 - 255.255.255.255``)"
+
+ _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")
+ _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address")
+ _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address")
+ _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)
+ _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")
+ ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")
+ "IPv6 address (long, short, or mixed form)"
+
+ mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address")
+    "MAC address xx:xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"
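+
+    # A minimal usage sketch: the network-address expressions above are
+    # plain Regex matchers returning the matched text:
+    #
+    #     pyparsing_common.ipv4_address.parseString("192.168.0.1")
+    #     # -> ['192.168.0.1']
+    #     pyparsing_common.mac_address.parseString("01:23:45:67:89:ab")
+    #     # -> ['01:23:45:67:89:ab']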
+
+ @staticmethod
+ def convertToDate(fmt="%Y-%m-%d"):
+ """
+ Helper to create a parse action for converting parsed date string to Python datetime.date
+
+ Params -
+ - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%d"``)
+
+ Example::
+
+ date_expr = pyparsing_common.iso8601_date.copy()
+ date_expr.setParseAction(pyparsing_common.convertToDate())
+ print(date_expr.parseString("1999-12-31"))
+
+ prints::
+
+ [datetime.date(1999, 12, 31)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt).date()
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ @staticmethod
+ def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):
+ """Helper to create a parse action for converting parsed
+ datetime string to Python datetime.datetime
+
+ Params -
+ - fmt - format to be passed to datetime.strptime (default= ``"%Y-%m-%dT%H:%M:%S.%f"``)
+
+ Example::
+
+ dt_expr = pyparsing_common.iso8601_datetime.copy()
+ dt_expr.setParseAction(pyparsing_common.convertToDatetime())
+ print(dt_expr.parseString("1999-12-31T23:59:59.999"))
+
+ prints::
+
+ [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt)
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")
+ "ISO8601 date (``yyyy-mm-dd``)"
+
+ iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")
+ "ISO8601 datetime (``yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)``) - trailing seconds, milliseconds, and timezone optional; accepts separating ``'T'`` or ``' '``"
+
+ uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")
+ "UUID (``xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx``)"
+
+ _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()
+ @staticmethod
+ def stripHTMLTags(s, l, tokens):
+ """Parse action to remove HTML tags from web page HTML source
+
+ Example::
+
+ # strip HTML links from normal text
+ text = '<td>More info at the <a href="https://github.com/pyparsing/pyparsing/wiki">pyparsing</a> wiki page</td>'
+ td,td_end = makeHTMLTags("TD")
+ table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
+ print(table_text.parseString(text).body)
+
+ Prints::
+
+ More info at the pyparsing wiki page
+ """
+ return pyparsing_common._html_stripper.transformString(tokens[0])
+
+ _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')
+ + Optional( White(" \t") ) ) ).streamline().setName("commaItem")
+ comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")
+ """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
+
+ upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
+ """Parse action to convert tokens to upper case."""
+
+ downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
+ """Parse action to convert tokens to lower case."""
+
+
+class _lazyclassproperty(object):
+ def __init__(self, fn):
+ self.fn = fn
+ self.__doc__ = fn.__doc__
+ self.__name__ = fn.__name__
+
+ def __get__(self, obj, cls):
+ if cls is None:
+ cls = type(obj)
+ if not hasattr(cls, '_intern') or any(cls._intern is getattr(superclass, '_intern', []) for superclass in cls.__mro__[1:]):
+ cls._intern = {}
+ attrname = self.fn.__name__
+ if attrname not in cls._intern:
+ cls._intern[attrname] = self.fn(cls)
+ return cls._intern[attrname]
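+        # note: the MRO check above rebuilds the cache whenever a subclass
+        # would otherwise inherit its parent's ``_intern`` dict, so each
+        # unicode_set subclass memoizes its own character strings exactly
+        # once per class, not per instance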
+
+
+class unicode_set(object):
+ """
+ A set of Unicode characters, for language-specific strings for
+ ``alphas``, ``nums``, ``alphanums``, and ``printables``.
+ A unicode_set is defined by a list of ranges in the Unicode character
+ set, in a class attribute ``_ranges``, such as::
+
+ _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),]
+
+ A unicode set can also be defined using multiple inheritance of other unicode sets::
+
+ class CJK(Chinese, Japanese, Korean):
+ pass
+ """
+ _ranges = []
+
+ @classmethod
+ def _get_chars_for_ranges(cls):
+ ret = []
+ for cc in cls.__mro__:
+ if cc is unicode_set:
+ break
+ for rr in cc._ranges:
+ ret.extend(range(rr[0], rr[-1]+1))
+ return [unichr(c) for c in sorted(set(ret))]
+
+ @_lazyclassproperty
+ def printables(cls):
+ "all non-whitespace characters in this range"
+ return u''.join(filterfalse(unicode.isspace, cls._get_chars_for_ranges()))
+
+ @_lazyclassproperty
+ def alphas(cls):
+ "all alphabetic characters in this range"
+ return u''.join(filter(unicode.isalpha, cls._get_chars_for_ranges()))
+
+ @_lazyclassproperty
+ def nums(cls):
+ "all numeric digit characters in this range"
+ return u''.join(filter(unicode.isdigit, cls._get_chars_for_ranges()))
+
+ @_lazyclassproperty
+ def alphanums(cls):
+ "all alphanumeric characters in this range"
+ return cls.alphas + cls.nums
+
+
+class pyparsing_unicode(unicode_set):
+ """
+ A namespace class for defining common language unicode_sets.
+ """
+ _ranges = [(32, sys.maxunicode)]
+
+ class Latin1(unicode_set):
+ "Unicode set for Latin-1 Unicode Character Range"
+ _ranges = [(0x0020, 0x007e), (0x00a0, 0x00ff),]
+
+ class LatinA(unicode_set):
+ "Unicode set for Latin-A Unicode Character Range"
+ _ranges = [(0x0100, 0x017f),]
+
+ class LatinB(unicode_set):
+ "Unicode set for Latin-B Unicode Character Range"
+ _ranges = [(0x0180, 0x024f),]
+
+ class Greek(unicode_set):
+ "Unicode set for Greek Unicode Character Ranges"
+ _ranges = [
+ (0x0370, 0x03ff), (0x1f00, 0x1f15), (0x1f18, 0x1f1d), (0x1f20, 0x1f45), (0x1f48, 0x1f4d),
+ (0x1f50, 0x1f57), (0x1f59,), (0x1f5b,), (0x1f5d,), (0x1f5f, 0x1f7d), (0x1f80, 0x1fb4), (0x1fb6, 0x1fc4),
+ (0x1fc6, 0x1fd3), (0x1fd6, 0x1fdb), (0x1fdd, 0x1fef), (0x1ff2, 0x1ff4), (0x1ff6, 0x1ffe),
+ ]
+
+ class Cyrillic(unicode_set):
+ "Unicode set for Cyrillic Unicode Character Range"
+ _ranges = [(0x0400, 0x04ff)]
+
+ class Chinese(unicode_set):
+ "Unicode set for Chinese Unicode Character Range"
+ _ranges = [(0x4e00, 0x9fff), (0x3000, 0x303f), ]
+
+ class Japanese(unicode_set):
+ "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges"
+ _ranges = [ ]
+
+ class Kanji(unicode_set):
+ "Unicode set for Kanji Unicode Character Range"
+ _ranges = [(0x4E00, 0x9Fbf), (0x3000, 0x303f), ]
+
+ class Hiragana(unicode_set):
+ "Unicode set for Hiragana Unicode Character Range"
+ _ranges = [(0x3040, 0x309f), ]
+
+ class Katakana(unicode_set):
+ "Unicode set for Katakana Unicode Character Range"
+ _ranges = [(0x30a0, 0x30ff), ]
+
+ class Korean(unicode_set):
+ "Unicode set for Korean Unicode Character Range"
+ _ranges = [(0xac00, 0xd7af), (0x1100, 0x11ff), (0x3130, 0x318f), (0xa960, 0xa97f), (0xd7b0, 0xd7ff), (0x3000, 0x303f), ]
+
+ class CJK(Chinese, Japanese, Korean):
+ "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range"
+ pass
+
+ class Thai(unicode_set):
+ "Unicode set for Thai Unicode Character Range"
+ _ranges = [(0x0e01, 0x0e3a), (0x0e3f, 0x0e5b), ]
+
+ class Arabic(unicode_set):
+ "Unicode set for Arabic Unicode Character Range"
+ _ranges = [(0x0600, 0x061b), (0x061e, 0x06ff), (0x0700, 0x077f), ]
+
+ class Hebrew(unicode_set):
+ "Unicode set for Hebrew Unicode Character Range"
+ _ranges = [(0x0590, 0x05ff), ]
+
+ class Devanagari(unicode_set):
+ "Unicode set for Devanagari Unicode Character Range"
+ _ranges = [(0x0900, 0x097f), (0xa8e0, 0xa8ff)]
+
+pyparsing_unicode.Japanese._ranges = (pyparsing_unicode.Japanese.Kanji._ranges
+ + pyparsing_unicode.Japanese.Hiragana._ranges
+ + pyparsing_unicode.Japanese.Katakana._ranges)
+
+# give each language set an alias under its native-script name (Python 3 only)
+if PY_3:
+ setattr(pyparsing_unicode, "العربية", pyparsing_unicode.Arabic)
+ setattr(pyparsing_unicode, "中文", pyparsing_unicode.Chinese)
+ setattr(pyparsing_unicode, "кириллица", pyparsing_unicode.Cyrillic)
+ setattr(pyparsing_unicode, "Ελληνικά", pyparsing_unicode.Greek)
+ setattr(pyparsing_unicode, "עִברִית", pyparsing_unicode.Hebrew)
+ setattr(pyparsing_unicode, "日本語", pyparsing_unicode.Japanese)
+ setattr(pyparsing_unicode.Japanese, "漢字", pyparsing_unicode.Japanese.Kanji)
+ setattr(pyparsing_unicode.Japanese, "カタカナ", pyparsing_unicode.Japanese.Katakana)
+ setattr(pyparsing_unicode.Japanese, "ひらがな", pyparsing_unicode.Japanese.Hiragana)
+ setattr(pyparsing_unicode, "한국어", pyparsing_unicode.Korean)
+ setattr(pyparsing_unicode, "ไทย", pyparsing_unicode.Thai)
+ setattr(pyparsing_unicode, "देवनागरी", pyparsing_unicode.Devanagari)
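+
+# A minimal usage sketch: the per-language sets plug directly into Word and
+# related classes:
+#
+#     greek_word = Word(pyparsing_unicode.Greek.alphas)
+#     greek_word.parseString("αβγ")    # -> ['αβγ']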
+
+
+if __name__ == "__main__":
+
+ selectToken = CaselessLiteral("select")
+ fromToken = CaselessLiteral("from")
+
+ ident = Word(alphas, alphanums + "_$")
+
+ columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ columnNameList = Group(delimitedList(columnName)).setName("columns")
+ columnSpec = ('*' | columnNameList)
+
+ tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ tableNameList = Group(delimitedList(tableName)).setName("tables")
+
+ simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")
+
+ # demo runTests method, including embedded comments in test string
+ simpleSQL.runTests("""
+ # '*' as column list and dotted table name
+ select * from SYS.XYZZY
+
+ # caseless match on "SELECT", and casts back to "select"
+ SELECT * from XYZZY, ABC
+
+ # list of column names, and mixed case SELECT keyword
+ Select AA,BB,CC from Sys.dual
+
+ # multiple tables
+ Select A, B, C from Sys.dual, Table2
+
+ # invalid SELECT keyword - should fail
+ Xelect A, B, C from Sys.dual
+
+ # incomplete command - should fail
+ Select
+
+ # invalid column name - should fail
+ Select ^^^ frox Sys.dual
+
+ """)
+
+ pyparsing_common.number.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ # any int or real number, returned as float
+ pyparsing_common.fnumber.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ pyparsing_common.hex_integer.runTests("""
+ 100
+ FF
+ """)
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests("""
+ 12345678-1234-5678-1234-567812345678
+ """)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__init__.py
new file mode 100644
index 00000000..8ed060ff
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__init__.py
@@ -0,0 +1,4 @@
+from .core import TomlError
+from .parser import load, loads
+from .test import translate_to_test
+from .writer import dump, dumps \ No newline at end of file
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..c77bf0ce
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/core.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/core.cpython-37.pyc
new file mode 100644
index 00000000..eafd47e0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/core.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/parser.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/parser.cpython-37.pyc
new file mode 100644
index 00000000..1b92f436
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/parser.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/test.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/test.cpython-37.pyc
new file mode 100644
index 00000000..385d62e3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/test.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/utils.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 00000000..9c9019d7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/utils.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/writer.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/writer.cpython-37.pyc
new file mode 100644
index 00000000..59a5706c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/__pycache__/writer.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/core.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/core.py
new file mode 100644
index 00000000..c182734e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/core.py
@@ -0,0 +1,13 @@
+class TomlError(RuntimeError):
+ def __init__(self, message, line, col, filename):
+ RuntimeError.__init__(self, message, line, col, filename)
+ self.message = message
+ self.line = line
+ self.col = col
+ self.filename = filename
+
+ def __str__(self):
+ return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message)
+
+ def __repr__(self):
+ return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename)
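+
+# A minimal usage sketch: TomlError carries position info and renders it as
+# 'filename(line, col): message':
+#
+#     str(TomlError('bad value', 3, 7, 'conf.toml'))
+#     # -> 'conf.toml(3, 7): bad value'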
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/parser.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/parser.py
new file mode 100644
index 00000000..3493aa64
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/parser.py
@@ -0,0 +1,341 @@
+import string, re, sys, datetime
+from .core import TomlError
+from .utils import rfc3339_re, parse_rfc3339_re
+
+if sys.version_info[0] == 2:
+ _chr = unichr
+else:
+ _chr = chr
+
+def load(fin, translate=lambda t, x, v: v, object_pairs_hook=dict):
+ return loads(fin.read(), translate=translate, object_pairs_hook=object_pairs_hook, filename=getattr(fin, 'name', repr(fin)))
+
+def loads(s, filename='<string>', translate=lambda t, x, v: v, object_pairs_hook=dict):
+ if isinstance(s, bytes):
+ s = s.decode('utf-8')
+
+ s = s.replace('\r\n', '\n')
+
+ root = object_pairs_hook()
+ tables = object_pairs_hook()
+ scope = root
+
+ src = _Source(s, filename=filename)
+ ast = _p_toml(src, object_pairs_hook=object_pairs_hook)
+
+ def error(msg):
+ raise TomlError(msg, pos[0], pos[1], filename)
+
+ def process_value(v, object_pairs_hook):
+ kind, text, value, pos = v
+ if kind == 'str' and value.startswith('\n'):
+ value = value[1:]
+ if kind == 'array':
+ if value and any(k != value[0][0] for k, t, v, p in value[1:]):
+ error('array-type-mismatch')
+ value = [process_value(item, object_pairs_hook=object_pairs_hook) for item in value]
+ elif kind == 'table':
+ value = object_pairs_hook([(k, process_value(value[k], object_pairs_hook=object_pairs_hook)) for k in value])
+ return translate(kind, text, value)
+
+ for kind, value, pos in ast:
+ if kind == 'kv':
+ k, v = value
+ if k in scope:
+ error('duplicate_keys. Key "{0}" was used more than once.'.format(k))
+ scope[k] = process_value(v, object_pairs_hook=object_pairs_hook)
+ else:
+ is_table_array = (kind == 'table_array')
+ cur = tables
+ for name in value[:-1]:
+ if isinstance(cur.get(name), list):
+ d, cur = cur[name][-1]
+ else:
+ d, cur = cur.setdefault(name, (None, object_pairs_hook()))
+
+ scope = object_pairs_hook()
+ name = value[-1]
+ if name not in cur:
+ if is_table_array:
+ cur[name] = [(scope, object_pairs_hook())]
+ else:
+ cur[name] = (scope, object_pairs_hook())
+ elif isinstance(cur[name], list):
+ if not is_table_array:
+ error('table_type_mismatch')
+ cur[name].append((scope, object_pairs_hook()))
+ else:
+ if is_table_array:
+ error('table_type_mismatch')
+ old_scope, next_table = cur[name]
+ if old_scope is not None:
+ error('duplicate_tables')
+ cur[name] = (scope, next_table)
+
+ def merge_tables(scope, tables):
+ if scope is None:
+ scope = object_pairs_hook()
+ for k in tables:
+ if k in scope:
+ error('key_table_conflict')
+ v = tables[k]
+ if isinstance(v, list):
+ scope[k] = [merge_tables(sc, tbl) for sc, tbl in v]
+ else:
+ scope[k] = merge_tables(v[0], v[1])
+ return scope
+
+ return merge_tables(root, tables)
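+
+# A minimal usage sketch (import path per pip's vendoring layout):
+#
+#     from pip._vendor import pytoml
+#     pytoml.loads('[server]\nhost = "example.com"\nport = 8080\n')
+#     # -> {'server': {'host': 'example.com', 'port': 8080}}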
+
+class _Source:
+ def __init__(self, s, filename=None):
+ self.s = s
+ self._pos = (1, 1)
+ self._last = None
+ self._filename = filename
+ self.backtrack_stack = []
+
+ def last(self):
+ return self._last
+
+ def pos(self):
+ return self._pos
+
+ def fail(self):
+ return self._expect(None)
+
+ def consume_dot(self):
+ if self.s:
+ self._last = self.s[0]
+            self.s = self.s[1:]
+ self._advance(self._last)
+ return self._last
+ return None
+
+ def expect_dot(self):
+ return self._expect(self.consume_dot())
+
+ def consume_eof(self):
+ if not self.s:
+ self._last = ''
+ return True
+ return False
+
+ def expect_eof(self):
+ return self._expect(self.consume_eof())
+
+ def consume(self, s):
+ if self.s.startswith(s):
+ self.s = self.s[len(s):]
+ self._last = s
+ self._advance(s)
+ return True
+ return False
+
+ def expect(self, s):
+ return self._expect(self.consume(s))
+
+ def consume_re(self, re):
+ m = re.match(self.s)
+ if m:
+ self.s = self.s[len(m.group(0)):]
+ self._last = m
+ self._advance(m.group(0))
+ return m
+ return None
+
+ def expect_re(self, re):
+ return self._expect(self.consume_re(re))
+
+ def __enter__(self):
+ self.backtrack_stack.append((self.s, self._pos))
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ self.backtrack_stack.pop()
+ else:
+ self.s, self._pos = self.backtrack_stack.pop()
+ return type == TomlError
+
+ def commit(self):
+ self.backtrack_stack[-1] = (self.s, self._pos)
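+
+    # Backtracking note: ``with s:`` snapshots the input position; __exit__
+    # restores it and swallows TomlError on failure, so the parser falls
+    # through to the next alternative, while commit() pins the consumed
+    # input so later failures no longer rewind past it.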
+
+ def _expect(self, r):
+ if not r:
+ raise TomlError('msg', self._pos[0], self._pos[1], self._filename)
+ return r
+
+ def _advance(self, s):
+ suffix_pos = s.rfind('\n')
+ if suffix_pos == -1:
+ self._pos = (self._pos[0], self._pos[1] + len(s))
+ else:
+ self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos)
+
+_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*')
+def _p_ews(s):
+ s.expect_re(_ews_re)
+
+_ws_re = re.compile(r'[ \t]*')
+def _p_ws(s):
+ s.expect_re(_ws_re)
+
+_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"',
+ '\\': '\\', 'f': '\f' }
+
+_basicstr_re = re.compile(r'[^"\\\000-\037]*')
+_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})')
+_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})')
+_escapes_re = re.compile(r'[btnfr\"\\]')
+_newline_esc_re = re.compile('\n[ \t\n]*')
+def _p_basicstr_content(s, content=_basicstr_re):
+ res = []
+ while True:
+ res.append(s.expect_re(content).group(0))
+ if not s.consume('\\'):
+ break
+ if s.consume_re(_newline_esc_re):
+ pass
+ elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re):
+ v = int(s.last().group(1), 16)
+ if 0xd800 <= v < 0xe000:
+ s.fail()
+ res.append(_chr(v))
+ else:
+ s.expect_re(_escapes_re)
+ res.append(_escapes[s.last().group(0)])
+ return ''.join(res)
+
+_key_re = re.compile(r'[0-9a-zA-Z-_]+')
+def _p_key(s):
+ with s:
+ s.expect('"')
+ r = _p_basicstr_content(s, _basicstr_re)
+ s.expect('"')
+ return r
+ if s.consume('\''):
+ if s.consume('\'\''):
+ r = s.expect_re(_litstr_ml_re).group(0)
+ s.expect('\'\'\'')
+ else:
+ r = s.expect_re(_litstr_re).group(0)
+ s.expect('\'')
+ return r
+ return s.expect_re(_key_re).group(0)
+
+_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?')
+
+_basicstr_ml_re = re.compile(r'(?:""?(?!")|[^"\\\000-\011\013-\037])*')
+_litstr_re = re.compile(r"[^'\000\010\012-\037]*")
+_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\010\013-\037]))*")
+def _p_value(s, object_pairs_hook):
+ pos = s.pos()
+
+ if s.consume('true'):
+ return 'bool', s.last(), True, pos
+ if s.consume('false'):
+ return 'bool', s.last(), False, pos
+
+ if s.consume('"'):
+ if s.consume('""'):
+ r = _p_basicstr_content(s, _basicstr_ml_re)
+ s.expect('"""')
+ else:
+ r = _p_basicstr_content(s, _basicstr_re)
+ s.expect('"')
+ return 'str', r, r, pos
+
+ if s.consume('\''):
+ if s.consume('\'\''):
+ r = s.expect_re(_litstr_ml_re).group(0)
+ s.expect('\'\'\'')
+ else:
+ r = s.expect_re(_litstr_re).group(0)
+ s.expect('\'')
+ return 'str', r, r, pos
+
+ if s.consume_re(rfc3339_re):
+ m = s.last()
+ return 'datetime', m.group(0), parse_rfc3339_re(m), pos
+
+ if s.consume_re(_float_re):
+ m = s.last().group(0)
+ r = m.replace('_','')
+ if '.' in m or 'e' in m or 'E' in m:
+ return 'float', m, float(r), pos
+ else:
+ return 'int', m, int(r, 10), pos
+
+ if s.consume('['):
+ items = []
+ with s:
+ while True:
+ _p_ews(s)
+ items.append(_p_value(s, object_pairs_hook=object_pairs_hook))
+ s.commit()
+ _p_ews(s)
+ s.expect(',')
+ s.commit()
+ _p_ews(s)
+ s.expect(']')
+ return 'array', None, items, pos
+
+ if s.consume('{'):
+ _p_ws(s)
+ items = object_pairs_hook()
+ if not s.consume('}'):
+ k = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ items[k] = _p_value(s, object_pairs_hook=object_pairs_hook)
+ _p_ws(s)
+ while s.consume(','):
+ _p_ws(s)
+ k = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ items[k] = _p_value(s, object_pairs_hook=object_pairs_hook)
+ _p_ws(s)
+ s.expect('}')
+ return 'table', None, items, pos
+
+ s.fail()
+
+def _p_stmt(s, object_pairs_hook):
+ pos = s.pos()
+    if s.consume('['):
+ is_array = s.consume('[')
+ _p_ws(s)
+ keys = [_p_key(s)]
+ _p_ws(s)
+ while s.consume('.'):
+ _p_ws(s)
+ keys.append(_p_key(s))
+ _p_ws(s)
+ s.expect(']')
+ if is_array:
+ s.expect(']')
+ return 'table_array' if is_array else 'table', keys, pos
+
+ key = _p_key(s)
+ _p_ws(s)
+ s.expect('=')
+ _p_ws(s)
+ value = _p_value(s, object_pairs_hook=object_pairs_hook)
+ return 'kv', (key, value), pos
+
+_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*')
+def _p_toml(s, object_pairs_hook):
+ stmts = []
+ _p_ews(s)
+ with s:
+ stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook))
+ while True:
+ s.commit()
+ s.expect_re(_stmtsep_re)
+ stmts.append(_p_stmt(s, object_pairs_hook=object_pairs_hook))
+ _p_ews(s)
+ s.expect_eof()
+ return stmts
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/test.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/test.py
new file mode 100644
index 00000000..ec8abfc6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/test.py
@@ -0,0 +1,30 @@
+import datetime
+from .utils import format_rfc3339
+
+try:
+ _string_types = (str, unicode)
+ _int_types = (int, long)
+except NameError:
+ _string_types = str
+ _int_types = int
+
+def translate_to_test(v):
+ if isinstance(v, dict):
+ return { k: translate_to_test(v) for k, v in v.items() }
+ if isinstance(v, list):
+ a = [translate_to_test(x) for x in v]
+ if v and isinstance(v[0], dict):
+ return a
+ else:
+ return {'type': 'array', 'value': a}
+ if isinstance(v, datetime.datetime):
+ return {'type': 'datetime', 'value': format_rfc3339(v)}
+ if isinstance(v, bool):
+ return {'type': 'bool', 'value': 'true' if v else 'false'}
+ if isinstance(v, _int_types):
+ return {'type': 'integer', 'value': str(v)}
+ if isinstance(v, float):
+ return {'type': 'float', 'value': '{:.17}'.format(v)}
+ if isinstance(v, _string_types):
+ return {'type': 'string', 'value': v}
+ raise RuntimeError('unexpected value: {!r}'.format(v))
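+
+# A minimal usage sketch: values are rewritten into the tagged form used by
+# TOML decoder test suites:
+#
+#     translate_to_test({'n': 1, 'ok': True})
+#     # -> {'n': {'type': 'integer', 'value': '1'},
+#     #     'ok': {'type': 'bool', 'value': 'true'}}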
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/utils.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/utils.py
new file mode 100644
index 00000000..636a680b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/utils.py
@@ -0,0 +1,67 @@
+import datetime
+import re
+
+rfc3339_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))')
+
+def parse_rfc3339(v):
+ m = rfc3339_re.match(v)
+ if not m or m.group(0) != v:
+ return None
+ return parse_rfc3339_re(m)
+
+def parse_rfc3339_re(m):
+ r = map(int, m.groups()[:6])
+ if m.group(7):
+ micro = float(m.group(7))
+ else:
+ micro = 0
+
+ if m.group(8):
+ g = int(m.group(8), 10) * 60 + int(m.group(9), 10)
+ tz = _TimeZone(datetime.timedelta(0, g * 60))
+ else:
+ tz = _TimeZone(datetime.timedelta(0, 0))
+
+ y, m, d, H, M, S = r
+ return datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz)
+
+
+def format_rfc3339(v):
+ offs = v.utcoffset()
+ offs = int(offs.total_seconds()) // 60 if offs is not None else 0
+
+ if offs == 0:
+ suffix = 'Z'
+ else:
+ if offs > 0:
+ suffix = '+'
+ else:
+ suffix = '-'
+ offs = -offs
+ suffix = '{0}{1:02}:{2:02}'.format(suffix, offs // 60, offs % 60)
+
+ if v.microsecond:
+ return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix
+ else:
+ return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix
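+
+# A minimal usage sketch: parse and format round-trip (modulo offset
+# normalization):
+#
+#     dt = parse_rfc3339('1999-12-31T23:59:59.5Z')
+#     format_rfc3339(dt)    # -> '1999-12-31T23:59:59.500000Z'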
+
+class _TimeZone(datetime.tzinfo):
+ def __init__(self, offset):
+ self._offset = offset
+
+ def utcoffset(self, dt):
+ return self._offset
+
+ def dst(self, dt):
+ return None
+
+ def tzname(self, dt):
+ m = self._offset.total_seconds() // 60
+ if m < 0:
+ res = '-'
+ m = -m
+ else:
+ res = '+'
+        h = m // 60
+        m = m - h * 60
+        # '{:.02}' on these floats yields strings like '1.0', not a
+        # zero-padded two-digit field; format as two-digit ints instead
+        return '{}{:02d}{:02d}'.format(res, int(h), int(m))
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/writer.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/writer.py
new file mode 100644
index 00000000..73b5089c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/pytoml/writer.py
@@ -0,0 +1,106 @@
+from __future__ import unicode_literals
+import io, datetime, math, string, sys
+
+from .utils import format_rfc3339
+
+if sys.version_info[0] == 3:
+ long = int
+ unicode = str
+
+
+def dumps(obj, sort_keys=False):
+ fout = io.StringIO()
+ dump(obj, fout, sort_keys=sort_keys)
+ return fout.getvalue()
+
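+
+# A minimal usage sketch:
+#
+#     dumps({'server': {'host': 'example.com', 'port': 8080}})
+#     # -> '[server]\nhost = "example.com"\nport = 8080\n'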
+
+_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'}
+
+
+def _escape_string(s):
+ res = []
+ start = 0
+
+ def flush():
+ if start != i:
+ res.append(s[start:i])
+ return i + 1
+
+ i = 0
+ while i < len(s):
+ c = s[i]
+ if c in '"\\\n\r\t\b\f':
+ start = flush()
+ res.append('\\' + _escapes[c])
+ elif ord(c) < 0x20:
+ start = flush()
+ res.append('\\u%04x' % ord(c))
+ i += 1
+
+ flush()
+ return '"' + ''.join(res) + '"'
+
+
+_key_chars = string.digits + string.ascii_letters + '-_'
+def _escape_id(s):
+ if any(c not in _key_chars for c in s):
+ return _escape_string(s)
+ return s
+
+
+def _format_value(v):
+ if isinstance(v, bool):
+ return 'true' if v else 'false'
+ if isinstance(v, int) or isinstance(v, long):
+ return unicode(v)
+ if isinstance(v, float):
+ if math.isnan(v) or math.isinf(v):
+ raise ValueError("{0} is not a valid TOML value".format(v))
+ else:
+ return repr(v)
+ elif isinstance(v, unicode) or isinstance(v, bytes):
+ return _escape_string(v)
+ elif isinstance(v, datetime.datetime):
+ return format_rfc3339(v)
+ elif isinstance(v, list):
+ return '[{0}]'.format(', '.join(_format_value(obj) for obj in v))
+ elif isinstance(v, dict):
+ return '{{{0}}}'.format(', '.join('{} = {}'.format(_escape_id(k), _format_value(obj)) for k, obj in v.items()))
+ else:
+ raise RuntimeError(v)
+
+
+def dump(obj, fout, sort_keys=False):
+ tables = [((), obj, False)]
+
+ while tables:
+ name, table, is_array = tables.pop()
+ if name:
+ section_name = '.'.join(_escape_id(c) for c in name)
+ if is_array:
+ fout.write('[[{0}]]\n'.format(section_name))
+ else:
+ fout.write('[{0}]\n'.format(section_name))
+
+ table_keys = sorted(table.keys()) if sort_keys else table.keys()
+ new_tables = []
+ has_kv = False
+ for k in table_keys:
+ v = table[k]
+ if isinstance(v, dict):
+ new_tables.append((name + (k,), v, False))
+ elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v):
+ new_tables.extend((name + (k,), d, True) for d in v)
+ elif v is None:
+ # based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344
+ fout.write(
+ '#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k)))
+ has_kv = True
+ else:
+ fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v)))
+ has_kv = True
+
+ tables.extend(reversed(new_tables))
+
+ if (name or has_kv) and tables:
+ fout.write('\n')
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__init__.py
new file mode 100644
index 00000000..1d30e3e0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__init__.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+
+# __
+# /__) _ _ _ _ _/ _
+# / ( (- (/ (/ (- _) / _)
+# /
+
+"""
+Requests HTTP Library
+~~~~~~~~~~~~~~~~~~~~~
+
+Requests is an HTTP library, written in Python, for human beings. Basic GET
+usage:
+
+ >>> import requests
+ >>> r = requests.get('https://www.python.org')
+ >>> r.status_code
+ 200
+ >>> 'Python is a programming language' in r.content
+ True
+
+... or POST:
+
+ >>> payload = dict(key1='value1', key2='value2')
+ >>> r = requests.post('https://httpbin.org/post', data=payload)
+ >>> print(r.text)
+ {
+ ...
+ "form": {
+ "key2": "value2",
+ "key1": "value1"
+ },
+ ...
+ }
+
+The other HTTP methods are supported - see `requests.api`. Full documentation
+is at <http://python-requests.org>.
+
+:copyright: (c) 2017 by Kenneth Reitz.
+:license: Apache 2.0, see LICENSE for more details.
+"""
+
+from pip._vendor import urllib3
+from pip._vendor import chardet
+import warnings
+from .exceptions import RequestsDependencyWarning
+
+
+def check_compatibility(urllib3_version, chardet_version):
+ urllib3_version = urllib3_version.split('.')
+ assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git.
+
+ # Sometimes, urllib3 only reports its version as 16.1.
+ if len(urllib3_version) == 2:
+ urllib3_version.append('0')
+
+ # Check urllib3 for compatibility.
+ major, minor, patch = urllib3_version # noqa: F811
+ major, minor, patch = int(major), int(minor), int(patch)
+ # urllib3 >= 1.21.1, <= 1.25
+ assert major == 1
+ assert minor >= 21
+ assert minor <= 25
+
+ # Check chardet for compatibility.
+ major, minor, patch = chardet_version.split('.')[:3]
+ major, minor, patch = int(major), int(minor), int(patch)
+ # chardet >= 3.0.2, < 3.1.0
+ assert major == 3
+ assert minor < 1
+ assert patch >= 2
+
+
+def _check_cryptography(cryptography_version):
+ # cryptography < 1.3.4
+ try:
+ cryptography_version = list(map(int, cryptography_version.split('.')))
+ except ValueError:
+ return
+
+ if cryptography_version < [1, 3, 4]:
+ warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
+ warnings.warn(warning, RequestsDependencyWarning)
+
+# Check imported dependencies for compatibility.
+try:
+ check_compatibility(urllib3.__version__, chardet.__version__)
+except (AssertionError, ValueError):
+ warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
+ "version!".format(urllib3.__version__, chardet.__version__),
+ RequestsDependencyWarning)
+
+# Attempt to enable urllib3's SNI support, if possible
+from pip._internal.utils.compat import WINDOWS
+if not WINDOWS:
+ try:
+ from pip._vendor.urllib3.contrib import pyopenssl
+ pyopenssl.inject_into_urllib3()
+
+ # Check cryptography version
+ from cryptography import __version__ as cryptography_version
+ _check_cryptography(cryptography_version)
+ except ImportError:
+ pass
+
+# urllib3's DependencyWarnings should be silenced.
+from pip._vendor.urllib3.exceptions import DependencyWarning
+warnings.simplefilter('ignore', DependencyWarning)
+
+from .__version__ import __title__, __description__, __url__, __version__
+from .__version__ import __build__, __author__, __author_email__, __license__
+from .__version__ import __copyright__, __cake__
+
+from . import utils
+from . import packages
+from .models import Request, Response, PreparedRequest
+from .api import request, get, head, post, patch, put, delete, options
+from .sessions import session, Session
+from .status_codes import codes
+from .exceptions import (
+ RequestException, Timeout, URLRequired,
+ TooManyRedirects, HTTPError, ConnectionError,
+ FileModeWarning, ConnectTimeout, ReadTimeout
+)
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+from logging import NullHandler
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+# FileModeWarnings go off per the default.
+warnings.simplefilter('default', FileModeWarning, append=True)
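+
+# An illustrative sketch of how check_compatibility() above behaves; the
+# version strings below are examples only:
+#
+#     >>> check_compatibility('1.24.1', '3.0.4')   # in range: returns None
+#     >>> check_compatibility('2.0.0', '3.0.4')    # urllib3 major != 1
+#     Traceback (most recent call last):
+#       ...
+#     AssertionError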
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..7f3c9fc8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc
new file mode 100644
index 00000000..fb18c78b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc
new file mode 100644
index 00000000..f5ec913c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc
new file mode 100644
index 00000000..c2745459
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/api.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/api.cpython-37.pyc
new file mode 100644
index 00000000..a73135de
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/api.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-37.pyc
new file mode 100644
index 00000000..b9f45ad0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-37.pyc
new file mode 100644
index 00000000..fefda569
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..ae2595b8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc
new file mode 100644
index 00000000..8a7a2c99
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 00000000..cc3915fc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/help.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/help.cpython-37.pyc
new file mode 100644
index 00000000..c61fecb4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/help.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc
new file mode 100644
index 00000000..e5b598be
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/models.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/models.cpython-37.pyc
new file mode 100644
index 00000000..8c77df8d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/models.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-37.pyc
new file mode 100644
index 00000000..ef0d3914
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc
new file mode 100644
index 00000000..4627c7a0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc
new file mode 100644
index 00000000..f4ee09bd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-37.pyc
new file mode 100644
index 00000000..93c5d2e5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 00000000..9a00b700
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__version__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__version__.py
new file mode 100644
index 00000000..9844f740
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/__version__.py
@@ -0,0 +1,14 @@
+# .-. .-. .-. . . .-. .-. .-. .-.
+# |( |- |.| | | |- `-. | `-.
+# ' ' `-' `-`.`-' `-' `-' ' `-'
+
+__title__ = 'requests'
+__description__ = 'Python HTTP for Humans.'
+__url__ = 'http://python-requests.org'
+__version__ = '2.22.0'
+__build__ = 0x022200
+__author__ = 'Kenneth Reitz'
+__author_email__ = 'me@kennethreitz.org'
+__license__ = 'Apache 2.0'
+__copyright__ = 'Copyright 2019 Kenneth Reitz'
+__cake__ = u'\u2728 \U0001f370 \u2728'
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/_internal_utils.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/_internal_utils.py
new file mode 100644
index 00000000..759d9a56
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/_internal_utils.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests._internal_utils
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+Provides utility functions that are consumed internally by Requests and
+that depend on extremely few external helpers (such as compat).
+"""
+
+from .compat import is_py2, builtin_str, str
+
+
+def to_native_string(string, encoding='ascii'):
+ """Given a string object, regardless of type, returns a representation of
+ that string in the native string type, encoding and decoding where
+ necessary. This assumes ASCII unless told otherwise.
+ """
+ if isinstance(string, builtin_str):
+ out = string
+ else:
+ if is_py2:
+ out = string.encode(encoding)
+ else:
+ out = string.decode(encoding)
+
+ return out
+
+
+def unicode_is_ascii(u_string):
+ """Determine if unicode string only contains ASCII characters.
+
+ :param str u_string: unicode string to check. Must be unicode
+ and not Python 2 `str`.
+ :rtype: bool
+ """
+ assert isinstance(u_string, str)
+ try:
+ u_string.encode('ascii')
+ return True
+ except UnicodeEncodeError:
+ return False
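+
+# A doctest-style sketch of the two helpers above (Python 3 behaviour):
+#
+#     >>> to_native_string(b'abc')    # bytes are decoded on Python 3
+#     'abc'
+#     >>> unicode_is_ascii(u'abc')
+#     True
+#     >>> unicode_is_ascii(u'\u2728')
+#     False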
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/adapters.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/adapters.py
new file mode 100644
index 00000000..c30e7c92
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/adapters.py
@@ -0,0 +1,533 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.adapters
+~~~~~~~~~~~~~~~~~
+
+This module contains the transport adapters that Requests uses to define
+and maintain connections.
+"""
+
+import os.path
+import socket
+
+from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
+from pip._vendor.urllib3.response import HTTPResponse
+from pip._vendor.urllib3.util import parse_url
+from pip._vendor.urllib3.util import Timeout as TimeoutSauce
+from pip._vendor.urllib3.util.retry import Retry
+from pip._vendor.urllib3.exceptions import ClosedPoolError
+from pip._vendor.urllib3.exceptions import ConnectTimeoutError
+from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
+from pip._vendor.urllib3.exceptions import MaxRetryError
+from pip._vendor.urllib3.exceptions import NewConnectionError
+from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
+from pip._vendor.urllib3.exceptions import ProtocolError
+from pip._vendor.urllib3.exceptions import ReadTimeoutError
+from pip._vendor.urllib3.exceptions import SSLError as _SSLError
+from pip._vendor.urllib3.exceptions import ResponseError
+from pip._vendor.urllib3.exceptions import LocationValueError
+
+from .models import Response
+from .compat import urlparse, basestring
+from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
+ get_encoding_from_headers, prepend_scheme_if_needed,
+ get_auth_from_url, urldefragauth, select_proxy)
+from .structures import CaseInsensitiveDict
+from .cookies import extract_cookies_to_jar
+from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
+ ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
+ InvalidURL)
+from .auth import _basic_auth_str
+
+try:
+ from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
+except ImportError:
+ def SOCKSProxyManager(*args, **kwargs):
+ raise InvalidSchema("Missing dependencies for SOCKS support.")
+
+DEFAULT_POOLBLOCK = False
+DEFAULT_POOLSIZE = 10
+DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
+
+
+class BaseAdapter(object):
+ """The Base Transport Adapter"""
+
+ def __init__(self):
+ super(BaseAdapter, self).__init__()
+
+ def send(self, request, stream=False, timeout=None, verify=True,
+ cert=None, proxies=None):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ """
+ raise NotImplementedError
+
+ def close(self):
+ """Cleans up adapter specific items."""
+ raise NotImplementedError
+
+
+class HTTPAdapter(BaseAdapter):
+ """The built-in HTTP Adapter for urllib3.
+
+ Provides a general-case interface for Requests sessions to contact HTTP and
+ HTTPS urls by implementing the Transport Adapter interface. This class will
+ usually be created by the :class:`Session <Session>` class under the
+ covers.
+
+ :param pool_connections: The number of urllib3 connection pools to cache.
+ :param pool_maxsize: The maximum number of connections to save in the pool.
+ :param max_retries: The maximum number of retries each connection
+ should attempt. Note, this applies only to failed DNS lookups, socket
+ connections and connection timeouts, never to requests where data has
+ made it to the server. By default, Requests does not retry failed
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
+ :param pool_block: Whether the connection pool should block for connections.
+
+ Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
+ >>> s.mount('http://', a)
+ """
+ __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
+ '_pool_block']
+
+ def __init__(self, pool_connections=DEFAULT_POOLSIZE,
+ pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
+ pool_block=DEFAULT_POOLBLOCK):
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
+ self.config = {}
+ self.proxy_manager = {}
+
+ super(HTTPAdapter, self).__init__()
+
+ self._pool_connections = pool_connections
+ self._pool_maxsize = pool_maxsize
+ self._pool_block = pool_block
+
+ self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
+
+ def __getstate__(self):
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
+ def __setstate__(self, state):
+ # Can't handle by adding 'proxy_manager' to self.__attrs__ because
+ # self.poolmanager uses a lambda function, which isn't pickleable.
+ self.proxy_manager = {}
+ self.config = {}
+
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager(self._pool_connections, self._pool_maxsize,
+ block=self._pool_block)
+
+ def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+ """Initializes a urllib3 PoolManager.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param connections: The number of urllib3 connection pools to cache.
+ :param maxsize: The maximum number of connections to save in the pool.
+ :param block: Block when no free connections are available.
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
+ """
+ # save these values for pickling
+ self._pool_connections = connections
+ self._pool_maxsize = maxsize
+ self._pool_block = block
+
+ self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
+ block=block, strict=True, **pool_kwargs)
+
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
+ """Return urllib3 ProxyManager for the given proxy.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+ :returns: ProxyManager
+ :rtype: urllib3.ProxyManager
+ """
+ if proxy in self.proxy_manager:
+ manager = self.proxy_manager[proxy]
+ elif proxy.lower().startswith('socks'):
+ username, password = get_auth_from_url(proxy)
+ manager = self.proxy_manager[proxy] = SOCKSProxyManager(
+ proxy,
+ username=username,
+ password=password,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs
+ )
+ else:
+ proxy_headers = self.proxy_headers(proxy)
+ manager = self.proxy_manager[proxy] = proxy_from_url(
+ proxy,
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs)
+
+ return manager
+
+ def cert_verify(self, conn, url, verify, cert):
+ """Verify a SSL certificate. This method should not be called from user
+ code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param conn: The urllib3 connection object associated with the cert.
+ :param url: The requested URL.
+ :param verify: Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use
+ :param cert: The SSL certificate to verify.
+ """
+ if url.lower().startswith('https') and verify:
+
+ cert_loc = None
+
+ # Allow self-specified cert location.
+ if verify is not True:
+ cert_loc = verify
+
+ if not cert_loc:
+ cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
+
+ if not cert_loc or not os.path.exists(cert_loc):
+ raise IOError("Could not find a suitable TLS CA certificate bundle, "
+ "invalid path: {}".format(cert_loc))
+
+ conn.cert_reqs = 'CERT_REQUIRED'
+
+ if not os.path.isdir(cert_loc):
+ conn.ca_certs = cert_loc
+ else:
+ conn.ca_cert_dir = cert_loc
+ else:
+ conn.cert_reqs = 'CERT_NONE'
+ conn.ca_certs = None
+ conn.ca_cert_dir = None
+
+ if cert:
+ if not isinstance(cert, basestring):
+ conn.cert_file = cert[0]
+ conn.key_file = cert[1]
+ else:
+ conn.cert_file = cert
+ conn.key_file = None
+ if conn.cert_file and not os.path.exists(conn.cert_file):
+ raise IOError("Could not find the TLS certificate file, "
+ "invalid path: {}".format(conn.cert_file))
+ if conn.key_file and not os.path.exists(conn.key_file):
+ raise IOError("Could not find the TLS key file, "
+ "invalid path: {}".format(conn.key_file))
+
+ def build_response(self, req, resp):
+ """Builds a :class:`Response <requests.Response>` object from a urllib3
+ response. This should not be called from user code, and is only exposed
+ for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
+
+ :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
+ :param resp: The urllib3 response object.
+ :rtype: requests.Response
+ """
+ response = Response()
+
+        # Fall back to None if there's no status_code, for whatever reason.
+ response.status_code = getattr(resp, 'status', None)
+
+ # Make headers case-insensitive.
+ response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
+
+ # Set encoding.
+ response.encoding = get_encoding_from_headers(response.headers)
+ response.raw = resp
+ response.reason = response.raw.reason
+
+ if isinstance(req.url, bytes):
+ response.url = req.url.decode('utf-8')
+ else:
+ response.url = req.url
+
+ # Add new cookies from the server.
+ extract_cookies_to_jar(response.cookies, req, resp)
+
+ # Give the Response some context.
+ response.request = req
+ response.connection = self
+
+ return response
+
+ def get_connection(self, url, proxies=None):
+ """Returns a urllib3 connection for the given URL. This should not be
+ called from user code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param url: The URL to connect to.
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+ :rtype: urllib3.ConnectionPool
+ """
+ proxy = select_proxy(url, proxies)
+
+ if proxy:
+ proxy = prepend_scheme_if_needed(proxy, 'http')
+ proxy_url = parse_url(proxy)
+ if not proxy_url.host:
+ raise InvalidProxyURL("Please check proxy URL. It is malformed"
+ " and could be missing the host.")
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_url(url)
+ else:
+ # Only scheme should be lower case
+ parsed = urlparse(url)
+ url = parsed.geturl()
+ conn = self.poolmanager.connection_from_url(url)
+
+ return conn
+
+ def close(self):
+ """Disposes of any internal state.
+
+ Currently, this closes the PoolManager and any active ProxyManager,
+ which closes any pooled connections.
+ """
+ self.poolmanager.clear()
+ for proxy in self.proxy_manager.values():
+ proxy.clear()
+
+ def request_url(self, request, proxies):
+ """Obtain the url to use when making the final request.
+
+        If the message is being sent through an HTTP proxy, the full URL has to
+ be used. Otherwise, we should only use the path portion of the URL.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
+ :rtype: str
+ """
+ proxy = select_proxy(request.url, proxies)
+ scheme = urlparse(request.url).scheme
+
+ is_proxied_http_request = (proxy and scheme != 'https')
+ using_socks_proxy = False
+ if proxy:
+ proxy_scheme = urlparse(proxy).scheme.lower()
+ using_socks_proxy = proxy_scheme.startswith('socks')
+
+ url = request.path_url
+ if is_proxied_http_request and not using_socks_proxy:
+ url = urldefragauth(request.url)
+
+ return url
+
+ def add_headers(self, request, **kwargs):
+ """Add any headers needed by the connection. As of v2.0 this does
+ nothing by default, but is left for overriding by users that subclass
+ the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
+ :param kwargs: The keyword arguments from the call to send().
+ """
+ pass
+
+ def proxy_headers(self, proxy):
+ """Returns a dictionary of the headers to add to any request sent
+ through a proxy. This works with urllib3 magic to ensure that they are
+ correctly sent to the proxy, rather than in a tunnelled request if
+ CONNECT is being used.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The url of the proxy being used for this request.
+ :rtype: dict
+ """
+ headers = {}
+ username, password = get_auth_from_url(proxy)
+
+ if username:
+ headers['Proxy-Authorization'] = _basic_auth_str(username,
+ password)
+
+ return headers
+
+ def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple or urllib3 Timeout object
+ :param verify: (optional) Either a boolean, in which case it controls whether
+ we verify the server's TLS certificate, or a string, in which case it
+ must be a path to a CA bundle to use
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
+ :rtype: requests.Response
+ """
+
+ try:
+ conn = self.get_connection(request.url, proxies)
+ except LocationValueError as e:
+ raise InvalidURL(e, request=request)
+
+ self.cert_verify(conn, request.url, verify, cert)
+ url = self.request_url(request, proxies)
+ self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
+
+ chunked = not (request.body is None or 'Content-Length' in request.headers)
+
+ if isinstance(timeout, tuple):
+ try:
+ connect, read = timeout
+ timeout = TimeoutSauce(connect=connect, read=read)
+ except ValueError as e:
+ # this may raise a string formatting error.
+ err = ("Invalid timeout {}. Pass a (connect, read) "
+ "timeout tuple, or a single float to set "
+ "both timeouts to the same value".format(timeout))
+ raise ValueError(err)
+ elif isinstance(timeout, TimeoutSauce):
+ pass
+ else:
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
+
+ try:
+ if not chunked:
+ resp = conn.urlopen(
+ method=request.method,
+ url=url,
+ body=request.body,
+ headers=request.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=False,
+ retries=self.max_retries,
+ timeout=timeout
+ )
+
+            # Send the request chunked.
+            else:
+ if hasattr(conn, 'proxy_pool'):
+ conn = conn.proxy_pool
+
+ low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
+
+ try:
+ low_conn.putrequest(request.method,
+ url,
+ skip_accept_encoding=True)
+
+ for header, value in request.headers.items():
+ low_conn.putheader(header, value)
+
+ low_conn.endheaders()
+
+ for i in request.body:
+ low_conn.send(hex(len(i))[2:].encode('utf-8'))
+ low_conn.send(b'\r\n')
+ low_conn.send(i)
+ low_conn.send(b'\r\n')
+ low_conn.send(b'0\r\n\r\n')
+
+ # Receive the response from the server
+ try:
+ # For Python 2.7, use buffering of HTTP responses
+ r = low_conn.getresponse(buffering=True)
+ except TypeError:
+ # For compatibility with Python 3.3+
+ r = low_conn.getresponse()
+
+ resp = HTTPResponse.from_httplib(
+ r,
+ pool=conn,
+ connection=low_conn,
+ preload_content=False,
+ decode_content=False
+ )
+ except:
+ # If we hit any problems here, clean up the connection.
+ # Then, reraise so that we can handle the actual exception.
+ low_conn.close()
+ raise
+
+ except (ProtocolError, socket.error) as err:
+ raise ConnectionError(err, request=request)
+
+ except MaxRetryError as e:
+ if isinstance(e.reason, ConnectTimeoutError):
+ # TODO: Remove this in 3.0.0: see #2811
+ if not isinstance(e.reason, NewConnectionError):
+ raise ConnectTimeout(e, request=request)
+
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
+ if isinstance(e.reason, _ProxyError):
+ raise ProxyError(e, request=request)
+
+ if isinstance(e.reason, _SSLError):
+ # This branch is for urllib3 v1.22 and later.
+ raise SSLError(e, request=request)
+
+ raise ConnectionError(e, request=request)
+
+ except ClosedPoolError as e:
+ raise ConnectionError(e, request=request)
+
+ except _ProxyError as e:
+ raise ProxyError(e)
+
+ except (_SSLError, _HTTPError) as e:
+ if isinstance(e, _SSLError):
+ # This branch is for urllib3 versions earlier than v1.22
+ raise SSLError(e, request=request)
+ elif isinstance(e, ReadTimeoutError):
+ raise ReadTimeout(e, request=request)
+ else:
+ raise
+
+ return self.build_response(request, resp)
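+
+# A minimal usage sketch: mount a configured HTTPAdapter on a Session so that
+# all matching requests share its pool and retry settings (values here are
+# illustrative):
+#
+#     >>> from pip._vendor import requests
+#     >>> s = requests.Session()
+#     >>> adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20,
+#     ...                       max_retries=3)
+#     >>> s.mount('https://', adapter)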
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/api.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/api.py
new file mode 100644
index 00000000..ef71d075
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/api.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.api
+~~~~~~~~~~~~
+
+This module implements the Requests API.
+
+:copyright: (c) 2012 by Kenneth Reitz.
+:license: Apache2, see LICENSE for more details.
+"""
+
+from . import sessions
+
+
+def request(method, url, **kwargs):
+ """Constructs and sends a :class:`Request <Request>`.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
+ :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
+ ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
+ or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
+ defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
+ to add for the file.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How many seconds to wait for the server to send data
+ before giving up, as a float, or a :ref:`(connect timeout, read
+ timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use. Defaults to ``True``.
+ :param stream: (optional) if ``False``, the response content will be immediately downloaded.
+ :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+
+ Usage::
+
+ >>> import requests
+      >>> req = requests.request('GET', 'https://httpbin.org/get')
+      >>> req
+      <Response [200]>
+ """
+
+ # By using the 'with' statement we are sure the session is closed, thus we
+ # avoid leaving sockets open which can trigger a ResourceWarning in some
+ # cases, and look like a memory leak in others.
+ with sessions.Session() as session:
+ return session.request(method=method, url=url, **kwargs)
+
+
+def get(url, params=None, **kwargs):
+ r"""Sends a GET request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return request('get', url, params=params, **kwargs)
+
+
+def options(url, **kwargs):
+ r"""Sends an OPTIONS request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return request('options', url, **kwargs)
+
+
+def head(url, **kwargs):
+ r"""Sends a HEAD request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return request('head', url, **kwargs)
+
+
+def post(url, data=None, json=None, **kwargs):
+ r"""Sends a POST request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('post', url, data=data, json=json, **kwargs)
+
+
+def put(url, data=None, **kwargs):
+ r"""Sends a PUT request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('put', url, data=data, **kwargs)
+
+
+def patch(url, data=None, **kwargs):
+ r"""Sends a PATCH request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('patch', url, data=data, **kwargs)
+
+
+def delete(url, **kwargs):
+ r"""Sends a DELETE request.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response <Response>` object
+ :rtype: requests.Response
+ """
+
+ return request('delete', url, **kwargs)
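+
+# A quick sketch of the helpers above; httpbin.org serves purely as an
+# example endpoint:
+#
+#     >>> r = get('https://httpbin.org/get', params={'q': 'pip'},
+#     ...         timeout=(3.05, 27))
+#     >>> r.status_code
+#     200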
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/auth.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/auth.py
new file mode 100644
index 00000000..bdde51c7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/auth.py
@@ -0,0 +1,305 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.auth
+~~~~~~~~~~~~~
+
+This module contains the authentication handlers for Requests.
+"""
+
+import os
+import re
+import time
+import hashlib
+import threading
+import warnings
+
+from base64 import b64encode
+
+from .compat import urlparse, str, basestring
+from .cookies import extract_cookies_to_jar
+from ._internal_utils import to_native_string
+from .utils import parse_dict_header
+
+CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
+CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
+
+
+def _basic_auth_str(username, password):
+ """Returns a Basic Auth string."""
+
+ # "I want us to put a big-ol' comment on top of it that
+ # says that this behaviour is dumb but we need to preserve
+ # it because people are relying on it."
+ # - Lukasa
+ #
+ # These are here solely to maintain backwards compatibility
+ # for things like ints. This will be removed in 3.0.0.
+ if not isinstance(username, basestring):
+ warnings.warn(
+ "Non-string usernames will no longer be supported in Requests "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "a string or bytes object in the near future to avoid "
+ "problems.".format(username),
+ category=DeprecationWarning,
+ )
+ username = str(username)
+
+ if not isinstance(password, basestring):
+ warnings.warn(
+ "Non-string passwords will no longer be supported in Requests "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "a string or bytes object in the near future to avoid "
+ "problems.".format(password),
+ category=DeprecationWarning,
+ )
+ password = str(password)
+ # -- End Removal --
+
+ if isinstance(username, str):
+ username = username.encode('latin1')
+
+ if isinstance(password, str):
+ password = password.encode('latin1')
+
+ authstr = 'Basic ' + to_native_string(
+ b64encode(b':'.join((username, password))).strip()
+ )
+
+ return authstr
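+
+# Illustrative: the header value produced for a simple credential pair.
+#
+#     >>> _basic_auth_str('user', 'pass')
+#     'Basic dXNlcjpwYXNz'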
+
+
+class AuthBase(object):
+ """Base class that all auth implementations derive from"""
+
+ def __call__(self, r):
+ raise NotImplementedError('Auth hooks must be callable.')
+
+
+class HTTPBasicAuth(AuthBase):
+ """Attaches HTTP Basic Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
+ def __eq__(self, other):
+ return all([
+ self.username == getattr(other, 'username', None),
+ self.password == getattr(other, 'password', None)
+ ])
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __call__(self, r):
+ r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
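+
+# Usage sketch (endpoint illustrative): HTTPBasicAuth injects an
+# Authorization header into the outgoing request.
+#
+#     >>> from pip._vendor import requests
+#     >>> requests.get('https://httpbin.org/basic-auth/user/pass',
+#     ...              auth=HTTPBasicAuth('user', 'pass'))
+#     <Response [200]>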
+
+
+class HTTPProxyAuth(HTTPBasicAuth):
+ """Attaches HTTP Proxy Authentication to a given Request object."""
+
+ def __call__(self, r):
+ r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPDigestAuth(AuthBase):
+ """Attaches HTTP Digest Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+ # Keep state in per-thread local storage
+ self._thread_local = threading.local()
+
+ def init_per_thread_state(self):
+ # Ensure state is initialized just once per-thread
+ if not hasattr(self._thread_local, 'init'):
+ self._thread_local.init = True
+ self._thread_local.last_nonce = ''
+ self._thread_local.nonce_count = 0
+ self._thread_local.chal = {}
+ self._thread_local.pos = None
+ self._thread_local.num_401_calls = None
+
+ def build_digest_header(self, method, url):
+ """
+ :rtype: str
+ """
+
+ realm = self._thread_local.chal['realm']
+ nonce = self._thread_local.chal['nonce']
+ qop = self._thread_local.chal.get('qop')
+ algorithm = self._thread_local.chal.get('algorithm')
+ opaque = self._thread_local.chal.get('opaque')
+ hash_utf8 = None
+
+ if algorithm is None:
+ _algorithm = 'MD5'
+ else:
+ _algorithm = algorithm.upper()
+ # lambdas assume digest modules are imported at the top level
+ if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
+ def md5_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.md5(x).hexdigest()
+ hash_utf8 = md5_utf8
+ elif _algorithm == 'SHA':
+ def sha_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha1(x).hexdigest()
+ hash_utf8 = sha_utf8
+ elif _algorithm == 'SHA-256':
+ def sha256_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha256(x).hexdigest()
+ hash_utf8 = sha256_utf8
+ elif _algorithm == 'SHA-512':
+ def sha512_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha512(x).hexdigest()
+ hash_utf8 = sha512_utf8
+
+ KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
+
+ if hash_utf8 is None:
+ return None
+
+ # XXX not implemented yet
+ entdig = None
+ p_parsed = urlparse(url)
+ #: path is request-uri defined in RFC 2616 which should not be empty
+ path = p_parsed.path or "/"
+ if p_parsed.query:
+ path += '?' + p_parsed.query
+
+ A1 = '%s:%s:%s' % (self.username, realm, self.password)
+ A2 = '%s:%s' % (method, path)
+
+ HA1 = hash_utf8(A1)
+ HA2 = hash_utf8(A2)
+
+ if nonce == self._thread_local.last_nonce:
+ self._thread_local.nonce_count += 1
+ else:
+ self._thread_local.nonce_count = 1
+ ncvalue = '%08x' % self._thread_local.nonce_count
+ s = str(self._thread_local.nonce_count).encode('utf-8')
+ s += nonce.encode('utf-8')
+ s += time.ctime().encode('utf-8')
+ s += os.urandom(8)
+
+ cnonce = (hashlib.sha1(s).hexdigest()[:16])
+ if _algorithm == 'MD5-SESS':
+ HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
+
+ if not qop:
+ respdig = KD(HA1, "%s:%s" % (nonce, HA2))
+ elif qop == 'auth' or 'auth' in qop.split(','):
+ noncebit = "%s:%s:%s:%s:%s" % (
+ nonce, ncvalue, cnonce, 'auth', HA2
+ )
+ respdig = KD(HA1, noncebit)
+ else:
+ # XXX handle auth-int.
+ return None
+
+ self._thread_local.last_nonce = nonce
+
+ # XXX should the partial digests be encoded too?
+ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
+ 'response="%s"' % (self.username, realm, nonce, path, respdig)
+ if opaque:
+ base += ', opaque="%s"' % opaque
+ if algorithm:
+ base += ', algorithm="%s"' % algorithm
+ if entdig:
+ base += ', digest="%s"' % entdig
+ if qop:
+ base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
+
+ return 'Digest %s' % (base)
+
+ def handle_redirect(self, r, **kwargs):
+ """Reset num_401_calls counter on redirects."""
+ if r.is_redirect:
+ self._thread_local.num_401_calls = 1
+
+ def handle_401(self, r, **kwargs):
+ """
+ Takes the given response and tries digest-auth, if needed.
+
+ :rtype: requests.Response
+ """
+
+ # If response is not 4xx, do not auth
+ # See https://github.com/requests/requests/issues/3772
+ if not 400 <= r.status_code < 500:
+ self._thread_local.num_401_calls = 1
+ return r
+
+ if self._thread_local.pos is not None:
+ # Rewind the file position indicator of the body to where
+ # it was to resend the request.
+ r.request.body.seek(self._thread_local.pos)
+ s_auth = r.headers.get('www-authenticate', '')
+
+ if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
+
+ self._thread_local.num_401_calls += 1
+ pat = re.compile(r'digest ', flags=re.IGNORECASE)
+ self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
+
+ # Consume content and release the original connection
+ # to allow our new request to reuse the same one.
+ r.content
+ r.close()
+ prep = r.request.copy()
+ extract_cookies_to_jar(prep._cookies, r.request, r.raw)
+ prep.prepare_cookies(prep._cookies)
+
+ prep.headers['Authorization'] = self.build_digest_header(
+ prep.method, prep.url)
+ _r = r.connection.send(prep, **kwargs)
+ _r.history.append(r)
+ _r.request = prep
+
+ return _r
+
+ self._thread_local.num_401_calls = 1
+ return r
+
+ def __call__(self, r):
+ # Initialize per-thread state, if needed
+ self.init_per_thread_state()
+ # If we have a saved nonce, skip the 401
+ if self._thread_local.last_nonce:
+ r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
+ try:
+ self._thread_local.pos = r.body.tell()
+ except AttributeError:
+ # In the case of HTTPDigestAuth being reused and the body of
+ # the previous request was a file-like object, pos has the
+ # file position of the previous body. Ensure it's set to
+ # None.
+ self._thread_local.pos = None
+ r.register_hook('response', self.handle_401)
+ r.register_hook('response', self.handle_redirect)
+ self._thread_local.num_401_calls = 1
+
+ return r
+
+ def __eq__(self, other):
+ return all([
+ self.username == getattr(other, 'username', None),
+ self.password == getattr(other, 'password', None)
+ ])
+
+ def __ne__(self, other):
+ return not self == other
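+
+# Usage sketch for the digest handler (endpoint illustrative); the 401
+# challenge/response round trip is driven by the hooks registered in
+# __call__ above.
+#
+#     >>> from pip._vendor import requests
+#     >>> requests.get('https://httpbin.org/digest-auth/auth/user/pass',
+#     ...              auth=HTTPDigestAuth('user', 'pass'))
+#     <Response [200]>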
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/certs.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/certs.py
new file mode 100644
index 00000000..06a594e5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/certs.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+requests.certs
+~~~~~~~~~~~~~~
+
+This module returns the preferred default CA certificate bundle. There is
+only one — the one from the certifi package.
+
+If you are packaging Requests, e.g., for a Linux distribution or a managed
+environment, you can change the definition of where() to return a separately
+packaged CA bundle.
+"""
+from pip._vendor.certifi import where
+
+if __name__ == '__main__':
+ print(where())
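+
+# Illustrative: where() returns the path of certifi's bundled CA file; the
+# exact path varies by installation.
+#
+#     >>> where()
+#     '/.../site-packages/pip/_vendor/certifi/cacert.pem'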
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/compat.py
new file mode 100644
index 00000000..6a86893d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/compat.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.compat
+~~~~~~~~~~~~~~~
+
+This module handles import compatibility issues between Python 2 and
+Python 3.
+"""
+
+from pip._vendor import chardet
+
+import sys
+
+# -------
+# Pythons
+# -------
+
+# Syntax sugar.
+_ver = sys.version_info
+
+#: Python 2.x?
+is_py2 = (_ver[0] == 2)
+
+#: Python 3.x?
+is_py3 = (_ver[0] == 3)
+
+# Note: We've patched out simplejson support in pip because it prevents
+# upgrading simplejson on Windows.
+# try:
+# import simplejson as json
+# except (ImportError, SyntaxError):
+# # simplejson does not support Python 3.2, it throws a SyntaxError
+# # because of u'...' Unicode literals.
+import json
+
+# ---------
+# Specifics
+# ---------
+
+if is_py2:
+ from urllib import (
+ quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
+ proxy_bypass, proxy_bypass_environment, getproxies_environment)
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
+ from urllib2 import parse_http_list
+ import cookielib
+ from Cookie import Morsel
+ from StringIO import StringIO
+ from collections import Callable, Mapping, MutableMapping, OrderedDict
+
+
+ builtin_str = str
+ bytes = str
+ str = unicode
+ basestring = basestring
+ numeric_types = (int, long, float)
+ integer_types = (int, long)
+
+elif is_py3:
+ from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
+ from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
+ from http import cookiejar as cookielib
+ from http.cookies import Morsel
+ from io import StringIO
+ from collections import OrderedDict
+ from collections.abc import Callable, Mapping, MutableMapping
+
+ builtin_str = str
+ str = str
+ bytes = bytes
+ basestring = (str, bytes)
+ numeric_types = (int, float)
+ integer_types = (int,)
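+
+# A small sketch of the re-exported names on Python 3:
+#
+#     >>> is_py3
+#     True
+#     >>> urlparse('https://example.com/path?q=1').netloc
+#     'example.com'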
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py
new file mode 100644
index 00000000..56fccd9c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py
@@ -0,0 +1,549 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.cookies
+~~~~~~~~~~~~~~~~
+
+Compatibility code to be able to use `cookielib.CookieJar` with requests.
+
+requests.utils imports from here, so be careful with imports.
+"""
+
+import copy
+import time
+import calendar
+
+from ._internal_utils import to_native_string
+from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading
+
+
+class MockRequest(object):
+ """Wraps a `requests.Request` to mimic a `urllib2.Request`.
+
+ The code in `cookielib.CookieJar` expects this interface in order to correctly
+ manage cookie policies, i.e., determine whether a cookie can be set, given the
+ domains of the request and the cookie.
+
+ The original request object is read-only. The client is responsible for collecting
+ the new headers via `get_new_headers()` and interpreting them appropriately. You
+ probably want `get_cookie_header`, defined below.
+ """
+
+ def __init__(self, request):
+ self._r = request
+ self._new_headers = {}
+ self.type = urlparse(self._r.url).scheme
+
+ def get_type(self):
+ return self.type
+
+ def get_host(self):
+ return urlparse(self._r.url).netloc
+
+ def get_origin_req_host(self):
+ return self.get_host()
+
+ def get_full_url(self):
+        # Only return the response's URL if the user didn't set the Host
+        # header
+ if not self._r.headers.get('Host'):
+ return self._r.url
+ # If they did set it, retrieve it and reconstruct the expected domain
+ host = to_native_string(self._r.headers['Host'], encoding='utf-8')
+ parsed = urlparse(self._r.url)
+ # Reconstruct the URL as we expect it
+ return urlunparse([
+ parsed.scheme, host, parsed.path, parsed.params, parsed.query,
+ parsed.fragment
+ ])
+
+ def is_unverifiable(self):
+ return True
+
+ def has_header(self, name):
+ return name in self._r.headers or name in self._new_headers
+
+ def get_header(self, name, default=None):
+ return self._r.headers.get(name, self._new_headers.get(name, default))
+
+ def add_header(self, key, val):
+ """cookielib has no legitimate use for this method; add it back if you find one."""
+ raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
+
+ def add_unredirected_header(self, name, value):
+ self._new_headers[name] = value
+
+ def get_new_headers(self):
+ return self._new_headers
+
+ @property
+ def unverifiable(self):
+ return self.is_unverifiable()
+
+ @property
+ def origin_req_host(self):
+ return self.get_origin_req_host()
+
+ @property
+ def host(self):
+ return self.get_host()
+
+
+class MockResponse(object):
+ """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
+
+ ...what? Basically, expose the parsed HTTP headers from the server response
+ the way `cookielib` expects to see them.
+ """
+
+ def __init__(self, headers):
+ """Make a MockResponse for `cookielib` to read.
+
+ :param headers: a httplib.HTTPMessage or analogous carrying the headers
+ """
+ self._headers = headers
+
+ def info(self):
+ return self._headers
+
+    def getheaders(self, name):
+        return self._headers.getheaders(name)
+
+
+def extract_cookies_to_jar(jar, request, response):
+ """Extract the cookies from the response into a CookieJar.
+
+ :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
+ :param request: our own requests.Request object
+ :param response: urllib3.HTTPResponse object
+ """
+ if not (hasattr(response, '_original_response') and
+ response._original_response):
+ return
+ # the _original_response field is the wrapped httplib.HTTPResponse object,
+ req = MockRequest(request)
+ # pull out the HTTPMessage with the headers and put it in the mock:
+ res = MockResponse(response._original_response.msg)
+ jar.extract_cookies(res, req)
+
+
+def get_cookie_header(jar, request):
+ """
+ Produce an appropriate Cookie header string to be sent with `request`, or None.
+
+ :rtype: str
+ """
+ r = MockRequest(request)
+ jar.add_cookie_header(r)
+ return r.get_new_headers().get('Cookie')
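+
+# Sketch: given an existing CookieJar `jar` and PreparedRequest `req`
+# (both assumed here), the helper returns the header string the jar would
+# send; the value shown is illustrative.
+#
+#     >>> get_cookie_header(jar, req)
+#     'sessionid=abc123'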
+
+
+def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
+ """Unsets a cookie by name, by default over all domains and paths.
+
+ Wraps CookieJar.clear(), is O(n).
+ """
+ clearables = []
+ for cookie in cookiejar:
+ if cookie.name != name:
+ continue
+ if domain is not None and domain != cookie.domain:
+ continue
+ if path is not None and path != cookie.path:
+ continue
+ clearables.append((cookie.domain, cookie.path, cookie.name))
+
+ for domain, path, name in clearables:
+ cookiejar.clear(domain, path, name)
+
+
+class CookieConflictError(RuntimeError):
+ """There are two cookies that meet the criteria specified in the cookie jar.
+ Use .get and .set and include domain and path args in order to be more specific.
+ """
+
+
+class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
+ """Compatibility class; is a cookielib.CookieJar, but exposes a dict
+ interface.
+
+ This is the CookieJar we create by default for requests and sessions that
+ don't specify one, since some clients may expect response.cookies and
+ session.cookies to support dict operations.
+
+ Requests does not use the dict interface internally; it's just for
+ compatibility with external client code. All requests code should work
+ out of the box with externally provided instances of ``CookieJar``, e.g.
+ ``LWPCookieJar`` and ``FileCookieJar``.
+
+ Unlike a regular CookieJar, this class is pickleable.
+
+ .. warning:: dictionary operations that are normally O(1) may be O(n).
+ """
+
+ def get(self, name, default=None, domain=None, path=None):
+ """Dict-like get() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+
+ .. warning:: operation is O(n), not O(1).
+ """
+ try:
+ return self._find_no_duplicates(name, domain, path)
+ except KeyError:
+ return default
+
+ def set(self, name, value, **kwargs):
+ """Dict-like set() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
+ multiple domains.
+ """
+ # support client code that unsets cookies by assignment of a None value:
+ if value is None:
+ remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
+ return
+
+ if isinstance(value, Morsel):
+ c = morsel_to_cookie(value)
+ else:
+ c = create_cookie(name, value, **kwargs)
+ self.set_cookie(c)
+ return c
+
+ def iterkeys(self):
+ """Dict-like iterkeys() that returns an iterator of names of cookies
+ from the jar.
+
+ .. seealso:: itervalues() and iteritems().
+ """
+ for cookie in iter(self):
+ yield cookie.name
+
+ def keys(self):
+ """Dict-like keys() that returns a list of names of cookies from the
+ jar.
+
+ .. seealso:: values() and items().
+ """
+ return list(self.iterkeys())
+
+ def itervalues(self):
+ """Dict-like itervalues() that returns an iterator of values of cookies
+ from the jar.
+
+ .. seealso:: iterkeys() and iteritems().
+ """
+ for cookie in iter(self):
+ yield cookie.value
+
+ def values(self):
+ """Dict-like values() that returns a list of values of cookies from the
+ jar.
+
+ .. seealso:: keys() and items().
+ """
+ return list(self.itervalues())
+
+ def iteritems(self):
+ """Dict-like iteritems() that returns an iterator of name-value tuples
+ from the jar.
+
+ .. seealso:: iterkeys() and itervalues().
+ """
+ for cookie in iter(self):
+ yield cookie.name, cookie.value
+
+ def items(self):
+ """Dict-like items() that returns a list of name-value tuples from the
+ jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
+ vanilla python dict of key value pairs.
+
+ .. seealso:: keys() and values().
+ """
+ return list(self.iteritems())
+
+ def list_domains(self):
+ """Utility method to list all the domains in the jar."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain not in domains:
+ domains.append(cookie.domain)
+ return domains
+
+ def list_paths(self):
+ """Utility method to list all the paths in the jar."""
+ paths = []
+ for cookie in iter(self):
+ if cookie.path not in paths:
+ paths.append(cookie.path)
+ return paths
+
+ def multiple_domains(self):
+ """Returns True if there are multiple domains in the jar.
+ Returns False otherwise.
+
+ :rtype: bool
+ """
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain is not None and cookie.domain in domains:
+ return True
+ domains.append(cookie.domain)
+        return False  # there is only one domain in the jar
+
+ def get_dict(self, domain=None, path=None):
+ """Takes as an argument an optional domain and path and returns a plain
+ old Python dict of name-value pairs of cookies that meet the
+ requirements.
+
+ :rtype: dict
+ """
+ dictionary = {}
+ for cookie in iter(self):
+ if (
+ (domain is None or cookie.domain == domain) and
+ (path is None or cookie.path == path)
+ ):
+ dictionary[cookie.name] = cookie.value
+ return dictionary
+
+ def __contains__(self, name):
+ try:
+ return super(RequestsCookieJar, self).__contains__(name)
+ except CookieConflictError:
+ return True
+
+ def __getitem__(self, name):
+ """Dict-like __getitem__() for compatibility with client code. Throws
+ exception if there are more than one cookie with name. In that case,
+ use the more explicit get() method instead.
+
+ .. warning:: operation is O(n), not O(1).
+ """
+ return self._find_no_duplicates(name)
+
+ def __setitem__(self, name, value):
+ """Dict-like __setitem__ for compatibility with client code. Throws
+ exception if there is already a cookie of that name in the jar. In that
+ case, use the more explicit set() method instead.
+ """
+ self.set(name, value)
+
+ def __delitem__(self, name):
+ """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
+ ``remove_cookie_by_name()``.
+ """
+ remove_cookie_by_name(self, name)
+
+ def set_cookie(self, cookie, *args, **kwargs):
+ if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
+ cookie.value = cookie.value.replace('\\"', '')
+ return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
+
+ def update(self, other):
+ """Updates this jar with cookies from another CookieJar or dict-like"""
+ if isinstance(other, cookielib.CookieJar):
+ for cookie in other:
+ self.set_cookie(copy.copy(cookie))
+ else:
+ super(RequestsCookieJar, self).update(other)
+
+ def _find(self, name, domain=None, path=None):
+ """Requests uses this method internally to get cookie values.
+
+ If there are conflicting cookies, _find arbitrarily chooses one.
+ See _find_no_duplicates if you want an exception thrown if there are
+ conflicting cookies.
+
+ :param name: a string containing name of cookie
+ :param domain: (optional) string containing domain of cookie
+ :param path: (optional) string containing path of cookie
+ :return: cookie.value
+ """
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ return cookie.value
+
+ raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+ def _find_no_duplicates(self, name, domain=None, path=None):
+ """Both ``__get_item__`` and ``get`` call this function: it's never
+ used elsewhere in Requests.
+
+ :param name: a string containing name of cookie
+ :param domain: (optional) string containing domain of cookie
+ :param path: (optional) string containing path of cookie
+ :raises KeyError: if cookie is not found
+ :raises CookieConflictError: if there are multiple cookies
+ that match name and optionally domain and path
+ :return: cookie.value
+ """
+ toReturn = None
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+                        if toReturn is not None:  # if there are multiple cookies that meet the passed-in criteria
+                            raise CookieConflictError('There are multiple cookies with name %r' % (name))
+ toReturn = cookie.value # we will eventually return this as long as no cookie conflict
+
+ if toReturn:
+ return toReturn
+ raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+ def __getstate__(self):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ state = self.__dict__.copy()
+ # remove the unpickleable RLock object
+ state.pop('_cookies_lock')
+ return state
+
+ def __setstate__(self, state):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ self.__dict__.update(state)
+ if '_cookies_lock' not in self.__dict__:
+ self._cookies_lock = threading.RLock()
+
+ def copy(self):
+ """Return a copy of this RequestsCookieJar."""
+ new_cj = RequestsCookieJar()
+ new_cj.set_policy(self.get_policy())
+ new_cj.update(self)
+ return new_cj
+
+ def get_policy(self):
+ """Return the CookiePolicy instance used."""
+ return self._policy
+
+
+def _copy_cookie_jar(jar):
+ if jar is None:
+ return None
+
+ if hasattr(jar, 'copy'):
+ # We're dealing with an instance of RequestsCookieJar
+ return jar.copy()
+ # We're dealing with a generic CookieJar instance
+ new_jar = copy.copy(jar)
+ new_jar.clear()
+ for cookie in jar:
+ new_jar.set_cookie(copy.copy(cookie))
+ return new_jar
+
+
+def create_cookie(name, value, **kwargs):
+ """Make a cookie from underspecified parameters.
+
+ By default, the pair of `name` and `value` will be set for the domain ''
+ and sent on every request (this is sometimes called a "supercookie").
+ """
+ result = {
+ 'version': 0,
+ 'name': name,
+ 'value': value,
+ 'port': None,
+ 'domain': '',
+ 'path': '/',
+ 'secure': False,
+ 'expires': None,
+ 'discard': True,
+ 'comment': None,
+ 'comment_url': None,
+ 'rest': {'HttpOnly': None},
+ 'rfc2109': False,
+ }
+
+ badargs = set(kwargs) - set(result)
+ if badargs:
+ err = 'create_cookie() got unexpected keyword arguments: %s'
+ raise TypeError(err % list(badargs))
+
+ result.update(kwargs)
+ result['port_specified'] = bool(result['port'])
+ result['domain_specified'] = bool(result['domain'])
+ result['domain_initial_dot'] = result['domain'].startswith('.')
+ result['path_specified'] = bool(result['path'])
+
+ return cookielib.Cookie(**result)
+
+
+def morsel_to_cookie(morsel):
+ """Convert a Morsel object into a Cookie containing the one k/v pair."""
+
+ expires = None
+ if morsel['max-age']:
+ try:
+ expires = int(time.time() + int(morsel['max-age']))
+ except ValueError:
+            raise TypeError('max-age: %s must be an integer' % morsel['max-age'])
+ elif morsel['expires']:
+ time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
+ expires = calendar.timegm(
+ time.strptime(morsel['expires'], time_template)
+ )
+ return create_cookie(
+ comment=morsel['comment'],
+ comment_url=bool(morsel['comment']),
+ discard=False,
+ domain=morsel['domain'],
+ expires=expires,
+ name=morsel.key,
+ path=morsel['path'],
+ port=None,
+ rest={'HttpOnly': morsel['httponly']},
+ rfc2109=False,
+ secure=bool(morsel['secure']),
+ value=morsel.value,
+ version=morsel['version'] or 0,
+ )
+
+
+def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ :param cookiejar: (optional) A cookiejar to add the cookies to.
+ :param overwrite: (optional) If False, will not replace cookies
+ already in the jar with new ones.
+ :rtype: CookieJar
+ """
+ if cookiejar is None:
+ cookiejar = RequestsCookieJar()
+
+ if cookie_dict is not None:
+ names_from_jar = [cookie.name for cookie in cookiejar]
+ for name in cookie_dict:
+ if overwrite or (name not in names_from_jar):
+ cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
+
+ return cookiejar
+
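+# A minimal sketch of cookiejar_from_dict (values hypothetical):
+#
+#   >>> jar = cookiejar_from_dict({'session': 'xyz'})
+#   >>> jar.get('session')
+#   'xyz'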
+
+def merge_cookies(cookiejar, cookies):
+ """Add cookies to cookiejar and returns a merged CookieJar.
+
+ :param cookiejar: CookieJar object to add the cookies to.
+ :param cookies: Dictionary or CookieJar object to be added.
+ :rtype: CookieJar
+ """
+ if not isinstance(cookiejar, cookielib.CookieJar):
+ raise ValueError('You can only merge into CookieJar')
+
+ if isinstance(cookies, dict):
+ cookiejar = cookiejar_from_dict(
+ cookies, cookiejar=cookiejar, overwrite=False)
+ elif isinstance(cookies, cookielib.CookieJar):
+ try:
+ cookiejar.update(cookies)
+ except AttributeError:
+ for cookie_in_jar in cookies:
+ cookiejar.set_cookie(cookie_in_jar)
+
+ return cookiejar
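+
+# A minimal sketch of merge_cookies (with overwrite=False, dict cookies do
+# not replace ones already present in the jar):
+#
+#   >>> jar = cookiejar_from_dict({'a': '1'})
+#   >>> _ = merge_cookies(jar, {'a': '2', 'b': '3'})
+#   >>> jar.get('a'), jar.get('b')
+#   ('1', '3')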
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/exceptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/exceptions.py
new file mode 100644
index 00000000..a91e1fd1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/exceptions.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.exceptions
+~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of Requests' exceptions.
+"""
+from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError
+
+
+class RequestException(IOError):
+ """There was an ambiguous exception that occurred while handling your
+ request.
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Initialize RequestException with `request` and `response` objects."""
+ response = kwargs.pop('response', None)
+ self.response = response
+ self.request = kwargs.pop('request', None)
+ if (response is not None and not self.request and
+ hasattr(response, 'request')):
+ self.request = self.response.request
+ super(RequestException, self).__init__(*args, **kwargs)
+
+
+class HTTPError(RequestException):
+ """An HTTP error occurred."""
+
+
+class ConnectionError(RequestException):
+ """A Connection error occurred."""
+
+
+class ProxyError(ConnectionError):
+ """A proxy error occurred."""
+
+
+class SSLError(ConnectionError):
+ """An SSL error occurred."""
+
+
+class Timeout(RequestException):
+ """The request timed out.
+
+ Catching this error will catch both
+ :exc:`~requests.exceptions.ConnectTimeout` and
+ :exc:`~requests.exceptions.ReadTimeout` errors.
+ """
+
+
+class ConnectTimeout(ConnectionError, Timeout):
+ """The request timed out while trying to connect to the remote server.
+
+ Requests that produced this error are safe to retry.
+ """
+
+
+class ReadTimeout(Timeout):
+ """The server did not send any data in the allotted amount of time."""
+
+
+class URLRequired(RequestException):
+ """A valid URL is required to make a request."""
+
+
+class TooManyRedirects(RequestException):
+ """Too many redirects."""
+
+
+class MissingSchema(RequestException, ValueError):
+ """The URL schema (e.g. http or https) is missing."""
+
+
+class InvalidSchema(RequestException, ValueError):
+ """See defaults.py for valid schemas."""
+
+
+class InvalidURL(RequestException, ValueError):
+ """The URL provided was somehow invalid."""
+
+
+class InvalidHeader(RequestException, ValueError):
+ """The header value provided was somehow invalid."""
+
+
+class InvalidProxyURL(InvalidURL):
+ """The proxy URL provided is invalid."""
+
+
+class ChunkedEncodingError(RequestException):
+ """The server declared chunked encoding but sent an invalid chunk."""
+
+
+class ContentDecodingError(RequestException, BaseHTTPError):
+ """Failed to decode response content"""
+
+
+class StreamConsumedError(RequestException, TypeError):
+ """The content for this response was already consumed"""
+
+
+class RetryError(RequestException):
+ """Custom retries logic failed"""
+
+
+class UnrewindableBodyError(RequestException):
+ """Requests encountered an error when trying to rewind a body"""
+
+# Warnings
+
+
+class RequestsWarning(Warning):
+ """Base warning for Requests."""
+ pass
+
+
+class FileModeWarning(RequestsWarning, DeprecationWarning):
+ """A file was opened in text mode, but Requests determined its binary length."""
+ pass
+
+
+class RequestsDependencyWarning(RequestsWarning):
+ """An imported dependency doesn't match the expected version range."""
+ pass
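+
+# Illustrative handling of this hierarchy (a sketch; the URL is
+# hypothetical, and any subclass such as ConnectionError or Timeout is
+# caught by the RequestException base class):
+#
+#   >>> from pip._vendor import requests
+#   >>> try:
+#   ...     requests.get('https://example.invalid/', timeout=1)
+#   ... except requests.exceptions.RequestException as err:
+#   ...     print(type(err).__name__)
+#   ConnectionError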
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/help.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/help.py
new file mode 100644
index 00000000..3c3072ba
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/help.py
@@ -0,0 +1,119 @@
+"""Module containing bug report helper(s)."""
+from __future__ import print_function
+
+import json
+import platform
+import sys
+import ssl
+
+from pip._vendor import idna
+from pip._vendor import urllib3
+from pip._vendor import chardet
+
+from . import __version__ as requests_version
+
+try:
+ from pip._vendor.urllib3.contrib import pyopenssl
+except ImportError:
+ pyopenssl = None
+ OpenSSL = None
+ cryptography = None
+else:
+ import OpenSSL
+ import cryptography
+
+
+def _implementation():
+ """Return a dict with the Python implementation and version.
+
+ Provide both the name and the version of the Python implementation
+ currently running. For example, on CPython 2.7.5 it will return
+ {'name': 'CPython', 'version': '2.7.5'}.
+
+ This function works best on CPython and PyPy: in particular, it probably
+ doesn't work for Jython or IronPython. Future investigation should be done
+ to work out the correct shape of the code for those platforms.
+ """
+ implementation = platform.python_implementation()
+
+ if implementation == 'CPython':
+ implementation_version = platform.python_version()
+ elif implementation == 'PyPy':
+ implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
+ sys.pypy_version_info.minor,
+ sys.pypy_version_info.micro)
+ if sys.pypy_version_info.releaselevel != 'final':
+ implementation_version = ''.join([
+ implementation_version, sys.pypy_version_info.releaselevel
+ ])
+ elif implementation == 'Jython':
+ implementation_version = platform.python_version() # Complete Guess
+ elif implementation == 'IronPython':
+ implementation_version = platform.python_version() # Complete Guess
+ else:
+ implementation_version = 'Unknown'
+
+ return {'name': implementation, 'version': implementation_version}
+
+
+def info():
+ """Generate information for a bug report."""
+ try:
+ platform_info = {
+ 'system': platform.system(),
+ 'release': platform.release(),
+ }
+ except IOError:
+ platform_info = {
+ 'system': 'Unknown',
+ 'release': 'Unknown',
+ }
+
+ implementation_info = _implementation()
+ urllib3_info = {'version': urllib3.__version__}
+ chardet_info = {'version': chardet.__version__}
+
+ pyopenssl_info = {
+ 'version': None,
+ 'openssl_version': '',
+ }
+ if OpenSSL:
+ pyopenssl_info = {
+ 'version': OpenSSL.__version__,
+ 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
+ }
+ cryptography_info = {
+ 'version': getattr(cryptography, '__version__', ''),
+ }
+ idna_info = {
+ 'version': getattr(idna, '__version__', ''),
+ }
+
+ system_ssl = ssl.OPENSSL_VERSION_NUMBER
+ system_ssl_info = {
+ 'version': '%x' % system_ssl if system_ssl is not None else ''
+ }
+
+ return {
+ 'platform': platform_info,
+ 'implementation': implementation_info,
+ 'system_ssl': system_ssl_info,
+ 'using_pyopenssl': pyopenssl is not None,
+ 'pyOpenSSL': pyopenssl_info,
+ 'urllib3': urllib3_info,
+ 'chardet': chardet_info,
+ 'cryptography': cryptography_info,
+ 'idna': idna_info,
+ 'requests': {
+ 'version': requests_version,
+ },
+ }
+
+
+def main():
+ """Pretty-print the bug information as JSON."""
+ print(json.dumps(info(), sort_keys=True, indent=2))
+
+
+if __name__ == '__main__':
+ main()
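+
+# Run directly, this prints the report as JSON; with the vendored layout
+# that would look roughly like the following (a sketch, not a supported
+# pip interface):
+#
+#   $ python -m pip._vendor.requests.help
+#   {
+#     "chardet": {"version": "..."},
+#     ...
+#   }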
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/hooks.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/hooks.py
new file mode 100644
index 00000000..7a51f212
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/hooks.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.hooks
+~~~~~~~~~~~~~~
+
+This module provides the capabilities for the Requests hooks system.
+
+Available hooks:
+
+``response``:
+ The response generated from a Request.
+"""
+HOOKS = ['response']
+
+
+def default_hooks():
+ return {event: [] for event in HOOKS}
+
+# TODO: response is the only one
+
+
+def dispatch_hook(key, hooks, hook_data, **kwargs):
+ """Dispatches a hook dictionary on a given piece of data."""
+ hooks = hooks or {}
+ hooks = hooks.get(key)
+ if hooks:
+ if hasattr(hooks, '__call__'):
+ hooks = [hooks]
+ for hook in hooks:
+ _hook_data = hook(hook_data, **kwargs)
+ if _hook_data is not None:
+ hook_data = _hook_data
+ return hook_data
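+
+# A minimal sketch of the dispatch contract: each callable may replace the
+# data by returning a non-None value (the hook below is hypothetical):
+#
+#   >>> def tag(data, **kwargs):
+#   ...     return ('tagged', data)
+#   >>> dispatch_hook('response', {'response': [tag]}, 'resp')
+#   ('tagged', 'resp')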
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/models.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/models.py
new file mode 100644
index 00000000..08399574
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/models.py
@@ -0,0 +1,953 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.models
+~~~~~~~~~~~~~~~
+
+This module contains the primary objects that power Requests.
+"""
+
+import datetime
+import sys
+
+# Import encoding now, to avoid implicit import later.
+# Implicit import within threads may cause LookupError when standard library is in a ZIP,
+# such as in Embedded Python. See https://github.com/requests/requests/issues/3578.
+import encodings.idna
+
+from pip._vendor.urllib3.fields import RequestField
+from pip._vendor.urllib3.filepost import encode_multipart_formdata
+from pip._vendor.urllib3.util import parse_url
+from pip._vendor.urllib3.exceptions import (
+ DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
+
+from io import UnsupportedOperation
+from .hooks import default_hooks
+from .structures import CaseInsensitiveDict
+
+from .auth import HTTPBasicAuth
+from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
+from .exceptions import (
+ HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
+ ContentDecodingError, ConnectionError, StreamConsumedError)
+from ._internal_utils import to_native_string, unicode_is_ascii
+from .utils import (
+ guess_filename, get_auth_from_url, requote_uri,
+ stream_decode_response_unicode, to_key_val_list, parse_header_links,
+ iter_slices, guess_json_utf, super_len, check_header_validity)
+from .compat import (
+ Callable, Mapping,
+ cookielib, urlunparse, urlsplit, urlencode, str, bytes,
+ is_py2, chardet, builtin_str, basestring)
+from .compat import json as complexjson
+from .status_codes import codes
+
+#: The set of HTTP status codes that indicate an automatically
+#: processable redirect.
+REDIRECT_STATI = (
+ codes.moved, # 301
+ codes.found, # 302
+ codes.other, # 303
+ codes.temporary_redirect, # 307
+ codes.permanent_redirect, # 308
+)
+
+DEFAULT_REDIRECT_LIMIT = 30
+CONTENT_CHUNK_SIZE = 10 * 1024
+ITER_CHUNK_SIZE = 512
+
+
+class RequestEncodingMixin(object):
+ @property
+ def path_url(self):
+ """Build the path URL to use."""
+
+ url = []
+
+ p = urlsplit(self.url)
+
+ path = p.path
+ if not path:
+ path = '/'
+
+ url.append(path)
+
+ query = p.query
+ if query:
+ url.append('?')
+ url.append(query)
+
+ return ''.join(url)
+
+ @staticmethod
+ def _encode_params(data):
+ """Encode parameters in a piece of data.
+
+ Will successfully encode parameters when passed as a dict or a list of
+ 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
+ if parameters are supplied as a dict.
+ """
+
+ if isinstance(data, (str, bytes)):
+ return data
+ elif hasattr(data, 'read'):
+ return data
+ elif hasattr(data, '__iter__'):
+ result = []
+ for k, vs in to_key_val_list(data):
+ if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
+ vs = [vs]
+ for v in vs:
+ if v is not None:
+ result.append(
+ (k.encode('utf-8') if isinstance(k, str) else k,
+ v.encode('utf-8') if isinstance(v, str) else v))
+ return urlencode(result, doseq=True)
+ else:
+ return data
+
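+    # A minimal sketch of _encode_params (a list of 2-tuples preserves
+    # order, unlike a dict):
+    #
+    #   >>> RequestEncodingMixin._encode_params([('k', 'v1'), ('k', 'v2')])
+    #   'k=v1&k=v2'
+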
+ @staticmethod
+ def _encode_files(files, data):
+ """Build the body for a multipart/form-data request.
+
+ Will successfully encode files when passed as a dict or a list of
+ tuples. Order is retained if data is a list of tuples but arbitrary
+ if parameters are supplied as a dict.
+        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
+        or 4-tuples (filename, fileobj, content_type, custom_headers).
+ """
+        if not files:
+ raise ValueError("Files must be provided.")
+ elif isinstance(data, basestring):
+ raise ValueError("Data must not be a string.")
+
+ new_fields = []
+ fields = to_key_val_list(data or {})
+ files = to_key_val_list(files or {})
+
+ for field, val in fields:
+ if isinstance(val, basestring) or not hasattr(val, '__iter__'):
+ val = [val]
+ for v in val:
+ if v is not None:
+ # Don't call str() on bytestrings: in Py3 it all goes wrong.
+ if not isinstance(v, bytes):
+ v = str(v)
+
+ new_fields.append(
+ (field.decode('utf-8') if isinstance(field, bytes) else field,
+ v.encode('utf-8') if isinstance(v, str) else v))
+
+ for (k, v) in files:
+ # support for explicit filename
+ ft = None
+ fh = None
+ if isinstance(v, (tuple, list)):
+ if len(v) == 2:
+ fn, fp = v
+ elif len(v) == 3:
+ fn, fp, ft = v
+ else:
+ fn, fp, ft, fh = v
+ else:
+ fn = guess_filename(v) or k
+ fp = v
+
+ if isinstance(fp, (str, bytes, bytearray)):
+ fdata = fp
+ elif hasattr(fp, 'read'):
+ fdata = fp.read()
+ elif fp is None:
+ continue
+ else:
+ fdata = fp
+
+ rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
+ rf.make_multipart(content_type=ft)
+ new_fields.append(rf)
+
+ body, content_type = encode_multipart_formdata(new_fields)
+
+ return body, content_type
+
+
+class RequestHooksMixin(object):
+ def register_hook(self, event, hook):
+ """Properly register a hook."""
+
+ if event not in self.hooks:
+ raise ValueError('Unsupported event specified, with event name "%s"' % (event))
+
+ if isinstance(hook, Callable):
+ self.hooks[event].append(hook)
+ elif hasattr(hook, '__iter__'):
+ self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
+
+ def deregister_hook(self, event, hook):
+ """Deregister a previously registered hook.
+ Returns True if the hook existed, False if not.
+ """
+
+ try:
+ self.hooks[event].remove(hook)
+ return True
+ except ValueError:
+ return False
+
+
+class Request(RequestHooksMixin):
+ """A user-created :class:`Request <Request>` object.
+
+ Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
+
+ :param method: HTTP method to use.
+ :param url: URL to send.
+ :param headers: dictionary of headers to send.
+ :param files: dictionary of {filename: fileobject} files to multipart upload.
+ :param data: the body to attach to the request. If a dictionary or
+ list of tuples ``[(key, value)]`` is provided, form-encoding will
+ take place.
+ :param json: json for the body to attach to the request (if files or data is not specified).
+ :param params: URL parameters to append to the URL. If a dictionary or
+ list of tuples ``[(key, value)]`` is provided, form-encoding will
+ take place.
+ :param auth: Auth handler or (user, pass) tuple.
+ :param cookies: dictionary or CookieJar of cookies to attach to this request.
+ :param hooks: dictionary of callback hooks, for internal usage.
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.Request('GET', 'https://httpbin.org/get')
+ >>> req.prepare()
+ <PreparedRequest [GET]>
+ """
+
+ def __init__(self,
+ method=None, url=None, headers=None, files=None, data=None,
+ params=None, auth=None, cookies=None, hooks=None, json=None):
+
+ # Default empty dicts for dict params.
+ data = [] if data is None else data
+ files = [] if files is None else files
+ headers = {} if headers is None else headers
+ params = {} if params is None else params
+ hooks = {} if hooks is None else hooks
+
+ self.hooks = default_hooks()
+ for (k, v) in list(hooks.items()):
+ self.register_hook(event=k, hook=v)
+
+ self.method = method
+ self.url = url
+ self.headers = headers
+ self.files = files
+ self.data = data
+ self.json = json
+ self.params = params
+ self.auth = auth
+ self.cookies = cookies
+
+ def __repr__(self):
+ return '<Request [%s]>' % (self.method)
+
+ def prepare(self):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
+ p = PreparedRequest()
+ p.prepare(
+ method=self.method,
+ url=self.url,
+ headers=self.headers,
+ files=self.files,
+ data=self.data,
+ json=self.json,
+ params=self.params,
+ auth=self.auth,
+ cookies=self.cookies,
+ hooks=self.hooks,
+ )
+ return p
+
+
+class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
+ """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
+ containing the exact bytes that will be sent to the server.
+
+ Generated from either a :class:`Request <Request>` object or manually.
+
+ Usage::
+
+ >>> import requests
+ >>> req = requests.Request('GET', 'https://httpbin.org/get')
+ >>> r = req.prepare()
+ <PreparedRequest [GET]>
+
+ >>> s = requests.Session()
+ >>> s.send(r)
+ <Response [200]>
+ """
+
+ def __init__(self):
+ #: HTTP verb to send to the server.
+ self.method = None
+ #: HTTP URL to send the request to.
+ self.url = None
+ #: dictionary of HTTP headers.
+ self.headers = None
+ # The `CookieJar` used to create the Cookie header will be stored here
+ # after prepare_cookies is called
+ self._cookies = None
+ #: request body to send to the server.
+ self.body = None
+ #: dictionary of callback hooks, for internal usage.
+ self.hooks = default_hooks()
+ #: integer denoting starting position of a readable file-like body.
+ self._body_position = None
+
+ def prepare(self,
+ method=None, url=None, headers=None, files=None, data=None,
+ params=None, auth=None, cookies=None, hooks=None, json=None):
+ """Prepares the entire request with the given parameters."""
+
+ self.prepare_method(method)
+ self.prepare_url(url, params)
+ self.prepare_headers(headers)
+ self.prepare_cookies(cookies)
+ self.prepare_body(data, files, json)
+ self.prepare_auth(auth, url)
+
+ # Note that prepare_auth must be last to enable authentication schemes
+ # such as OAuth to work on a fully prepared request.
+
+ # This MUST go after prepare_auth. Authenticators could add a hook
+ self.prepare_hooks(hooks)
+
+ def __repr__(self):
+ return '<PreparedRequest [%s]>' % (self.method)
+
+ def copy(self):
+ p = PreparedRequest()
+ p.method = self.method
+ p.url = self.url
+ p.headers = self.headers.copy() if self.headers is not None else None
+ p._cookies = _copy_cookie_jar(self._cookies)
+ p.body = self.body
+ p.hooks = self.hooks
+ p._body_position = self._body_position
+ return p
+
+ def prepare_method(self, method):
+ """Prepares the given HTTP method."""
+ self.method = method
+ if self.method is not None:
+ self.method = to_native_string(self.method.upper())
+
+ @staticmethod
+ def _get_idna_encoded_host(host):
+ from pip._vendor import idna
+
+ try:
+ host = idna.encode(host, uts46=True).decode('utf-8')
+ except idna.IDNAError:
+ raise UnicodeError
+ return host
+
+ def prepare_url(self, url, params):
+ """Prepares the given HTTP URL."""
+ #: Accept objects that have string representations.
+ #: We're unable to blindly call unicode/str functions
+ #: as this will include the bytestring indicator (b'')
+ #: on python 3.x.
+ #: https://github.com/requests/requests/pull/2238
+ if isinstance(url, bytes):
+ url = url.decode('utf8')
+ else:
+ url = unicode(url) if is_py2 else str(url)
+
+ # Remove leading whitespaces from url
+ url = url.lstrip()
+
+ # Don't do any URL preparation for non-HTTP schemes like `mailto`,
+ # `data` etc to work around exceptions from `url_parse`, which
+ # handles RFC 3986 only.
+ if ':' in url and not url.lower().startswith('http'):
+ self.url = url
+ return
+
+ # Support for unicode domain names and paths.
+ try:
+ scheme, auth, host, port, path, query, fragment = parse_url(url)
+ except LocationParseError as e:
+ raise InvalidURL(*e.args)
+
+ if not scheme:
+ error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
+ error = error.format(to_native_string(url, 'utf8'))
+
+ raise MissingSchema(error)
+
+ if not host:
+ raise InvalidURL("Invalid URL %r: No host supplied" % url)
+
+ # In general, we want to try IDNA encoding the hostname if the string contains
+ # non-ASCII characters. This allows users to automatically get the correct IDNA
+ # behaviour. For strings containing only ASCII characters, we need to also verify
+ # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
+ if not unicode_is_ascii(host):
+ try:
+ host = self._get_idna_encoded_host(host)
+ except UnicodeError:
+ raise InvalidURL('URL has an invalid label.')
+ elif host.startswith(u'*'):
+ raise InvalidURL('URL has an invalid label.')
+
+ # Carefully reconstruct the network location
+ netloc = auth or ''
+ if netloc:
+ netloc += '@'
+ netloc += host
+ if port:
+ netloc += ':' + str(port)
+
+ # Bare domains aren't valid URLs.
+ if not path:
+ path = '/'
+
+ if is_py2:
+ if isinstance(scheme, str):
+ scheme = scheme.encode('utf-8')
+ if isinstance(netloc, str):
+ netloc = netloc.encode('utf-8')
+ if isinstance(path, str):
+ path = path.encode('utf-8')
+ if isinstance(query, str):
+ query = query.encode('utf-8')
+ if isinstance(fragment, str):
+ fragment = fragment.encode('utf-8')
+
+ if isinstance(params, (str, bytes)):
+ params = to_native_string(params)
+
+ enc_params = self._encode_params(params)
+ if enc_params:
+ if query:
+ query = '%s&%s' % (query, enc_params)
+ else:
+ query = enc_params
+
+ url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
+ self.url = url
+
+ def prepare_headers(self, headers):
+ """Prepares the given HTTP headers."""
+
+ self.headers = CaseInsensitiveDict()
+ if headers:
+ for header in headers.items():
+ # Raise exception on invalid header value.
+ check_header_validity(header)
+ name, value = header
+ self.headers[to_native_string(name)] = value
+
+ def prepare_body(self, data, files, json=None):
+ """Prepares the given HTTP body data."""
+
+ # Check if file, fo, generator, iterator.
+ # If not, run through normal process.
+
+ # Nottin' on you.
+ body = None
+ content_type = None
+
+ if not data and json is not None:
+ # urllib3 requires a bytes-like body. Python 2's json.dumps
+ # provides this natively, but Python 3 gives a Unicode string.
+ content_type = 'application/json'
+ body = complexjson.dumps(json)
+ if not isinstance(body, bytes):
+ body = body.encode('utf-8')
+
+ is_stream = all([
+ hasattr(data, '__iter__'),
+ not isinstance(data, (basestring, list, tuple, Mapping))
+ ])
+
+ try:
+ length = super_len(data)
+ except (TypeError, AttributeError, UnsupportedOperation):
+ length = None
+
+ if is_stream:
+ body = data
+
+ if getattr(body, 'tell', None) is not None:
+ # Record the current file position before reading.
+ # This will allow us to rewind a file in the event
+ # of a redirect.
+ try:
+ self._body_position = body.tell()
+ except (IOError, OSError):
+ # This differentiates from None, allowing us to catch
+ # a failed `tell()` later when trying to rewind the body
+ self._body_position = object()
+
+ if files:
+ raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
+
+ if length:
+ self.headers['Content-Length'] = builtin_str(length)
+ else:
+ self.headers['Transfer-Encoding'] = 'chunked'
+ else:
+ # Multi-part file uploads.
+ if files:
+ (body, content_type) = self._encode_files(files, data)
+ else:
+ if data:
+ body = self._encode_params(data)
+ if isinstance(data, basestring) or hasattr(data, 'read'):
+ content_type = None
+ else:
+ content_type = 'application/x-www-form-urlencoded'
+
+ self.prepare_content_length(body)
+
+ # Add content-type if it wasn't explicitly provided.
+ if content_type and ('content-type' not in self.headers):
+ self.headers['Content-Type'] = content_type
+
+ self.body = body
+
+ def prepare_content_length(self, body):
+ """Prepare Content-Length header based on request method and body"""
+ if body is not None:
+ length = super_len(body)
+ if length:
+ # If length exists, set it. Otherwise, we fallback
+ # to Transfer-Encoding: chunked.
+ self.headers['Content-Length'] = builtin_str(length)
+ elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
+ # Set Content-Length to 0 for methods that can have a body
+ # but don't provide one. (i.e. not GET or HEAD)
+ self.headers['Content-Length'] = '0'
+
+ def prepare_auth(self, auth, url=''):
+ """Prepares the given HTTP auth data."""
+
+ # If no Auth is explicitly provided, extract it from the URL first.
+ if auth is None:
+ url_auth = get_auth_from_url(self.url)
+ auth = url_auth if any(url_auth) else None
+
+ if auth:
+ if isinstance(auth, tuple) and len(auth) == 2:
+ # special-case basic HTTP auth
+ auth = HTTPBasicAuth(*auth)
+
+ # Allow auth to make its changes.
+ r = auth(self)
+
+ # Update self to reflect the auth changes.
+ self.__dict__.update(r.__dict__)
+
+ # Recompute Content-Length
+ self.prepare_content_length(self.body)
+
+ def prepare_cookies(self, cookies):
+ """Prepares the given HTTP cookie data.
+
+ This function eventually generates a ``Cookie`` header from the
+ given cookies using cookielib. Due to cookielib's design, the header
+ will not be regenerated if it already exists, meaning this function
+ can only be called once for the life of the
+ :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
+ to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
+ header is removed beforehand.
+ """
+ if isinstance(cookies, cookielib.CookieJar):
+ self._cookies = cookies
+ else:
+ self._cookies = cookiejar_from_dict(cookies)
+
+ cookie_header = get_cookie_header(self._cookies, self)
+ if cookie_header is not None:
+ self.headers['Cookie'] = cookie_header
+
+ def prepare_hooks(self, hooks):
+ """Prepares the given hooks."""
+ # hooks can be passed as None to the prepare method and to this
+ # method. To prevent iterating over None, simply use an empty list
+ # if hooks is False-y
+ hooks = hooks or []
+ for event in hooks:
+ self.register_hook(event, hooks[event])
+
+
+class Response(object):
+ """The :class:`Response <Response>` object, which contains a
+ server's response to an HTTP request.
+ """
+
+ __attrs__ = [
+ '_content', 'status_code', 'headers', 'url', 'history',
+ 'encoding', 'reason', 'cookies', 'elapsed', 'request'
+ ]
+
+ def __init__(self):
+ self._content = False
+ self._content_consumed = False
+ self._next = None
+
+        #: Integer code of the responded HTTP status, e.g. 404 or 200.
+ self.status_code = None
+
+ #: Case-insensitive Dictionary of Response Headers.
+ #: For example, ``headers['content-encoding']`` will return the
+ #: value of a ``'Content-Encoding'`` response header.
+ self.headers = CaseInsensitiveDict()
+
+ #: File-like object representation of response (for advanced usage).
+ #: Use of ``raw`` requires that ``stream=True`` be set on the request.
+ # This requirement does not apply for use internally to Requests.
+ self.raw = None
+
+ #: Final URL location of Response.
+ self.url = None
+
+ #: Encoding to decode with when accessing r.text.
+ self.encoding = None
+
+ #: A list of :class:`Response <Response>` objects from
+ #: the history of the Request. Any redirect responses will end
+ #: up here. The list is sorted from the oldest to the most recent request.
+ self.history = []
+
+        #: Textual reason of the responded HTTP status, e.g. "Not Found" or "OK".
+ self.reason = None
+
+ #: A CookieJar of Cookies the server sent back.
+ self.cookies = cookiejar_from_dict({})
+
+ #: The amount of time elapsed between sending the request
+ #: and the arrival of the response (as a timedelta).
+ #: This property specifically measures the time taken between sending
+ #: the first byte of the request and finishing parsing the headers. It
+ #: is therefore unaffected by consuming the response content or the
+ #: value of the ``stream`` keyword argument.
+ self.elapsed = datetime.timedelta(0)
+
+ #: The :class:`PreparedRequest <PreparedRequest>` object to which this
+ #: is a response.
+ self.request = None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def __getstate__(self):
+ # Consume everything; accessing the content attribute makes
+ # sure the content has been fully read.
+ if not self._content_consumed:
+ self.content
+
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+
+ def __setstate__(self, state):
+ for name, value in state.items():
+ setattr(self, name, value)
+
+ # pickled objects do not have .raw
+ setattr(self, '_content_consumed', True)
+ setattr(self, 'raw', None)
+
+ def __repr__(self):
+ return '<Response [%s]>' % (self.status_code)
+
+ def __bool__(self):
+ """Returns True if :attr:`status_code` is less than 400.
+
+ This attribute checks if the status code of the response is between
+ 400 and 600 to see if there was a client error or a server error. If
+        the status code is between 200 and 400, this will return True. This
+ is **not** a check to see if the response code is ``200 OK``.
+ """
+ return self.ok
+
+ def __nonzero__(self):
+ """Returns True if :attr:`status_code` is less than 400.
+
+ This attribute checks if the status code of the response is between
+ 400 and 600 to see if there was a client error or a server error. If
+        the status code is between 200 and 400, this will return True. This
+ is **not** a check to see if the response code is ``200 OK``.
+ """
+ return self.ok
+
+ def __iter__(self):
+ """Allows you to use a response as an iterator."""
+ return self.iter_content(128)
+
+ @property
+ def ok(self):
+ """Returns True if :attr:`status_code` is less than 400, False if not.
+
+ This attribute checks if the status code of the response is between
+ 400 and 600 to see if there was a client error or a server error. If
+ the status code is between 200 and 400, this will return True. This
+ is **not** a check to see if the response code is ``200 OK``.
+ """
+ try:
+ self.raise_for_status()
+ except HTTPError:
+ return False
+ return True
+
+ @property
+ def is_redirect(self):
+ """True if this Response is a well-formed HTTP redirect that could have
+ been processed automatically (by :meth:`Session.resolve_redirects`).
+ """
+ return ('location' in self.headers and self.status_code in REDIRECT_STATI)
+
+ @property
+ def is_permanent_redirect(self):
+ """True if this Response one of the permanent versions of redirect."""
+ return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
+
+ @property
+ def next(self):
+ """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
+ return self._next
+
+ @property
+ def apparent_encoding(self):
+ """The apparent encoding, provided by the chardet library."""
+ return chardet.detect(self.content)['encoding']
+
+ def iter_content(self, chunk_size=1, decode_unicode=False):
+ """Iterates over the response data. When stream=True is set on the
+ request, this avoids reading the content at once into memory for
+ large responses. The chunk size is the number of bytes it should
+ read into memory. This is not necessarily the length of each item
+ returned as decoding can take place.
+
+        chunk_size must be of type int or None. A value of None will
+        function differently depending on the value of `stream`.
+        stream=True will read data as it arrives, in whatever size the
+        chunks happen to be received; if stream=False, the data is
+        returned as a single chunk.
+
+ If decode_unicode is True, content will be decoded using the best
+ available encoding based on the response.
+ """
+
+ def generate():
+ # Special case for urllib3.
+ if hasattr(self.raw, 'stream'):
+ try:
+ for chunk in self.raw.stream(chunk_size, decode_content=True):
+ yield chunk
+ except ProtocolError as e:
+ raise ChunkedEncodingError(e)
+ except DecodeError as e:
+ raise ContentDecodingError(e)
+ except ReadTimeoutError as e:
+ raise ConnectionError(e)
+ else:
+ # Standard file-like object.
+ while True:
+ chunk = self.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ self._content_consumed = True
+
+ if self._content_consumed and isinstance(self._content, bool):
+ raise StreamConsumedError()
+ elif chunk_size is not None and not isinstance(chunk_size, int):
+ raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
+ # simulate reading small chunks of the content
+ reused_chunks = iter_slices(self._content, chunk_size)
+
+ stream_chunks = generate()
+
+ chunks = reused_chunks if self._content_consumed else stream_chunks
+
+ if decode_unicode:
+ chunks = stream_decode_response_unicode(chunks, self)
+
+ return chunks
+
+ def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
+ """Iterates over the response data, one line at a time. When
+ stream=True is set on the request, this avoids reading the
+ content at once into memory for large responses.
+
+ .. note:: This method is not reentrant safe.
+ """
+
+ pending = None
+
+ for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
+
+ if pending is not None:
+ chunk = pending + chunk
+
+ if delimiter:
+ lines = chunk.split(delimiter)
+ else:
+ lines = chunk.splitlines()
+
+ if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+ pending = lines.pop()
+ else:
+ pending = None
+
+ for line in lines:
+ yield line
+
+ if pending is not None:
+ yield pending
+
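+    # Streaming sketch (assumes stream=True was set on the request; the URL
+    # and process() are hypothetical):
+    #
+    #   >>> r = requests.get('https://example.com/big.txt', stream=True)
+    #   >>> for line in r.iter_lines(chunk_size=512):
+    #   ...     process(line)
+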
+ @property
+ def content(self):
+ """Content of the response, in bytes."""
+
+ if self._content is False:
+ # Read the contents.
+ if self._content_consumed:
+ raise RuntimeError(
+ 'The content for this response was already consumed')
+
+ if self.status_code == 0 or self.raw is None:
+ self._content = None
+ else:
+ self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
+
+ self._content_consumed = True
+ # don't need to release the connection; that's been handled by urllib3
+ # since we exhausted the data.
+ return self._content
+
+ @property
+ def text(self):
+ """Content of the response, in unicode.
+
+ If Response.encoding is None, encoding will be guessed using
+ ``chardet``.
+
+ The encoding of the response content is determined based solely on HTTP
+ headers, following RFC 2616 to the letter. If you can take advantage of
+ non-HTTP knowledge to make a better guess at the encoding, you should
+ set ``r.encoding`` appropriately before accessing this property.
+ """
+
+ # Try charset from content-type
+ content = None
+ encoding = self.encoding
+
+ if not self.content:
+ return str('')
+
+ # Fallback to auto-detected encoding.
+ if self.encoding is None:
+ encoding = self.apparent_encoding
+
+ # Decode unicode from given encoding.
+ try:
+ content = str(self.content, encoding, errors='replace')
+ except (LookupError, TypeError):
+ # A LookupError is raised if the encoding was not found which could
+ # indicate a misspelling or similar mistake.
+ #
+ # A TypeError can be raised if encoding is None
+ #
+ # So we try blindly encoding.
+ content = str(self.content, errors='replace')
+
+ return content
+
+ def json(self, **kwargs):
+ r"""Returns the json-encoded content of a response, if any.
+
+ :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
+ :raises ValueError: If the response body does not contain valid json.
+ """
+
+ if not self.encoding and self.content and len(self.content) > 3:
+ # No encoding set. JSON RFC 4627 section 3 states we should expect
+ # UTF-8, -16 or -32. Detect which one to use; If the detection or
+ # decoding fails, fall back to `self.text` (using chardet to make
+ # a best guess).
+ encoding = guess_json_utf(self.content)
+ if encoding is not None:
+ try:
+ return complexjson.loads(
+ self.content.decode(encoding), **kwargs
+ )
+ except UnicodeDecodeError:
+ # Wrong UTF codec detected; usually because it's not UTF-8
+ # but some other 8-bit codec. This is an RFC violation,
+ # and the server didn't bother to tell us what codec *was*
+ # used.
+ pass
+ return complexjson.loads(self.text, **kwargs)
+
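+    # A minimal sketch (assumes the server returns a JSON body):
+    #
+    #   >>> r = requests.get('https://httpbin.org/get')
+    #   >>> r.json()['url']
+    #   'https://httpbin.org/get'
+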
+ @property
+ def links(self):
+ """Returns the parsed header links of the response, if any."""
+
+ header = self.headers.get('link')
+
+ # l = MultiDict()
+ l = {}
+
+ if header:
+ links = parse_header_links(header)
+
+ for link in links:
+ key = link.get('rel') or link.get('url')
+ l[key] = link
+
+ return l
+
+ def raise_for_status(self):
+ """Raises stored :class:`HTTPError`, if one occurred."""
+
+ http_error_msg = ''
+ if isinstance(self.reason, bytes):
+ # We attempt to decode utf-8 first because some servers
+ # choose to localize their reason strings. If the string
+ # isn't utf-8, we fall back to iso-8859-1 for all other
+ # encodings. (See PR #3538)
+ try:
+ reason = self.reason.decode('utf-8')
+ except UnicodeDecodeError:
+ reason = self.reason.decode('iso-8859-1')
+ else:
+ reason = self.reason
+
+ if 400 <= self.status_code < 500:
+ http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
+
+ elif 500 <= self.status_code < 600:
+ http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
+
+ if http_error_msg:
+ raise HTTPError(http_error_msg, response=self)
+
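+    # A minimal sketch (assumes an endpoint that answers 404):
+    #
+    #   >>> r = requests.get('https://httpbin.org/status/404')
+    #   >>> r.raise_for_status()
+    #   Traceback (most recent call last):
+    #       ...
+    #   requests.exceptions.HTTPError: 404 Client Error: ... for url: ...
+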
+ def close(self):
+ """Releases the connection back to the pool. Once this method has been
+ called the underlying ``raw`` object must not be accessed again.
+
+ *Note: Should not normally need to be called explicitly.*
+ """
+ if not self._content_consumed:
+ self.raw.close()
+
+ release_conn = getattr(self.raw, 'release_conn', None)
+ if release_conn is not None:
+ release_conn()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/packages.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/packages.py
new file mode 100644
index 00000000..9582fa73
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/packages.py
@@ -0,0 +1,16 @@
+import sys
+
+# This code exists for backwards compatibility reasons.
+# I don't like it either. Just look the other way. :)
+
+for package in ('urllib3', 'idna', 'chardet'):
+ vendored_package = "pip._vendor." + package
+ locals()[package] = __import__(vendored_package)
+ # This traversal is apparently necessary such that the identities are
+ # preserved (requests.packages.urllib3.* is urllib3.*)
+ for mod in list(sys.modules):
+ if mod == vendored_package or mod.startswith(vendored_package + '.'):
+ unprefixed_mod = mod[len("pip._vendor."):]
+ sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]
+
+# Kinda cool, though, right?
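+
+# The effect, sketched: both import paths end up naming the same module
+# object.
+#
+#   >>> import pip._vendor.urllib3 as direct
+#   >>> import pip._vendor.requests.packages.urllib3 as aliased
+#   >>> direct is aliased
+#   True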
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/sessions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/sessions.py
new file mode 100644
index 00000000..d73d700f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/sessions.py
@@ -0,0 +1,770 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.sessions
+~~~~~~~~~~~~~~~~~
+
+This module provides a Session object to manage and persist settings across
+requests (cookies, auth, proxies).
+"""
+import os
+import sys
+import time
+from datetime import timedelta
+
+from .auth import _basic_auth_str
+from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping
+from .cookies import (
+ cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
+from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
+from .hooks import default_hooks, dispatch_hook
+from ._internal_utils import to_native_string
+from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
+from .exceptions import (
+ TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
+
+from .structures import CaseInsensitiveDict
+from .adapters import HTTPAdapter
+
+from .utils import (
+ requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
+ get_auth_from_url, rewind_body
+)
+
+from .status_codes import codes
+
+# formerly defined here, reexposed here for backward compatibility
+from .models import REDIRECT_STATI
+
+# Preferred clock, based on which one is more accurate on a given system.
+if sys.platform == 'win32':
+ try: # Python 3.4+
+ preferred_clock = time.perf_counter
+ except AttributeError: # Earlier than Python 3.
+ preferred_clock = time.clock
+else:
+ preferred_clock = time.time
+
+
+def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
+ """Determines appropriate setting for a given request, taking into account
+ the explicit setting on that request, and the setting in the session. If a
+ setting is a dictionary, they will be merged together using `dict_class`
+ """
+
+ if session_setting is None:
+ return request_setting
+
+ if request_setting is None:
+ return session_setting
+
+ # Bypass if not a dictionary (e.g. verify)
+ if not (
+ isinstance(session_setting, Mapping) and
+ isinstance(request_setting, Mapping)
+ ):
+ return request_setting
+
+ merged_setting = dict_class(to_key_val_list(session_setting))
+ merged_setting.update(to_key_val_list(request_setting))
+
+ # Remove keys that are set to None. Extract keys first to avoid altering
+ # the dictionary during iteration.
+ none_keys = [k for (k, v) in merged_setting.items() if v is None]
+ for key in none_keys:
+ del merged_setting[key]
+
+ return merged_setting
+
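+# A minimal sketch: request values win, and a None value removes a key
+# inherited from the session.
+#
+#   >>> merge_setting({'b': None, 'c': 3}, {'a': 1, 'b': 2})
+#   OrderedDict([('a', 1), ('c', 3)])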
+
+def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
+ """Properly merges both requests and session hooks.
+
+ This is necessary because when request_hooks == {'response': []}, the
+ merge breaks Session hooks entirely.
+ """
+ if session_hooks is None or session_hooks.get('response') == []:
+ return request_hooks
+
+ if request_hooks is None or request_hooks.get('response') == []:
+ return session_hooks
+
+ return merge_setting(request_hooks, session_hooks, dict_class)
+
+
+class SessionRedirectMixin(object):
+
+ def get_redirect_target(self, resp):
+ """Receives a Response. Returns a redirect URI or ``None``"""
+ # Due to the nature of how requests processes redirects this method will
+ # be called at least once upon the original response and at least twice
+ # on each subsequent redirect response (if any).
+ # If a custom mixin is used to handle this logic, it may be advantageous
+ # to cache the redirect location onto the response object as a private
+ # attribute.
+ if resp.is_redirect:
+ location = resp.headers['location']
+            # Currently the underlying http module on py3 decodes headers
+            # in latin1, but empirical evidence suggests that latin1 is very
+            # rarely used with non-ASCII characters in HTTP headers.
+            # It is more likely that a UTF8 header will be received than a
+            # latin1 one, which causes incorrect handling of UTF8-encoded
+            # Location headers. To solve this, we re-encode the location in
+            # latin1.
+ if is_py3:
+ location = location.encode('latin1')
+ return to_native_string(location, 'utf8')
+ return None
+
+ def should_strip_auth(self, old_url, new_url):
+ """Decide whether Authorization header should be removed when redirecting"""
+ old_parsed = urlparse(old_url)
+ new_parsed = urlparse(new_url)
+ if old_parsed.hostname != new_parsed.hostname:
+ return True
+ # Special case: allow http -> https redirect when using the standard
+ # ports. This isn't specified by RFC 7235, but is kept to avoid
+ # breaking backwards compatibility with older versions of requests
+ # that allowed any redirects on the same host.
+ if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
+ and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
+ return False
+
+ # Handle default port usage corresponding to scheme.
+ changed_port = old_parsed.port != new_parsed.port
+ changed_scheme = old_parsed.scheme != new_parsed.scheme
+ default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
+ if (not changed_scheme and old_parsed.port in default_port
+ and new_parsed.port in default_port):
+ return False
+
+ # Standard case: root URI must match
+ return changed_port or changed_scheme
+
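+    # A minimal sketch of these rules (hostnames hypothetical): an
+    # http -> https upgrade on the default ports keeps the header, while a
+    # host change strips it.
+    #
+    #   >>> m = SessionRedirectMixin()
+    #   >>> m.should_strip_auth('http://acme.tld/a', 'https://acme.tld/b')
+    #   False
+    #   >>> m.should_strip_auth('https://acme.tld/', 'https://other.tld/')
+    #   True
+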
+ def resolve_redirects(self, resp, req, stream=False, timeout=None,
+ verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):
+ """Receives a Response. Returns a generator of Responses or Requests."""
+
+ hist = [] # keep track of history
+
+ url = self.get_redirect_target(resp)
+ previous_fragment = urlparse(req.url).fragment
+ while url:
+ prepared_request = req.copy()
+
+ # Update history and keep track of redirects.
+ # resp.history must ignore the original request in this loop
+ hist.append(resp)
+ resp.history = hist[1:]
+
+ try:
+ resp.content # Consume socket so it can be released
+ except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
+ resp.raw.read(decode_content=False)
+
+ if len(resp.history) >= self.max_redirects:
+ raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)
+
+ # Release the connection back into the pool.
+ resp.close()
+
+ # Handle redirection without scheme (see: RFC 1808 Section 4)
+ if url.startswith('//'):
+ parsed_rurl = urlparse(resp.url)
+ url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url)
+
+ # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
+ parsed = urlparse(url)
+ if parsed.fragment == '' and previous_fragment:
+ parsed = parsed._replace(fragment=previous_fragment)
+ elif parsed.fragment:
+ previous_fragment = parsed.fragment
+ url = parsed.geturl()
+
+ # Facilitate relative 'location' headers, as allowed by RFC 7231.
+ # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
+ # Compliant with RFC3986, we percent encode the url.
+ if not parsed.netloc:
+ url = urljoin(resp.url, requote_uri(url))
+ else:
+ url = requote_uri(url)
+
+ prepared_request.url = to_native_string(url)
+
+ self.rebuild_method(prepared_request, resp)
+
+ # https://github.com/requests/requests/issues/1084
+ if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
+ # https://github.com/requests/requests/issues/3490
+ purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
+ for header in purged_headers:
+ prepared_request.headers.pop(header, None)
+ prepared_request.body = None
+
+ headers = prepared_request.headers
+ try:
+ del headers['Cookie']
+ except KeyError:
+ pass
+
+ # Extract any cookies sent on the response to the cookiejar
+ # in the new request. Because we've mutated our copied prepared
+ # request, use the old one that we haven't yet touched.
+ extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
+ merge_cookies(prepared_request._cookies, self.cookies)
+ prepared_request.prepare_cookies(prepared_request._cookies)
+
+ # Rebuild auth and proxy information.
+ proxies = self.rebuild_proxies(prepared_request, proxies)
+ self.rebuild_auth(prepared_request, resp)
+
+ # A failed tell() sets `_body_position` to `object()`. This non-None
+ # value ensures `rewindable` will be True, allowing us to raise an
+ # UnrewindableBodyError, instead of hanging the connection.
+ rewindable = (
+ prepared_request._body_position is not None and
+ ('Content-Length' in headers or 'Transfer-Encoding' in headers)
+ )
+
+ # Attempt to rewind consumed file-like object.
+ if rewindable:
+ rewind_body(prepared_request)
+
+ # Override the original request.
+ req = prepared_request
+
+ if yield_requests:
+ yield req
+ else:
+
+ resp = self.send(
+ req,
+ stream=stream,
+ timeout=timeout,
+ verify=verify,
+ cert=cert,
+ proxies=proxies,
+ allow_redirects=False,
+ **adapter_kwargs
+ )
+
+ extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
+
+ # extract redirect url, if any, for the next loop
+ url = self.get_redirect_target(resp)
+ yield resp
+
+ def rebuild_auth(self, prepared_request, response):
+ """When being redirected we may want to strip authentication from the
+ request to avoid leaking credentials. This method intelligently removes
+ and reapplies authentication where possible to avoid credential loss.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+
+ if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
+ # If we get redirected to a new host, we should strip out any
+ # authentication headers.
+ del headers['Authorization']
+
+ # .netrc might have more auth for us on our new host.
+ new_auth = get_netrc_auth(url) if self.trust_env else None
+ if new_auth is not None:
+ prepared_request.prepare_auth(new_auth)
+
+ return
+
+ def rebuild_proxies(self, prepared_request, proxies):
+ """This method re-evaluates the proxy configuration by considering the
+ environment variables. If we are redirected to a URL covered by
+ NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+ proxy keys for this URL (in case they were stripped by a previous
+ redirect).
+
+ This method also replaces the Proxy-Authorization header where
+ necessary.
+
+ :rtype: dict
+ """
+ proxies = proxies if proxies is not None else {}
+ headers = prepared_request.headers
+ url = prepared_request.url
+ scheme = urlparse(url).scheme
+ new_proxies = proxies.copy()
+ no_proxy = proxies.get('no_proxy')
+
+ bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
+ if self.trust_env and not bypass_proxy:
+ environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+
+ proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
+
+ if proxy:
+ new_proxies.setdefault(scheme, proxy)
+
+ if 'Proxy-Authorization' in headers:
+ del headers['Proxy-Authorization']
+
+ try:
+ username, password = get_auth_from_url(new_proxies[scheme])
+ except KeyError:
+ username, password = None, None
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+
+ return new_proxies
+
+ def rebuild_method(self, prepared_request, response):
+ """When being redirected we may want to change the method of the request
+ based on certain specs or browser behavior.
+ """
+ method = prepared_request.method
+
+ # https://tools.ietf.org/html/rfc7231#section-6.4.4
+ if response.status_code == codes.see_other and method != 'HEAD':
+ method = 'GET'
+
+ # Do what the browsers do, despite standards...
+ # First, turn 302s into GETs.
+ if response.status_code == codes.found and method != 'HEAD':
+ method = 'GET'
+
+ # Second, if a POST is responded to with a 301, turn it into a GET.
+ # This bizarre behaviour is explained in Issue 1704.
+ if response.status_code == codes.moved and method == 'POST':
+ method = 'GET'
+
+ prepared_request.method = method
+
+
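+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source, showing the observable effect of rebuild_method above: a POST
+# redirected with 301, 302 or 303 is resent as a GET. Instantiating the
+# mixin directly and the local Response import are for demonstration only.
+def _sketch_rebuild_method_effect():
+    from .models import Response
+
+    prepared = PreparedRequest()
+    prepared.method = 'POST'
+    response = Response()
+    response.status_code = codes.moved  # 301 Moved Permanently
+    SessionRedirectMixin().rebuild_method(prepared, response)
+    return prepared.method  # 'GET'
+
+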
+class Session(SessionRedirectMixin):
+ """A Requests session.
+
+ Provides cookie persistence, connection-pooling, and configuration.
+
+ Basic Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> s.get('https://httpbin.org/get')
+ <Response [200]>
+
+ Or as a context manager::
+
+ >>> with requests.Session() as s:
+ >>> s.get('https://httpbin.org/get')
+ <Response [200]>
+ """
+
+ __attrs__ = [
+ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
+ 'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
+ 'max_redirects',
+ ]
+
+ def __init__(self):
+
+ #: A case-insensitive dictionary of headers to be sent on each
+ #: :class:`Request <Request>` sent from this
+ #: :class:`Session <Session>`.
+ self.headers = default_headers()
+
+ #: Default Authentication tuple or object to attach to
+ #: :class:`Request <Request>`.
+ self.auth = None
+
+ #: Dictionary mapping protocol or protocol and host to the URL of the proxy
+ #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
+ #: be used on each :class:`Request <Request>`.
+ self.proxies = {}
+
+ #: Event-handling hooks.
+ self.hooks = default_hooks()
+
+ #: Dictionary of querystring data to attach to each
+ #: :class:`Request <Request>`. The dictionary values may be lists for
+ #: representing multivalued query parameters.
+ self.params = {}
+
+ #: Stream response content default.
+ self.stream = False
+
+ #: SSL Verification default.
+ self.verify = True
+
+        #: SSL client certificate default: if string, path to an ssl client
+        #: cert file (.pem); if tuple, a ('cert', 'key') pair.
+ self.cert = None
+
+ #: Maximum number of redirects allowed. If the request exceeds this
+ #: limit, a :class:`TooManyRedirects` exception is raised.
+ #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
+ #: 30.
+ self.max_redirects = DEFAULT_REDIRECT_LIMIT
+
+ #: Trust environment settings for proxy configuration, default
+ #: authentication and similar.
+ self.trust_env = True
+
+ #: A CookieJar containing all currently outstanding cookies set on this
+ #: session. By default it is a
+ #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
+ #: may be any other ``cookielib.CookieJar`` compatible object.
+ self.cookies = cookiejar_from_dict({})
+
+ # Default connection adapters.
+ self.adapters = OrderedDict()
+ self.mount('https://', HTTPAdapter())
+ self.mount('http://', HTTPAdapter())
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def prepare_request(self, request):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for
+ transmission and returns it. The :class:`PreparedRequest` has settings
+ merged from the :class:`Request <Request>` instance and those of the
+ :class:`Session`.
+
+ :param request: :class:`Request` instance to prepare with this
+ session's settings.
+ :rtype: requests.PreparedRequest
+ """
+ cookies = request.cookies or {}
+
+ # Bootstrap CookieJar.
+ if not isinstance(cookies, cookielib.CookieJar):
+ cookies = cookiejar_from_dict(cookies)
+
+ # Merge with session cookies
+ merged_cookies = merge_cookies(
+ merge_cookies(RequestsCookieJar(), self.cookies), cookies)
+
+ # Set environment's basic authentication if not explicitly set.
+ auth = request.auth
+ if self.trust_env and not auth and not self.auth:
+ auth = get_netrc_auth(request.url)
+
+ p = PreparedRequest()
+ p.prepare(
+ method=request.method.upper(),
+ url=request.url,
+ files=request.files,
+ data=request.data,
+ json=request.json,
+ headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
+ params=merge_setting(request.params, self.params),
+ auth=merge_setting(auth, self.auth),
+ cookies=merged_cookies,
+ hooks=merge_hooks(request.hooks, self.hooks),
+ )
+ return p
+
+ def request(self, method, url,
+ params=None, data=None, headers=None, cookies=None, files=None,
+ auth=None, timeout=None, allow_redirects=True, proxies=None,
+ hooks=None, stream=None, verify=None, cert=None, json=None):
+ """Constructs a :class:`Request <Request>`, prepares it and sends it.
+ Returns :class:`Response <Response>` object.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query
+ string for the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the
+ :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the
+ :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the
+ :class:`Request`.
+ :param files: (optional) Dictionary of ``'filename': file-like-objects``
+ for multipart encoding upload.
+ :param auth: (optional) Auth tuple or callable to enable
+ Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) <timeouts>` tuple.
+ :type timeout: float or tuple
+        :param allow_redirects: (optional) Whether to follow redirects. Defaults to ``True``.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol or protocol and
+ hostname to the URL of the proxy.
+ :param stream: (optional) whether to immediately download the response
+ content. Defaults to ``False``.
+ :param verify: (optional) Either a boolean, in which case it controls whether we verify
+ the server's TLS certificate, or a string, in which case it must be a path
+ to a CA bundle to use. Defaults to ``True``.
+ :param cert: (optional) if String, path to ssl client cert file (.pem).
+ If Tuple, ('cert', 'key') pair.
+ :rtype: requests.Response
+ """
+ # Create the Request.
+ req = Request(
+ method=method.upper(),
+ url=url,
+ headers=headers,
+ files=files,
+ data=data or {},
+ json=json,
+ params=params or {},
+ auth=auth,
+ cookies=cookies,
+ hooks=hooks,
+ )
+ prep = self.prepare_request(req)
+
+ proxies = proxies or {}
+
+ settings = self.merge_environment_settings(
+ prep.url, proxies, stream, verify, cert
+ )
+
+ # Send the request.
+ send_kwargs = {
+ 'timeout': timeout,
+ 'allow_redirects': allow_redirects,
+ }
+ send_kwargs.update(settings)
+ resp = self.send(prep, **send_kwargs)
+
+ return resp
+
+ def get(self, url, **kwargs):
+ r"""Sends a GET request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('GET', url, **kwargs)
+
+    def options(self, url, **kwargs):
+        r"""Sends an OPTIONS request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('OPTIONS', url, **kwargs)
+
+ def head(self, url, **kwargs):
+ r"""Sends a HEAD request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return self.request('HEAD', url, **kwargs)
+
+ def post(self, url, data=None, json=None, **kwargs):
+ r"""Sends a POST request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ return self.request('POST', url, data=data, json=json, **kwargs)
+
+ def put(self, url, data=None, **kwargs):
+ r"""Sends a PUT request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ return self.request('PUT', url, data=data, **kwargs)
+
+ def patch(self, url, data=None, **kwargs):
+ r"""Sends a PATCH request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ return self.request('PATCH', url, data=data, **kwargs)
+
+ def delete(self, url, **kwargs):
+ r"""Sends a DELETE request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :rtype: requests.Response
+ """
+
+ return self.request('DELETE', url, **kwargs)
+
+ def send(self, request, **kwargs):
+ """Send a given PreparedRequest.
+
+ :rtype: requests.Response
+ """
+ # Set defaults that the hooks can utilize to ensure they always have
+ # the correct parameters to reproduce the previous request.
+ kwargs.setdefault('stream', self.stream)
+ kwargs.setdefault('verify', self.verify)
+ kwargs.setdefault('cert', self.cert)
+ kwargs.setdefault('proxies', self.proxies)
+
+ # It's possible that users might accidentally send a Request object.
+ # Guard against that specific failure case.
+ if isinstance(request, Request):
+ raise ValueError('You can only send PreparedRequests.')
+
+ # Set up variables needed for resolve_redirects and dispatching of hooks
+ allow_redirects = kwargs.pop('allow_redirects', True)
+ stream = kwargs.get('stream')
+ hooks = request.hooks
+
+ # Get the appropriate adapter to use
+ adapter = self.get_adapter(url=request.url)
+
+ # Start time (approximately) of the request
+ start = preferred_clock()
+
+ # Send the request
+ r = adapter.send(request, **kwargs)
+
+ # Total elapsed time of the request (approximately)
+ elapsed = preferred_clock() - start
+ r.elapsed = timedelta(seconds=elapsed)
+
+ # Response manipulation hooks
+ r = dispatch_hook('response', hooks, r, **kwargs)
+
+ # Persist cookies
+ if r.history:
+
+ # If the hooks create history then we want those cookies too
+ for resp in r.history:
+ extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
+
+ extract_cookies_to_jar(self.cookies, request, r.raw)
+
+ # Redirect resolving generator.
+ gen = self.resolve_redirects(r, request, **kwargs)
+
+ # Resolve redirects if allowed.
+ history = [resp for resp in gen] if allow_redirects else []
+
+ # Shuffle things around if there's history.
+ if history:
+ # Insert the first (original) request at the start
+ history.insert(0, r)
+ # Get the last request made
+ r = history.pop()
+ r.history = history
+
+        # If redirects aren't being followed, store the next request produced
+        # by resolve_redirects on the response, for use by Response.next.
+ if not allow_redirects:
+ try:
+ r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))
+ except StopIteration:
+ pass
+
+ if not stream:
+ r.content
+
+ return r
+
+ def merge_environment_settings(self, url, proxies, stream, verify, cert):
+ """
+ Check the environment and merge it with some settings.
+
+ :rtype: dict
+ """
+ # Gather clues from the surrounding environment.
+ if self.trust_env:
+ # Set environment's proxies.
+ no_proxy = proxies.get('no_proxy') if proxies is not None else None
+ env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+ for (k, v) in env_proxies.items():
+ proxies.setdefault(k, v)
+
+ # Look for requests environment configuration and be compatible
+ # with cURL.
+ if verify is True or verify is None:
+ verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
+ os.environ.get('CURL_CA_BUNDLE'))
+
+ # Merge all the kwargs.
+ proxies = merge_setting(proxies, self.proxies)
+ stream = merge_setting(stream, self.stream)
+ verify = merge_setting(verify, self.verify)
+ cert = merge_setting(cert, self.cert)
+
+ return {'verify': verify, 'proxies': proxies, 'stream': stream,
+ 'cert': cert}
+
+ def get_adapter(self, url):
+ """
+ Returns the appropriate connection adapter for the given URL.
+
+ :rtype: requests.adapters.BaseAdapter
+ """
+ for (prefix, adapter) in self.adapters.items():
+
+ if url.lower().startswith(prefix.lower()):
+ return adapter
+
+ # Nothing matches :-/
+ raise InvalidSchema("No connection adapters were found for '%s'" % url)
+
+    def close(self):
+        """Closes all adapters and, as such, the session."""
+ for v in self.adapters.values():
+ v.close()
+
+ def mount(self, prefix, adapter):
+ """Registers a connection adapter to a prefix.
+
+ Adapters are sorted in descending order by prefix length.
+ """
+ self.adapters[prefix] = adapter
+ keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
+
+ for key in keys_to_move:
+ self.adapters[key] = self.adapters.pop(key)
+
+ def __getstate__(self):
+ state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
+ return state
+
+ def __setstate__(self, state):
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+
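+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. Demonstrates that mount() keeps adapters ordered so the most
+# specific (longest) prefix wins in get_adapter(). The host below is
+# hypothetical.
+def _sketch_adapter_resolution():
+    s = Session()
+    api_adapter = HTTPAdapter()
+    s.mount('https://api.example.com/', api_adapter)
+    # The longer prefix is consulted before the default 'https://' mount.
+    assert s.get_adapter('https://api.example.com/v1') is api_adapter
+    assert s.get_adapter('https://example.org/') is not api_adapter
+    s.close()
+
+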
+def session():
+ """
+ Returns a :class:`Session` for context-management.
+
+ .. deprecated:: 1.0.0
+
+ This method has been deprecated since version 1.0.0 and is only kept for
+ backwards compatibility. New code should use :class:`~requests.sessions.Session`
+ to create a session. This may be removed at a future date.
+
+ :rtype: Session
+ """
+ return Session()
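+
+
+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. Session-level settings merge into each prepared request:
+# per-request values win, and the session supplies the defaults. The URL,
+# parameter and header below are hypothetical.
+def _sketch_setting_merge():
+    s = Session()
+    s.params = {'token': 'abc'}
+    s.headers['X-Demo'] = '1'
+    prep = s.prepare_request(Request(method='GET', url='http://example.com/'))
+    assert prep.url == 'http://example.com/?token=abc'
+    assert prep.headers['X-Demo'] == '1'
+    s.close()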
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/status_codes.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/status_codes.py
new file mode 100644
index 00000000..813e8c4e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/status_codes.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+
+r"""
+The ``codes`` object defines a mapping from common names for HTTP statuses
+to their numerical codes, accessible either as attributes or as dictionary
+items.
+
+>>> requests.codes['temporary_redirect']
+307
+>>> requests.codes.teapot
+418
+>>> requests.codes['\o/']
+200
+
+Some codes have multiple names, and both upper- and lower-case versions of
+the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
+``codes.okay`` all correspond to the HTTP status code 200.
+"""
+
+from .structures import LookupDict
+
+_codes = {
+
+ # Informational.
+ 100: ('continue',),
+ 101: ('switching_protocols',),
+ 102: ('processing',),
+ 103: ('checkpoint',),
+ 122: ('uri_too_long', 'request_uri_too_long'),
+ 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
+ 201: ('created',),
+ 202: ('accepted',),
+ 203: ('non_authoritative_info', 'non_authoritative_information'),
+ 204: ('no_content',),
+ 205: ('reset_content', 'reset'),
+ 206: ('partial_content', 'partial'),
+ 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
+ 208: ('already_reported',),
+ 226: ('im_used',),
+
+ # Redirection.
+ 300: ('multiple_choices',),
+ 301: ('moved_permanently', 'moved', '\\o-'),
+ 302: ('found',),
+ 303: ('see_other', 'other'),
+ 304: ('not_modified',),
+ 305: ('use_proxy',),
+ 306: ('switch_proxy',),
+ 307: ('temporary_redirect', 'temporary_moved', 'temporary'),
+ 308: ('permanent_redirect',
+ 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
+
+ # Client Error.
+ 400: ('bad_request', 'bad'),
+ 401: ('unauthorized',),
+ 402: ('payment_required', 'payment'),
+ 403: ('forbidden',),
+ 404: ('not_found', '-o-'),
+ 405: ('method_not_allowed', 'not_allowed'),
+ 406: ('not_acceptable',),
+ 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
+ 408: ('request_timeout', 'timeout'),
+ 409: ('conflict',),
+ 410: ('gone',),
+ 411: ('length_required',),
+ 412: ('precondition_failed', 'precondition'),
+ 413: ('request_entity_too_large',),
+ 414: ('request_uri_too_large',),
+ 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
+ 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
+ 417: ('expectation_failed',),
+ 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
+ 421: ('misdirected_request',),
+ 422: ('unprocessable_entity', 'unprocessable'),
+ 423: ('locked',),
+ 424: ('failed_dependency', 'dependency'),
+ 425: ('unordered_collection', 'unordered'),
+ 426: ('upgrade_required', 'upgrade'),
+ 428: ('precondition_required', 'precondition'),
+ 429: ('too_many_requests', 'too_many'),
+ 431: ('header_fields_too_large', 'fields_too_large'),
+ 444: ('no_response', 'none'),
+ 449: ('retry_with', 'retry'),
+ 450: ('blocked_by_windows_parental_controls', 'parental_controls'),
+ 451: ('unavailable_for_legal_reasons', 'legal_reasons'),
+ 499: ('client_closed_request',),
+
+ # Server Error.
+ 500: ('internal_server_error', 'server_error', '/o\\', '✗'),
+ 501: ('not_implemented',),
+ 502: ('bad_gateway',),
+ 503: ('service_unavailable', 'unavailable'),
+ 504: ('gateway_timeout',),
+ 505: ('http_version_not_supported', 'http_version'),
+ 506: ('variant_also_negotiates',),
+ 507: ('insufficient_storage',),
+ 509: ('bandwidth_limit_exceeded', 'bandwidth'),
+ 510: ('not_extended',),
+ 511: ('network_authentication_required', 'network_auth', 'network_authentication'),
+}
+
+codes = LookupDict(name='status_codes')
+
+def _init():
+ for code, titles in _codes.items():
+ for title in titles:
+ setattr(codes, title, code)
+ if not title.startswith(('\\', '/')):
+ setattr(codes, title.upper(), code)
+
+ def doc(code):
+ names = ', '.join('``%s``' % n for n in _codes[code])
+ return '* %d: %s' % (code, names)
+
+ global __doc__
+ __doc__ = (__doc__ + '\n' +
+ '\n'.join(doc(code) for code in sorted(_codes))
+ if __doc__ is not None else None)
+
+_init()
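+
+
+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. After _init() runs, every name in _codes resolves both as an
+# attribute and as a dictionary-style key, in either letter case.
+def _sketch_codes_lookup():
+    assert codes.ok == codes.OK == codes['okay'] == 200
+    assert codes.teapot == codes['IM_A_TEAPOT'] == 418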
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/structures.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/structures.py
new file mode 100644
index 00000000..da930e28
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/structures.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.structures
+~~~~~~~~~~~~~~~~~~~
+
+Data structures that power Requests.
+"""
+
+from .compat import OrderedDict, Mapping, MutableMapping
+
+
+class CaseInsensitiveDict(MutableMapping):
+ """A case-insensitive ``dict``-like object.
+
+ Implements all methods and operations of
+ ``MutableMapping`` as well as dict's ``copy``. Also
+ provides ``lower_items``.
+
+ All keys are expected to be strings. The structure remembers the
+ case of the last key to be set, and ``iter(instance)``,
+ ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
+    will contain case-sensitive keys. However, querying and membership
+    testing are case-insensitive::
+
+ cid = CaseInsensitiveDict()
+ cid['Accept'] = 'application/json'
+ cid['aCCEPT'] == 'application/json' # True
+ list(cid) == ['Accept'] # True
+
+ For example, ``headers['content-encoding']`` will return the
+ value of a ``'Content-Encoding'`` response header, regardless
+ of how the header name was originally stored.
+
+ If the constructor, ``.update``, or equality comparison
+ operations are given keys that have equal ``.lower()``s, the
+ behavior is undefined.
+ """
+
+ def __init__(self, data=None, **kwargs):
+ self._store = OrderedDict()
+ if data is None:
+ data = {}
+ self.update(data, **kwargs)
+
+ def __setitem__(self, key, value):
+ # Use the lowercased key for lookups, but store the actual
+ # key alongside the value.
+ self._store[key.lower()] = (key, value)
+
+ def __getitem__(self, key):
+ return self._store[key.lower()][1]
+
+ def __delitem__(self, key):
+ del self._store[key.lower()]
+
+ def __iter__(self):
+ return (casedkey for casedkey, mappedvalue in self._store.values())
+
+ def __len__(self):
+ return len(self._store)
+
+ def lower_items(self):
+ """Like iteritems(), but with all lowercase keys."""
+ return (
+ (lowerkey, keyval[1])
+ for (lowerkey, keyval)
+ in self._store.items()
+ )
+
+ def __eq__(self, other):
+ if isinstance(other, Mapping):
+ other = CaseInsensitiveDict(other)
+ else:
+ return NotImplemented
+ # Compare insensitively
+ return dict(self.lower_items()) == dict(other.lower_items())
+
+ # Copy is required
+ def copy(self):
+ return CaseInsensitiveDict(self._store.values())
+
+ def __repr__(self):
+ return str(dict(self.items()))
+
+
+class LookupDict(dict):
+ """Dictionary lookup object."""
+
+ def __init__(self, name=None):
+ self.name = name
+ super(LookupDict, self).__init__()
+
+ def __repr__(self):
+ return '<lookup \'%s\'>' % (self.name)
+
+ def __getitem__(self, key):
+ # We allow fall-through here, so values default to None
+
+ return self.__dict__.get(key, None)
+
+ def get(self, key, default=None):
+ return self.__dict__.get(key, default)
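+
+
+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. LookupDict stores entries in the instance __dict__, so attribute
+# assignment is what populates lookups, and missing keys fall through to
+# None instead of raising KeyError.
+def _sketch_lookupdict():
+    d = LookupDict(name='example')
+    d.hello = 1
+    assert d['hello'] == 1
+    assert d['missing'] is None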
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/utils.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/utils.py
new file mode 100644
index 00000000..8170a8d2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/requests/utils.py
@@ -0,0 +1,977 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
+import contextlib
+import io
+import os
+import re
+import socket
+import struct
+import sys
+import tempfile
+import warnings
+import zipfile
+
+from .__version__ import __version__
+from . import certs
+# to_native_string is unused here, but imported here for backwards compatibility
+from ._internal_utils import to_native_string
+from .compat import parse_http_list as _parse_list_header
+from .compat import (
+ quote, urlparse, bytes, str, OrderedDict, unquote, getproxies,
+ proxy_bypass, urlunparse, basestring, integer_types, is_py3,
+ proxy_bypass_environment, getproxies_environment, Mapping)
+from .cookies import cookiejar_from_dict
+from .structures import CaseInsensitiveDict
+from .exceptions import (
+ InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
+
+NETRC_FILES = ('.netrc', '_netrc')
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+DEFAULT_PORTS = {'http': 80, 'https': 443}
+
+
+if sys.platform == 'win32':
+ # provide a proxy_bypass version on Windows without DNS lookups
+
+ def proxy_bypass_registry(host):
+ try:
+ if is_py3:
+ import winreg
+ else:
+ import _winreg as winreg
+ except ImportError:
+ return False
+
+ try:
+ internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
+ r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
+ # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+ proxyEnable = int(winreg.QueryValueEx(internetSettings,
+ 'ProxyEnable')[0])
+ # ProxyOverride is almost always a string
+ proxyOverride = winreg.QueryValueEx(internetSettings,
+ 'ProxyOverride')[0]
+ except OSError:
+ return False
+ if not proxyEnable or not proxyOverride:
+ return False
+
+ # make a check value list from the registry entry: replace the
+ # '<local>' string by the localhost entry and the corresponding
+ # canonical entry.
+ proxyOverride = proxyOverride.split(';')
+ # now check if we match one of the registry values.
+ for test in proxyOverride:
+ if test == '<local>':
+ if '.' not in host:
+ return True
+ test = test.replace(".", r"\.") # mask dots
+ test = test.replace("*", r".*") # change glob sequence
+ test = test.replace("?", r".") # change glob char
+ if re.match(test, host, re.I):
+ return True
+ return False
+
+    def proxy_bypass(host):  # noqa
+        """Return True if the host should be bypassed.
+
+ Checks proxy settings gathered from the environment, if specified,
+ or the registry.
+ """
+ if getproxies_environment():
+ return proxy_bypass_environment(host)
+ else:
+ return proxy_bypass_registry(host)
+
+
+def dict_to_sequence(d):
+    """Return ``d.items()`` if ``d`` has an ``items`` method; otherwise return ``d`` unchanged."""
+
+ if hasattr(d, 'items'):
+ d = d.items()
+
+ return d
+
+
+def super_len(o):
+ total_length = None
+ current_position = 0
+
+ if hasattr(o, '__len__'):
+ total_length = len(o)
+
+ elif hasattr(o, 'len'):
+ total_length = o.len
+
+ elif hasattr(o, 'fileno'):
+ try:
+ fileno = o.fileno()
+ except io.UnsupportedOperation:
+ pass
+ else:
+ total_length = os.fstat(fileno).st_size
+
+ # Having used fstat to determine the file length, we need to
+ # confirm that this file was opened up in binary mode.
+ if 'b' not in o.mode:
+ warnings.warn((
+ "Requests has determined the content-length for this "
+ "request using the binary size of the file: however, the "
+ "file has been opened in text mode (i.e. without the 'b' "
+ "flag in the mode). This may lead to an incorrect "
+ "content-length. In Requests 3.0, support will be removed "
+ "for files in text mode."),
+ FileModeWarning
+ )
+
+ if hasattr(o, 'tell'):
+ try:
+ current_position = o.tell()
+ except (OSError, IOError):
+ # This can happen in some weird situations, such as when the file
+ # is actually a special file descriptor like stdin. In this
+ # instance, we don't know what the length is, so set it to zero and
+ # let requests chunk it instead.
+ if total_length is not None:
+ current_position = total_length
+ else:
+ if hasattr(o, 'seek') and total_length is None:
+                # StringIO and BytesIO have seek but no usable fileno
+ try:
+ # seek to end of file
+ o.seek(0, 2)
+ total_length = o.tell()
+
+ # seek back to current position to support
+ # partially read file-like objects
+ o.seek(current_position or 0)
+ except (OSError, IOError):
+ total_length = 0
+
+ if total_length is None:
+ total_length = 0
+
+ return max(0, total_length - current_position)
+
+
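+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. super_len() reports the bytes remaining from the current
+# position, which is what matters when a body was partially read before
+# being sent.
+def _sketch_super_len():
+    buf = io.BytesIO(b'abcdef')
+    assert super_len(buf) == 6
+    buf.seek(4)
+    assert super_len(buf) == 2  # 6 bytes total minus current position 4
+
+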
+def get_netrc_auth(url, raise_errors=False):
+ """Returns the Requests tuple auth for a given url from netrc."""
+
+ try:
+ from netrc import netrc, NetrcParseError
+
+ netrc_path = None
+
+ for f in NETRC_FILES:
+ try:
+ loc = os.path.expanduser('~/{}'.format(f))
+ except KeyError:
+ # os.path.expanduser can fail when $HOME is undefined and
+ # getpwuid fails. See https://bugs.python.org/issue20164 &
+ # https://github.com/requests/requests/issues/1846
+ return
+
+ if os.path.exists(loc):
+ netrc_path = loc
+ break
+
+ # Abort early if there isn't one.
+ if netrc_path is None:
+ return
+
+ ri = urlparse(url)
+
+        # Strip port numbers from netloc. This weird ``if...encode`` dance is
+ # used for Python 3.2, which doesn't support unicode literals.
+ splitstr = b':'
+ if isinstance(url, str):
+ splitstr = splitstr.decode('ascii')
+ host = ri.netloc.split(splitstr)[0]
+
+ try:
+ _netrc = netrc(netrc_path).authenticators(host)
+ if _netrc:
+ # Return with login / password
+ login_i = (0 if _netrc[0] else 1)
+ return (_netrc[login_i], _netrc[2])
+ except (NetrcParseError, IOError):
+ # If there was a parsing error or a permissions issue reading the file,
+ # we'll just skip netrc auth unless explicitly asked to raise errors.
+ if raise_errors:
+ raise
+
+ # AppEngine hackiness.
+ except (ImportError, AttributeError):
+ pass
+
+
+def guess_filename(obj):
+ """Tries to guess the filename of the given object."""
+ name = getattr(obj, 'name', None)
+ if (name and isinstance(name, basestring) and name[0] != '<' and
+ name[-1] != '>'):
+ return os.path.basename(name)
+
+
+def extract_zipped_paths(path):
+ """Replace nonexistent paths that look like they refer to a member of a zip
+ archive with the location of an extracted copy of the target, or else
+ just return the provided path unchanged.
+ """
+ if os.path.exists(path):
+ # this is already a valid path, no need to do anything further
+ return path
+
+ # find the first valid part of the provided path and treat that as a zip archive
+ # assume the rest of the path is the name of a member in the archive
+ archive, member = os.path.split(path)
+ while archive and not os.path.exists(archive):
+ archive, prefix = os.path.split(archive)
+ member = '/'.join([prefix, member])
+
+ if not zipfile.is_zipfile(archive):
+ return path
+
+ zip_file = zipfile.ZipFile(archive)
+ if member not in zip_file.namelist():
+ return path
+
+ # we have a valid zip archive and a valid member of that archive
+ tmp = tempfile.gettempdir()
+ extracted_path = os.path.join(tmp, *member.split('/'))
+ if not os.path.exists(extracted_path):
+ extracted_path = zip_file.extract(member, path=tmp)
+
+ return extracted_path
+
+
+def from_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+    dictionary. If it can be, return an OrderedDict, e.g.,
+
+ ::
+
+ >>> from_key_val_list([('key', 'val')])
+ OrderedDict([('key', 'val')])
+ >>> from_key_val_list('string')
+    Traceback (most recent call last):
+    ...
+    ValueError: cannot encode objects that are not 2-tuples
+ >>> from_key_val_list({'key': 'val'})
+ OrderedDict([('key', 'val')])
+
+ :rtype: OrderedDict
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
+ return OrderedDict(value)
+
+
+def to_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+ dictionary. If it can be, return a list of tuples, e.g.,
+
+ ::
+
+ >>> to_key_val_list([('key', 'val')])
+ [('key', 'val')]
+ >>> to_key_val_list({'key': 'val'})
+ [('key', 'val')]
+ >>> to_key_val_list('string')
+    Traceback (most recent call last):
+    ...
+    ValueError: cannot encode objects that are not 2-tuples
+
+ :rtype: list
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
+ if isinstance(value, Mapping):
+ value = value.items()
+
+ return list(value)
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_list_header(value):
+ """Parse lists as described by RFC 2068 Section 2.
+
+ In particular, parse comma-separated lists where the elements of
+ the list may include quoted-strings. A quoted-string could
+ contain a comma. A non-quoted string could have quotes in the
+ middle. Quotes are removed automatically after parsing.
+
+    It basically works like :func:`parse_set_header`, except that items
+ may appear multiple times and case sensitivity is preserved.
+
+ The return value is a standard :class:`list`:
+
+ >>> parse_list_header('token, "quoted value"')
+ ['token', 'quoted value']
+
+ To create a header from the :class:`list` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a list header.
+ :return: :class:`list`
+ :rtype: list
+ """
+ result = []
+ for item in _parse_list_header(value):
+ if item[:1] == item[-1:] == '"':
+ item = unquote_header_value(item[1:-1])
+ result.append(item)
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_dict_header(value):
+ """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
+ convert them into a python dict:
+
+ >>> d = parse_dict_header('foo="is a fish", bar="as well"')
+ >>> type(d) is dict
+ True
+ >>> sorted(d.items())
+ [('bar', 'as well'), ('foo', 'is a fish')]
+
+ If there is no value for a key it will be `None`:
+
+ >>> parse_dict_header('key_without_value')
+ {'key_without_value': None}
+
+ To create a header from the :class:`dict` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a dict header.
+ :return: :class:`dict`
+ :rtype: dict
+ """
+ result = {}
+ for item in _parse_list_header(value):
+ if '=' not in item:
+ result[item] = None
+ continue
+ name, value = item.split('=', 1)
+ if value[:1] == value[-1:] == '"':
+ value = unquote_header_value(value[1:-1])
+ result[name] = value
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def unquote_header_value(value, is_filename=False):
+ r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
+ This does not use the real unquoting but what browsers are actually
+ using for quoting.
+
+ :param value: the header value to unquote.
+ :rtype: str
+ """
+ if value and value[0] == value[-1] == '"':
+ # this is not the real unquoting, but fixing this so that the
+ # RFC is met will result in bugs with internet explorer and
+ # probably some other browsers as well. IE for example is
+ # uploading files with "C:\foo\bar.txt" as filename
+ value = value[1:-1]
+
+ # if this is a filename and the starting characters look like
+ # a UNC path, then just return the value without quotes. Using the
+ # replace sequence below on a UNC path has the effect of turning
+ # the leading double slash into a single slash and then
+ # _fix_ie_filename() doesn't work correctly. See #458.
+ if not is_filename or value[:2] != '\\\\':
+ return value.replace('\\\\', '\\').replace('\\"', '"')
+ return value
+
+
+def dict_from_cookiejar(cj):
+ """Returns a key/value dictionary from a CookieJar.
+
+ :param cj: CookieJar object to extract cookies from.
+ :rtype: dict
+ """
+
+ cookie_dict = {}
+
+ for cookie in cj:
+ cookie_dict[cookie.name] = cookie.value
+
+ return cookie_dict
+
+
+def add_dict_to_cookiejar(cj, cookie_dict):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cj: CookieJar to insert cookies into.
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ :rtype: CookieJar
+ """
+
+ return cookiejar_from_dict(cookie_dict, cj)
+
+
+def get_encodings_from_content(content):
+ """Returns encodings from given content string.
+
+ :param content: bytestring to extract encodings from.
+ """
+ warnings.warn((
+ 'In requests 3.0, get_encodings_from_content will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
+ pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
+ xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
+
+ return (charset_re.findall(content) +
+ pragma_re.findall(content) +
+ xml_re.findall(content))
+
+
+def _parse_content_type_header(header):
+ """Returns content type and parameters from given header
+
+ :param header: string
+ :return: tuple containing content type and dictionary of
+ parameters
+ """
+
+ tokens = header.split(';')
+ content_type, params = tokens[0].strip(), tokens[1:]
+ params_dict = {}
+ items_to_strip = "\"' "
+
+ for param in params:
+ param = param.strip()
+ if param:
+ key, value = param, True
+ index_of_equals = param.find("=")
+ if index_of_equals != -1:
+ key = param[:index_of_equals].strip(items_to_strip)
+ value = param[index_of_equals + 1:].strip(items_to_strip)
+ params_dict[key.lower()] = value
+ return content_type, params_dict
+
+
+def get_encoding_from_headers(headers):
+ """Returns encodings from given HTTP Header Dict.
+
+ :param headers: dictionary to extract encoding from.
+ :rtype: str
+ """
+
+ content_type = headers.get('content-type')
+
+ if not content_type:
+ return None
+
+ content_type, params = _parse_content_type_header(content_type)
+
+ if 'charset' in params:
+ return params['charset'].strip("'\"")
+
+ if 'text' in content_type:
+ return 'ISO-8859-1'
+
+
+def stream_decode_response_unicode(iterator, r):
+    """Stream decodes an iterator."""
+
+ if r.encoding is None:
+ for item in iterator:
+ yield item
+ return
+
+ decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
+ for chunk in iterator:
+ rv = decoder.decode(chunk)
+ if rv:
+ yield rv
+ rv = decoder.decode(b'', final=True)
+ if rv:
+ yield rv
+
+
+def iter_slices(string, slice_length):
+ """Iterate over slices of a string."""
+ pos = 0
+ if slice_length is None or slice_length <= 0:
+ slice_length = len(string)
+ while pos < len(string):
+ yield string[pos:pos + slice_length]
+ pos += slice_length
+
+
+def get_unicode_from_response(r):
+ """Returns the requested content back in unicode.
+
+ :param r: Response object to get unicode content from.
+
+    Tries:
+
+ 1. charset from content-type
+ 2. fall back and replace all unicode characters
+
+ :rtype: str
+ """
+ warnings.warn((
+ 'In requests 3.0, get_unicode_from_response will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ tried_encodings = []
+
+ # Try charset from content-type
+ encoding = get_encoding_from_headers(r.headers)
+
+ if encoding:
+ try:
+ return str(r.content, encoding)
+ except UnicodeError:
+ tried_encodings.append(encoding)
+
+ # Fall back:
+ try:
+ return str(r.content, encoding, errors='replace')
+ except TypeError:
+ return r.content
+
+
+# The unreserved URI characters (RFC 3986)
+UNRESERVED_SET = frozenset(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")
+
+
+def unquote_unreserved(uri):
+ """Un-escape any percent-escape sequences in a URI that are unreserved
+ characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
+
+ :rtype: str
+ """
+ parts = uri.split('%')
+ for i in range(1, len(parts)):
+ h = parts[i][0:2]
+ if len(h) == 2 and h.isalnum():
+ try:
+ c = chr(int(h, 16))
+ except ValueError:
+ raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
+
+ if c in UNRESERVED_SET:
+ parts[i] = c + parts[i][2:]
+ else:
+ parts[i] = '%' + parts[i]
+ else:
+ parts[i] = '%' + parts[i]
+ return ''.join(parts)
+
+
+def requote_uri(uri):
+ """Re-quote the given URI.
+
+ This function passes the given URI through an unquote/quote cycle to
+ ensure that it is fully and consistently quoted.
+
+ :rtype: str
+ """
+ safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
+ safe_without_percent = "!#$&'()*+,/:;=?@[]~"
+ try:
+ # Unquote only the unreserved characters
+ # Then quote only illegal characters (do not quote reserved,
+ # unreserved, or '%')
+ return quote(unquote_unreserved(uri), safe=safe_with_percent)
+ except InvalidURL:
+ # We couldn't unquote the given URI, so let's try quoting it, but
+ # there may be unquoted '%'s in the URI. We need to make sure they're
+ # properly quoted so they do not cause issues elsewhere.
+ return quote(uri, safe=safe_without_percent)
+
+
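+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. requote_uri() is idempotent: already-quoted input passes through
+# unchanged, raw reserved characters are percent-encoded, and escaped
+# unreserved characters are normalized back to their literal form.
+def _sketch_requote_uri():
+    assert requote_uri('http://example.com/a%20b') == 'http://example.com/a%20b'
+    assert requote_uri('http://example.com/a b') == 'http://example.com/a%20b'
+    assert requote_uri('http://example.com/%7Euser') == 'http://example.com/~user'
+
+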
+def address_in_network(ip, net):
+ """This function allows you to check if an IP belongs to a network subnet
+
+ Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
+ returns False if ip = 192.168.1.1 and net = 192.168.100.0/24
+
+ :rtype: bool
+ """
+ ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
+ netaddr, bits = net.split('/')
+ netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
+ network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
+ return (ipaddr & netmask) == (network & netmask)
+
+
+def dotted_netmask(mask):
+ """Converts mask from /xx format to xxx.xxx.xxx.xxx
+
+ Example: if mask is 24 function returns 255.255.255.0
+
+ :rtype: str
+ """
+ bits = 0xffffffff ^ (1 << 32 - mask) - 1
+ return socket.inet_ntoa(struct.pack('>I', bits))
+
+
+def is_ipv4_address(string_ip):
+ """
+ :rtype: bool
+ """
+ try:
+ socket.inet_aton(string_ip)
+ except socket.error:
+ return False
+ return True
+
+
+def is_valid_cidr(string_network):
+ """
+ Very simple check of the cidr format in no_proxy variable.
+
+ :rtype: bool
+ """
+ if string_network.count('/') == 1:
+ try:
+ mask = int(string_network.split('/')[1])
+ except ValueError:
+ return False
+
+ if mask < 1 or mask > 32:
+ return False
+
+ try:
+ socket.inet_aton(string_network.split('/')[0])
+ except socket.error:
+ return False
+ else:
+ return False
+ return True
+
+
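+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source, exercising the netmask helpers above.
+def _sketch_cidr_helpers():
+    assert dotted_netmask(24) == '255.255.255.0'
+    assert is_valid_cidr('192.168.1.0/24')
+    assert not is_valid_cidr('192.168.1.0')  # plain IP, not CIDR notation
+    assert address_in_network('192.168.1.1', '192.168.1.0/24')
+    assert not address_in_network('192.168.1.1', '192.168.100.0/24')
+
+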
+@contextlib.contextmanager
+def set_environ(env_name, value):
+ """Set the environment variable 'env_name' to 'value'
+
+ Save previous value, yield, and then restore the previous value stored in
+ the environment variable 'env_name'.
+
+ If 'value' is None, do nothing"""
+ value_changed = value is not None
+ if value_changed:
+ old_value = os.environ.get(env_name)
+ os.environ[env_name] = value
+ try:
+ yield
+ finally:
+ if value_changed:
+ if old_value is None:
+ del os.environ[env_name]
+ else:
+ os.environ[env_name] = old_value
+
+
+def should_bypass_proxies(url, no_proxy):
+ """
+ Returns whether we should bypass proxies or not.
+
+ :rtype: bool
+ """
+ # Prioritize lowercase environment variables over uppercase
+ # to keep a consistent behaviour with other http projects (curl, wget).
+ get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
+
+ # First check whether no_proxy is defined. If it is, check that the URL
+ # we're getting isn't in the no_proxy list.
+ no_proxy_arg = no_proxy
+ if no_proxy is None:
+ no_proxy = get_proxy('no_proxy')
+ parsed = urlparse(url)
+
+ if parsed.hostname is None:
+ # URLs don't always have hostnames, e.g. file:/// urls.
+ return True
+
+ if no_proxy:
+ # We need to check whether we match here. We need to see if we match
+ # the end of the hostname, both with and without the port.
+ no_proxy = (
+ host for host in no_proxy.replace(' ', '').split(',') if host
+ )
+
+ if is_ipv4_address(parsed.hostname):
+ for proxy_ip in no_proxy:
+ if is_valid_cidr(proxy_ip):
+ if address_in_network(parsed.hostname, proxy_ip):
+ return True
+ elif parsed.hostname == proxy_ip:
+ # If no_proxy ip was defined in plain IP notation instead of cidr notation &
+ # matches the IP of the index
+ return True
+ else:
+ host_with_port = parsed.hostname
+ if parsed.port:
+ host_with_port += ':{}'.format(parsed.port)
+
+ for host in no_proxy:
+ if parsed.hostname.endswith(host) or host_with_port.endswith(host):
+ # The URL does match something in no_proxy, so we don't want
+ # to apply the proxies on this URL.
+ return True
+
+ with set_environ('no_proxy', no_proxy_arg):
+ # parsed.hostname can be `None` in cases such as a file URI.
+ try:
+ bypass = proxy_bypass(parsed.hostname)
+ except (TypeError, socket.gaierror):
+ bypass = False
+
+ if bypass:
+ return True
+
+ return False
+
+
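+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. Hostnames are matched against no_proxy entries by suffix, and
+# plain IPs against CIDR ranges; non-matching hosts fall through to the
+# platform proxy_bypass check. The hosts below are hypothetical.
+def _sketch_no_proxy_matching():
+    assert should_bypass_proxies('http://internal.example.com/',
+                                 no_proxy='example.com')
+    assert should_bypass_proxies('http://10.0.0.5/', no_proxy='10.0.0.0/8')
+
+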
+def get_environ_proxies(url, no_proxy=None):
+ """
+ Return a dict of environment proxies.
+
+ :rtype: dict
+ """
+ if should_bypass_proxies(url, no_proxy=no_proxy):
+ return {}
+ else:
+ return getproxies()
+
+
+def select_proxy(url, proxies):
+ """Select a proxy for the url, if applicable.
+
+    :param url: The url of the request being made
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+ """
+ proxies = proxies or {}
+ urlparts = urlparse(url)
+ if urlparts.hostname is None:
+ return proxies.get(urlparts.scheme, proxies.get('all'))
+
+ proxy_keys = [
+ urlparts.scheme + '://' + urlparts.hostname,
+ urlparts.scheme,
+ 'all://' + urlparts.hostname,
+ 'all',
+ ]
+ proxy = None
+ for proxy_key in proxy_keys:
+ if proxy_key in proxies:
+ proxy = proxies[proxy_key]
+ break
+
+ return proxy
+
+
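+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. Scheme+host keys beat bare scheme keys, which beat 'all'. The
+# hosts and proxy URLs below are hypothetical.
+def _sketch_select_proxy():
+    proxies = {
+        'http': 'http://proxy1:3128',
+        'http://host.example': 'http://proxy2:3128',
+        'all': 'http://proxy3:3128',
+    }
+    assert select_proxy('http://host.example/x', proxies) == 'http://proxy2:3128'
+    assert select_proxy('http://other.example/', proxies) == 'http://proxy1:3128'
+    assert select_proxy('ftp://host.example/', proxies) == 'http://proxy3:3128'
+
+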
+def default_user_agent(name="python-requests"):
+ """
+ Return a string representing the default user agent.
+
+ :rtype: str
+ """
+ return '%s/%s' % (name, __version__)
+
+
+def default_headers():
+ """
+ :rtype: requests.structures.CaseInsensitiveDict
+ """
+ return CaseInsensitiveDict({
+ 'User-Agent': default_user_agent(),
+ 'Accept-Encoding': ', '.join(('gzip', 'deflate')),
+ 'Accept': '*/*',
+ 'Connection': 'keep-alive',
+ })
+
+
+def parse_header_links(value):
+    """Return a list of parsed link headers.
+
+ i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+ :rtype: list
+ """
+
+ links = []
+
+ replace_chars = ' \'"'
+
+ value = value.strip(replace_chars)
+ if not value:
+ return links
+
+ for val in re.split(', *<', value):
+ try:
+ url, params = val.split(';', 1)
+ except ValueError:
+ url, params = val, ''
+
+ link = {'url': url.strip('<> \'"')}
+
+ for param in params.split(';'):
+ try:
+ key, value = param.split('=')
+ except ValueError:
+ break
+
+ link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+ links.append(link)
+
+ return links
+
+
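+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source, showing a typical paginated Link header being parsed. The URLs
+# are hypothetical.
+def _sketch_parse_header_links():
+    links = parse_header_links('<http://example.com/1>; rel="next", '
+                               '<http://example.com/2>; rel="last"')
+    assert links[0] == {'url': 'http://example.com/1', 'rel': 'next'}
+    assert links[1] == {'url': 'http://example.com/2', 'rel': 'last'}
+
+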
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+ """
+ :rtype: str
+ """
+ # JSON always starts with two ASCII characters, so detection is as
+ # easy as counting the nulls and from their location and count
+ # determine the encoding. Also detect a BOM, if present.
+ sample = data[:4]
+ if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+ return 'utf-32' # BOM included
+ if sample[:3] == codecs.BOM_UTF8:
+ return 'utf-8-sig' # BOM included, MS style (discouraged)
+ if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+ return 'utf-16' # BOM included
+ nullcount = sample.count(_null)
+ if nullcount == 0:
+ return 'utf-8'
+ if nullcount == 2:
+ if sample[::2] == _null2: # 1st and 3rd are null
+ return 'utf-16-be'
+ if sample[1::2] == _null2: # 2nd and 4th are null
+ return 'utf-16-le'
+ # Did not detect 2 valid UTF-16 ascii-range characters
+ if nullcount == 3:
+ if sample[:3] == _null3:
+ return 'utf-32-be'
+ if sample[1:] == _null3:
+ return 'utf-32-le'
+ # Did not detect a valid UTF-32 ascii-range character
+ return None
+
+
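+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source. The first two characters of any JSON document are ASCII, so the
+# position and count of null bytes reveal the UTF flavour.
+def _sketch_guess_json_utf():
+    assert guess_json_utf(b'{"a": 1}') == 'utf-8'
+    assert guess_json_utf('{"a": 1}'.encode('utf-16-le')) == 'utf-16-le'
+    assert guess_json_utf(codecs.BOM_UTF16_LE + b'{}') == 'utf-16'
+
+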
+def prepend_scheme_if_needed(url, new_scheme):
+ """Given a URL that may or may not have a scheme, prepend the given scheme.
+ Does not replace a present scheme with the one provided as an argument.
+
+ :rtype: str
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
+
+ # urlparse is a finicky beast, and sometimes decides that there isn't a
+ # netloc present. Assume that it's being over-cautious, and switch netloc
+ # and path if urlparse decided there was no netloc.
+ if not netloc:
+ netloc, path = path, netloc
+
+ return urlunparse((scheme, netloc, path, params, query, fragment))
+
+
+def get_auth_from_url(url):
+ """Given a url with authentication components, extract them into a tuple of
+ username,password.
+
+ :rtype: (str,str)
+ """
+ parsed = urlparse(url)
+
+ try:
+ auth = (unquote(parsed.username), unquote(parsed.password))
+ except (AttributeError, TypeError):
+ auth = ('', '')
+
+ return auth
+
+
+# Moved outside of the function to avoid recompiling it on every call
+_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
+_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
+
+
+def check_header_validity(header):
+ """Verifies that header value is a string which doesn't contain
+ leading whitespace or return characters. This prevents unintended
+ header injection.
+
+ :param header: tuple, in the format (name, value).
+ """
+ name, value = header
+
+ if isinstance(value, bytes):
+ pat = _CLEAN_HEADER_REGEX_BYTE
+ else:
+ pat = _CLEAN_HEADER_REGEX_STR
+ try:
+ if not pat.match(value):
+ raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
+ except TypeError:
+ raise InvalidHeader("Value for header {%s: %s} must be of type str or "
+ "bytes, not %s" % (name, value, type(value)))
+
+
+def urldefragauth(url):
+ """
+    Given a url, remove the fragment and the authentication part.
+
+ :rtype: str
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url)
+
+ # see func:`prepend_scheme_if_needed`
+ if not netloc:
+ netloc, path = path, netloc
+
+ netloc = netloc.rsplit('@', 1)[-1]
+
+ return urlunparse((scheme, netloc, path, params, query, ''))
+
+
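+# Editor's note: an illustrative sketch, not part of the vendored requests
+# source, exercising the URL helpers above with hypothetical URLs.
+def _sketch_url_helpers():
+    assert prepend_scheme_if_needed('example.com/path', 'http') == 'http://example.com/path'
+    assert prepend_scheme_if_needed('https://example.com', 'http') == 'https://example.com'
+    assert get_auth_from_url('http://user:pass@example.com/') == ('user', 'pass')
+    assert get_auth_from_url('http://example.com/') == ('', '')
+    assert urldefragauth('http://user:pass@example.com/p#frag') == 'http://example.com/p'
+
+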
+def rewind_body(prepared_request):
+ """Move file pointer back to its recorded starting position
+ so it can be read again on redirect.
+ """
+ body_seek = getattr(prepared_request.body, 'seek', None)
+ if body_seek is not None and isinstance(prepared_request._body_position, integer_types):
+ try:
+ body_seek(prepared_request._body_position)
+ except (IOError, OSError):
+ raise UnrewindableBodyError("An error occurred when rewinding request "
+ "body for redirect.")
+ else:
+ raise UnrewindableBodyError("Unable to rewind request body for redirect.")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/retrying.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/retrying.py
new file mode 100644
index 00000000..6d1e627a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/retrying.py
@@ -0,0 +1,267 @@
+## Copyright 2013-2014 Ray Holder
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+## http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+
+import random
+from pip._vendor import six
+import sys
+import time
+import traceback
+
+
+# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
+MAX_WAIT = 1073741823
+
+
+def retry(*dargs, **dkw):
+ """
+    Decorator function that instantiates a Retrying object.
+
+    @param *dargs: positional arguments passed to the Retrying object
+ @param **dkw: keyword arguments passed to the Retrying object
+ """
+ # support both @retry and @retry() as valid syntax
+ if len(dargs) == 1 and callable(dargs[0]):
+ def wrap_simple(f):
+
+ @six.wraps(f)
+ def wrapped_f(*args, **kw):
+ return Retrying().call(f, *args, **kw)
+
+ return wrapped_f
+
+ return wrap_simple(dargs[0])
+
+ else:
+ def wrap(f):
+
+ @six.wraps(f)
+ def wrapped_f(*args, **kw):
+ return Retrying(*dargs, **dkw).call(f, *args, **kw)
+
+ return wrapped_f
+
+ return wrap
+
+
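+# Editor's note: an illustrative sketch, not part of the vendored source.
+# Both decorator spellings are supported; the policies shown are examples.
+def _sketch_retry_usage():
+    @retry
+    def plain():
+        return 'ok'  # retried until success; no stop condition configured
+
+    @retry(stop_max_attempt_number=3, wait_fixed=200)
+    def configured():
+        return 'ok'  # at most 3 attempts, sleeping 200 ms between them
+
+    assert plain() == 'ok'
+    assert configured() == 'ok'
+
+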
+class Retrying(object):
+
+ def __init__(self,
+ stop=None, wait=None,
+ stop_max_attempt_number=None,
+ stop_max_delay=None,
+ wait_fixed=None,
+ wait_random_min=None, wait_random_max=None,
+ wait_incrementing_start=None, wait_incrementing_increment=None,
+ wait_exponential_multiplier=None, wait_exponential_max=None,
+ retry_on_exception=None,
+ retry_on_result=None,
+ wrap_exception=False,
+ stop_func=None,
+ wait_func=None,
+ wait_jitter_max=None):
+
+ self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number
+ self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
+ self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
+ self._wait_random_min = 0 if wait_random_min is None else wait_random_min
+ self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
+ self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start
+ self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment
+ self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier
+ self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max
+ self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max
+
+ # TODO add chaining of stop behaviors
+ # stop behavior
+ stop_funcs = []
+ if stop_max_attempt_number is not None:
+ stop_funcs.append(self.stop_after_attempt)
+
+ if stop_max_delay is not None:
+ stop_funcs.append(self.stop_after_delay)
+
+ if stop_func is not None:
+ self.stop = stop_func
+
+ elif stop is None:
+ self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs)
+
+ else:
+ self.stop = getattr(self, stop)
+
+ # TODO add chaining of wait behaviors
+ # wait behavior
+ wait_funcs = [lambda *args, **kwargs: 0]
+ if wait_fixed is not None:
+ wait_funcs.append(self.fixed_sleep)
+
+ if wait_random_min is not None or wait_random_max is not None:
+ wait_funcs.append(self.random_sleep)
+
+ if wait_incrementing_start is not None or wait_incrementing_increment is not None:
+ wait_funcs.append(self.incrementing_sleep)
+
+ if wait_exponential_multiplier is not None or wait_exponential_max is not None:
+ wait_funcs.append(self.exponential_sleep)
+
+ if wait_func is not None:
+ self.wait = wait_func
+
+ elif wait is None:
+ self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs)
+
+ else:
+ self.wait = getattr(self, wait)
+
+ # retry on exception filter
+ if retry_on_exception is None:
+ self._retry_on_exception = self.always_reject
+ else:
+ self._retry_on_exception = retry_on_exception
+
+ # TODO simplify retrying by Exception types
+ # retry on result filter
+ if retry_on_result is None:
+ self._retry_on_result = self.never_reject
+ else:
+ self._retry_on_result = retry_on_result
+
+ self._wrap_exception = wrap_exception
+
+    def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
+        """Stop once the number of previous attempts reaches stop_max_attempt_number."""
+ return previous_attempt_number >= self._stop_max_attempt_number
+
+    def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
+        """Stop once the time since the first attempt reaches stop_max_delay (in ms)."""
+ return delay_since_first_attempt_ms >= self._stop_max_delay
+
+ def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
+ """Don't sleep at all before retrying."""
+ return 0
+
+ def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
+ """Sleep a fixed amount of time between each retry."""
+ return self._wait_fixed
+
+ def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
+ """Sleep a random amount of time between wait_random_min and wait_random_max"""
+ return random.randint(self._wait_random_min, self._wait_random_max)
+
+ def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
+ """
+ Sleep an incremental amount of time after each attempt, starting at
+ wait_incrementing_start and incrementing by wait_incrementing_increment
+ """
+ result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1))
+ if result < 0:
+ result = 0
+ return result
+
+ def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
+ exp = 2 ** previous_attempt_number
+ result = self._wait_exponential_multiplier * exp
+ if result > self._wait_exponential_max:
+ result = self._wait_exponential_max
+ if result < 0:
+ result = 0
+ return result
+
+ def never_reject(self, result):
+ return False
+
+ def always_reject(self, result):
+ return True
+
+ def should_reject(self, attempt):
+ reject = False
+ if attempt.has_exception:
+ reject |= self._retry_on_exception(attempt.value[1])
+ else:
+ reject |= self._retry_on_result(attempt.value)
+
+ return reject
+
+ def call(self, fn, *args, **kwargs):
+ start_time = int(round(time.time() * 1000))
+ attempt_number = 1
+ while True:
+ try:
+ attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
+ except:
+ tb = sys.exc_info()
+ attempt = Attempt(tb, attempt_number, True)
+
+ if not self.should_reject(attempt):
+ return attempt.get(self._wrap_exception)
+
+ delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
+ if self.stop(attempt_number, delay_since_first_attempt_ms):
+ if not self._wrap_exception and attempt.has_exception:
+                    # get() on an attempt holding an exception re-raises it; the explicit raise is just a safeguard
+ raise attempt.get()
+ else:
+ raise RetryError(attempt)
+ else:
+ sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
+ if self._wait_jitter_max:
+ jitter = random.random() * self._wait_jitter_max
+ sleep = sleep + max(0, jitter)
+ time.sleep(sleep / 1000.0)
+
+ attempt_number += 1
+
+
+class Attempt(object):
+ """
+    An Attempt encapsulates a single call to a target function, which ends
+    in either a normal return value or an Exception, depending on what
+    occurred during execution.
+ """
+
+ def __init__(self, value, attempt_number, has_exception):
+ self.value = value
+ self.attempt_number = attempt_number
+ self.has_exception = has_exception
+
+ def get(self, wrap_exception=False):
+ """
+ Return the return value of this Attempt instance or raise an Exception.
+        If wrap_exception is true, this Attempt is wrapped inside a
+        RetryError before being raised.
+ """
+ if self.has_exception:
+ if wrap_exception:
+ raise RetryError(self)
+ else:
+ six.reraise(self.value[0], self.value[1], self.value[2])
+ else:
+ return self.value
+
+ def __repr__(self):
+ if self.has_exception:
+ return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2])))
+ else:
+ return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value)
+
+
+class RetryError(Exception):
+ """
+ A RetryError encapsulates the last Attempt instance right before giving up.
+ """
+
+ def __init__(self, last_attempt):
+ self.last_attempt = last_attempt
+
+ def __str__(self):
+ return "RetryError[{0}]".format(self.last_attempt)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/six.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/six.py
new file mode 100644
index 00000000..89b2188f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/six.py
@@ -0,0 +1,952 @@
+# Copyright (c) 2010-2018 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.12.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+    This class implements a PEP 302 finder and loader. It should be
+    compatible with Python 2.5 and all existing versions of Python 3.
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+        Return true if the named module is a package.
+
+        We need this method to get correct spec objects with
+        Python 3.4 (see PEP 451).
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add Windows-specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
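+
+# Editorial sketch, not part of six: client code consumes the registry built
+# above through ordinary imports; each name resolves lazily on first use.
+def _moves_example():
+    from pip._vendor.six.moves import range  # xrange on Python 2
+    return list(range(3))
+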
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ if from_value is None:
+ raise value
+ raise value from from_value
+ finally:
+ value = None
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
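+
+# Editorial sketch, not part of six: re-raising a captured exception with its
+# original traceback, the pattern pip's vendored retrying module uses in
+# Attempt.get().
+def _reraise_example():
+    try:
+        1 / 0
+    except ZeroDivisionError:
+        exc_info = sys.exc_info()
+        # ... later, propagate the original error with its traceback:
+        reraise(*exc_info)
+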
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
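+
+# Editorial sketch, not part of six: a class whose metaclass applies
+# identically on Python 2 and 3; Meta is illustrative only.
+def _with_metaclass_example():
+    class Meta(type):
+        pass
+
+    class Base(with_metaclass(Meta, object)):
+        pass
+
+    assert type(Base) is Meta
+    return Base
+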
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
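+
+# Editorial sketch, not part of six: the decorator form, equivalent in effect
+# to the with_metaclass() example above.
+def _add_metaclass_example():
+    class Meta(type):
+        pass
+
+    @add_metaclass(Meta)
+    class Widget(object):
+        pass
+
+    assert type(Widget) is Meta
+    return Widget
+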
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif isinstance(s, binary_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ if PY2 and isinstance(s, text_type):
+ s = s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ s = s.decode(encoding, errors)
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
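+
+
+# Editorial sketch, not part of six: the three ensure_* helpers above give
+# each Python line its native type without callers branching on PY2/PY3.
+def _ensure_example():
+    raw = ensure_binary(u"caf\xe9")  # b'caf\xc3\xa9' on both lines
+    text = ensure_text(raw)          # u'caf\xe9' on both lines
+    native = ensure_str(text)        # bytes on Python 2, text on Python 3
+    return raw, text, native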
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
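+
+# Editorial sketch, not part of six: define one text-returning __str__ and the
+# decorator supplies the __unicode__/__str__ pair under Python 2.
+def _unicode_compatible_example():
+    @python_2_unicode_compatible
+    class Greeting(object):
+        def __str__(self):
+            return u"hello"
+
+    return str(Greeting())
+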
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__init__.py
new file mode 100644
index 00000000..c4c0dde5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__init__.py
@@ -0,0 +1,91 @@
+"""
+urllib3 - Thread-safe connection pooling and reuse.
+"""
+from __future__ import absolute_import
+import warnings
+
+from .connectionpool import (
+ HTTPConnectionPool,
+ HTTPSConnectionPool,
+ connection_from_url
+)
+
+from . import exceptions
+from .filepost import encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .util.request import make_headers
+from .util.url import get_host
+from .util.timeout import Timeout
+from .util.retry import Retry
+
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+from logging import NullHandler
+
+__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
+__license__ = 'MIT'
+__version__ = '1.25.3'
+
+__all__ = (
+ 'HTTPConnectionPool',
+ 'HTTPSConnectionPool',
+ 'PoolManager',
+ 'ProxyManager',
+ 'HTTPResponse',
+ 'Retry',
+ 'Timeout',
+ 'add_stderr_logger',
+ 'connection_from_url',
+ 'disable_warnings',
+ 'encode_multipart_formdata',
+ 'get_host',
+ 'make_headers',
+ 'proxy_from_url',
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(level=logging.DEBUG):
+ """
+ Helper for quickly adding a StreamHandler to the logger. Useful for
+ debugging.
+
+ Returns the handler after adding it.
+ """
+ # This method needs to be in this __init__.py to get the __name__ correct
+ # even if urllib3 is vendored within another package.
+ logger = logging.getLogger(__name__)
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
+ logger.addHandler(handler)
+ logger.setLevel(level)
+ logger.debug('Added a stderr logging handler to logger: %s', __name__)
+ return handler
+
+
+# Clean up.
+del NullHandler
+
+
+# All warning filters *must* be appended unless you're really certain that they
+# shouldn't be: otherwise, it's very hard for users to use most Python
+# mechanisms to silence them.
+# SecurityWarnings always go off by default.
+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
+# SubjectAltNameWarnings should go off once per host.
+warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True)
+# InsecurePlatformWarnings don't vary between requests, so we keep them at the default.
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
+ append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True)
+
+
+def disable_warnings(category=exceptions.HTTPWarning):
+ """
+ Helper for quickly disabling all urllib3 warnings.
+ """
+ warnings.simplefilter('ignore', category)
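+
+
+# Editorial sketch, not part of urllib3: typical use of the two helpers above
+# when debugging pip's vendored HTTP stack.
+def _debug_example():
+    handler = add_stderr_logger(logging.INFO)  # echo pool activity to stderr
+    disable_warnings(exceptions.InsecureRequestWarning)  # mute one category
+    return handler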
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..2787d349
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc
new file mode 100644
index 00000000..7073c6b7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc
new file mode 100644
index 00000000..6985c7d1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc
new file mode 100644
index 00000000..0abcdf01
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 00000000..b3c2d1d5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc
new file mode 100644
index 00000000..8c719517
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc
new file mode 100644
index 00000000..32f52df8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc
new file mode 100644
index 00000000..dcdda18f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc
new file mode 100644
index 00000000..a89b1461
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc
new file mode 100644
index 00000000..eb1cfd9d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/_collections.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/_collections.py
new file mode 100644
index 00000000..34f23811
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/_collections.py
@@ -0,0 +1,329 @@
+from __future__ import absolute_import
+try:
+ from collections.abc import Mapping, MutableMapping
+except ImportError:
+ from collections import Mapping, MutableMapping
+try:
+ from threading import RLock
+except ImportError: # Platform-specific: No threads available
+ class RLock:
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
+
+from collections import OrderedDict
+from .exceptions import InvalidHeader
+from .packages.six import iterkeys, itervalues, PY3
+
+
+__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
+
+
+_Null = object()
+
+
+class RecentlyUsedContainer(MutableMapping):
+ """
+ Provides a thread-safe dict-like container which maintains up to
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
+ ``maxsize``.
+
+ :param maxsize:
+ Maximum number of recent elements to retain.
+
+ :param dispose_func:
+ Every time an item is evicted from the container,
+        ``dispose_func(value)`` is called.
+ """
+
+ ContainerCls = OrderedDict
+
+ def __init__(self, maxsize=10, dispose_func=None):
+ self._maxsize = maxsize
+ self.dispose_func = dispose_func
+
+ self._container = self.ContainerCls()
+ self.lock = RLock()
+
+ def __getitem__(self, key):
+ # Re-insert the item, moving it to the end of the eviction line.
+ with self.lock:
+ item = self._container.pop(key)
+ self._container[key] = item
+ return item
+
+ def __setitem__(self, key, value):
+ evicted_value = _Null
+ with self.lock:
+ # Possibly evict the existing value of 'key'
+ evicted_value = self._container.get(key, _Null)
+ self._container[key] = value
+
+ # If we didn't evict an existing value, we might have to evict the
+ # least recently used item from the beginning of the container.
+ if len(self._container) > self._maxsize:
+ _key, evicted_value = self._container.popitem(last=False)
+
+ if self.dispose_func and evicted_value is not _Null:
+ self.dispose_func(evicted_value)
+
+ def __delitem__(self, key):
+ with self.lock:
+ value = self._container.pop(key)
+
+ if self.dispose_func:
+ self.dispose_func(value)
+
+ def __len__(self):
+ with self.lock:
+ return len(self._container)
+
+ def __iter__(self):
+ raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
+
+ def clear(self):
+ with self.lock:
+ # Copy pointers to all values, then wipe the mapping
+ values = list(itervalues(self._container))
+ self._container.clear()
+
+ if self.dispose_func:
+ for value in values:
+ self.dispose_func(value)
+
+ def keys(self):
+ with self.lock:
+ return list(iterkeys(self._container))
+
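+
+# Editorial sketch, not part of urllib3: LRU eviction with a dispose
+# callback, the same pattern PoolManager uses to close evicted pools.
+def _lru_example():
+    disposed = []
+    cache = RecentlyUsedContainer(maxsize=2, dispose_func=disposed.append)
+    cache['a'] = 1
+    cache['b'] = 2
+    cache['c'] = 3  # evicts 'a', the least recently used key
+    assert disposed == [1] and 'a' not in cache
+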
+
+class HTTPHeaderDict(MutableMapping):
+ """
+ :param headers:
+ An iterable of field-value pairs. Must not contain multiple field names
+ when compared case-insensitively.
+
+ :param kwargs:
+ Additional field-value pairs to pass in to ``dict.update``.
+
+ A ``dict`` like container for storing HTTP Headers.
+
+ Field names are stored and compared case-insensitively in compliance with
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
+ case-insensitive pair.
+
+ Using ``__setitem__`` syntax overwrites fields that compare equal
+ case-insensitively in order to maintain ``dict``'s api. For fields that
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+ in a loop.
+
+ If multiple fields that are equal case-insensitively are passed to the
+ constructor or ``.update``, the behavior is undefined and some will be
+ lost.
+
+ >>> headers = HTTPHeaderDict()
+ >>> headers.add('Set-Cookie', 'foo=bar')
+ >>> headers.add('set-cookie', 'baz=quxx')
+ >>> headers['content-length'] = '7'
+ >>> headers['SET-cookie']
+ 'foo=bar, baz=quxx'
+ >>> headers['Content-Length']
+ '7'
+ """
+
+ def __init__(self, headers=None, **kwargs):
+ super(HTTPHeaderDict, self).__init__()
+ self._container = OrderedDict()
+ if headers is not None:
+ if isinstance(headers, HTTPHeaderDict):
+ self._copy_from(headers)
+ else:
+ self.extend(headers)
+ if kwargs:
+ self.extend(kwargs)
+
+ def __setitem__(self, key, val):
+ self._container[key.lower()] = [key, val]
+ return self._container[key.lower()]
+
+ def __getitem__(self, key):
+ val = self._container[key.lower()]
+ return ', '.join(val[1:])
+
+ def __delitem__(self, key):
+ del self._container[key.lower()]
+
+ def __contains__(self, key):
+ return key.lower() in self._container
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
+ return False
+ if not isinstance(other, type(self)):
+ other = type(self)(other)
+ return (dict((k.lower(), v) for k, v in self.itermerged()) ==
+ dict((k.lower(), v) for k, v in other.itermerged()))
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ if not PY3: # Python 2
+ iterkeys = MutableMapping.iterkeys
+ itervalues = MutableMapping.itervalues
+
+ __marker = object()
+
+ def __len__(self):
+ return len(self._container)
+
+ def __iter__(self):
+ # Only provide the originally cased names
+ for vals in self._container.values():
+ yield vals[0]
+
+ def pop(self, key, default=__marker):
+ '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+ '''
+ # Using the MutableMapping function directly fails due to the private marker.
+ # Using ordinary dict.pop would expose the internal structures.
+ # So let's reinvent the wheel.
+ try:
+ value = self[key]
+ except KeyError:
+ if default is self.__marker:
+ raise
+ return default
+ else:
+ del self[key]
+ return value
+
+ def discard(self, key):
+ try:
+ del self[key]
+ except KeyError:
+ pass
+
+ def add(self, key, val):
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
+ exists.
+
+ >>> headers = HTTPHeaderDict(foo='bar')
+ >>> headers.add('Foo', 'baz')
+ >>> headers['foo']
+ 'bar, baz'
+ """
+ key_lower = key.lower()
+ new_vals = [key, val]
+ # Keep the common case aka no item present as fast as possible
+ vals = self._container.setdefault(key_lower, new_vals)
+ if new_vals is not vals:
+ vals.append(val)
+
+ def extend(self, *args, **kwargs):
+ """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items
+ with self.add instead of self.__setitem__
+ """
+ if len(args) > 1:
+ raise TypeError("extend() takes at most 1 positional "
+ "arguments ({0} given)".format(len(args)))
+ other = args[0] if len(args) >= 1 else ()
+
+ if isinstance(other, HTTPHeaderDict):
+ for key, val in other.iteritems():
+ self.add(key, val)
+ elif isinstance(other, Mapping):
+ for key in other:
+ self.add(key, other[key])
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self.add(key, other[key])
+ else:
+ for key, value in other:
+ self.add(key, value)
+
+ for key, value in kwargs.items():
+ self.add(key, value)
+
+ def getlist(self, key, default=__marker):
+ """Returns a list of all the values for the named field. Returns an
+ empty list if the key doesn't exist."""
+ try:
+ vals = self._container[key.lower()]
+ except KeyError:
+ if default is self.__marker:
+ return []
+ return default
+ else:
+ return vals[1:]
+
+ # Backwards compatibility for httplib
+ getheaders = getlist
+ getallmatchingheaders = getlist
+ iget = getlist
+
+ # Backwards compatibility for http.cookiejar
+ get_all = getlist
+
+ def __repr__(self):
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
+
+ def _copy_from(self, other):
+ for key in other:
+ val = other.getlist(key)
+ if isinstance(val, list):
+ # Don't need to convert tuples
+ val = list(val)
+ self._container[key.lower()] = [key] + val
+
+ def copy(self):
+ clone = type(self)()
+ clone._copy_from(self)
+ return clone
+
+ def iteritems(self):
+ """Iterate over all header lines, including duplicate ones."""
+ for key in self:
+ vals = self._container[key.lower()]
+ for val in vals[1:]:
+ yield vals[0], val
+
+ def itermerged(self):
+ """Iterate over all headers, merging duplicate ones together."""
+ for key in self:
+ val = self._container[key.lower()]
+ yield val[0], ', '.join(val[1:])
+
+ def items(self):
+ return list(self.iteritems())
+
+ @classmethod
+ def from_httplib(cls, message): # Python 2
+ """Read headers from a Python 2 httplib message object."""
+ # python2.7 does not expose a proper API for exporting multiheaders
+ # efficiently. This function re-reads raw lines from the message
+ # object and extracts the multiheaders properly.
+ obs_fold_continued_leaders = (' ', '\t')
+ headers = []
+
+ for line in message.headers:
+ if line.startswith(obs_fold_continued_leaders):
+ if not headers:
+ # We received a header line that starts with OWS as described
+ # in RFC-7230 S3.2.4. This indicates a multiline header, but
+ # there exists no previous header to which we can attach it.
+ raise InvalidHeader(
+ 'Header continuation with no previous header: %s' % line
+ )
+ else:
+ key, value = headers[-1]
+ headers[-1] = (key, value + ' ' + line.strip())
+ continue
+
+ key, value = line.split(':', 1)
+ headers.append((key, value.strip()))
+
+ return cls(headers)
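+
+
+def _obs_fold_example():
+    # Editorial sketch, not part of urllib3: from_httplib() above joins an
+    # obs-fold continuation line (RFC 7230 section 3.2.4) onto the header
+    # preceding it; FakeMessage is illustrative only.
+    class FakeMessage(object):
+        headers = ['X-Long: first part\r\n', ' second part\r\n']
+
+    h = HTTPHeaderDict.from_httplib(FakeMessage())
+    assert h['X-Long'] == 'first part second part'
+    return h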
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connection.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connection.py
new file mode 100644
index 00000000..57c58fed
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connection.py
@@ -0,0 +1,417 @@
+from __future__ import absolute_import
+import datetime
+import logging
+import os
+import socket
+from socket import error as SocketError, timeout as SocketTimeout
+import warnings
+from .packages import six
+from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
+from .packages.six.moves.http_client import HTTPException # noqa: F401
+
+try: # Compiled with SSL?
+ import ssl
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError): # Platform-specific: No SSL.
+ ssl = None
+
+ class BaseSSLError(BaseException):
+ pass
+
+
+try:
+ # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
+ ConnectionError = ConnectionError
+except NameError:
+ # Python 2
+ class ConnectionError(Exception):
+ pass
+
+
+from .exceptions import (
+ NewConnectionError,
+ ConnectTimeoutError,
+ SubjectAltNameWarning,
+ SystemTimeWarning,
+)
+from .packages.ssl_match_hostname import match_hostname, CertificateError
+
+from .util.ssl_ import (
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ assert_fingerprint,
+ create_urllib3_context,
+ ssl_wrap_socket
+)
+
+
+from .util import connection
+
+from ._collections import HTTPHeaderDict
+
+log = logging.getLogger(__name__)
+
+port_by_scheme = {
+ 'http': 80,
+ 'https': 443,
+}
+
+# When updating RECENT_DATE, move it to within two years of the current date,
+# and not less than 6 months ago.
+# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or
+# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months)
+RECENT_DATE = datetime.date(2017, 6, 30)
+
+
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+ pass
+
+
+class HTTPConnection(_HTTPConnection, object):
+ """
+ Based on httplib.HTTPConnection but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+ - ``source_address``: Set the source address for the current connection.
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass::
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
+ default_port = port_by_scheme['http']
+
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified = False
+
+ def __init__(self, *args, **kw):
+ if six.PY3:
+ kw.pop('strict', None)
+
+ # Pre-set source_address.
+ self.source_address = kw.get('source_address')
+
+ #: The socket options provided by the user. If no options are
+ #: provided, we use the default options.
+ self.socket_options = kw.pop('socket_options', self.default_socket_options)
+
+ _HTTPConnection.__init__(self, *args, **kw)
+
+ @property
+ def host(self):
+ """
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+
+ In general, SSL certificates don't include the trailing dot indicating a
+ fully-qualified domain name, and thus, they don't validate properly when
+ checked against a domain name that includes the dot. In addition, some
+ servers may not expect to receive the trailing dot when provided.
+
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
+        lookup with the trailing dot will resolve only the appropriate FQDN,
+ whereas a lookup without a trailing dot will search the system's search domain
+ list. Thus, it's important to keep the original host around for use only in
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
+ actual TCP connection across which we're going to send HTTP requests).
+ """
+ return self._dns_host.rstrip('.')
+
+ @host.setter
+ def host(self, value):
+ """
+ Setter for the `host` property.
+
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
+ only uses `host`, and it seems reasonable that other libraries follow suit.
+ """
+ self._dns_host = value
+
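+    # Editorial note, not part of urllib3: with self.host = 'example.com.',
+    # the getter above hands 'example.com' to the HTTP and certificate
+    # layers, while _dns_host keeps the trailing dot for DNS resolution.
+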
+ def _new_conn(self):
+ """ Establish a socket connection and set nodelay settings on it.
+
+ :return: New socket connection.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw['source_address'] = self.source_address
+
+ if self.socket_options:
+ extra_kw['socket_options'] = self.socket_options
+
+ try:
+ conn = connection.create_connection(
+ (self._dns_host, self.port), self.timeout, **extra_kw)
+
+ except SocketTimeout:
+ raise ConnectTimeoutError(
+ self, "Connection to %s timed out. (connect timeout=%s)" %
+ (self.host, self.timeout))
+
+ except SocketError as e:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e)
+
+ return conn
+
+ def _prepare_conn(self, conn):
+ self.sock = conn
+ # Google App Engine's httplib does not define _tunnel_host
+ if getattr(self, '_tunnel_host', None):
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+
+ def request_chunked(self, method, url, body=None, headers=None):
+ """
+ Alternative to the common request method, which sends the
+ body with chunked encoding and not as one block
+ """
+ headers = HTTPHeaderDict(headers if headers is not None else {})
+ skip_accept_encoding = 'accept-encoding' in headers
+ skip_host = 'host' in headers
+ self.putrequest(
+ method,
+ url,
+ skip_accept_encoding=skip_accept_encoding,
+ skip_host=skip_host
+ )
+ for header, value in headers.items():
+ self.putheader(header, value)
+ if 'transfer-encoding' not in headers:
+ self.putheader('Transfer-Encoding', 'chunked')
+ self.endheaders()
+
+ if body is not None:
+ stringish_types = six.string_types + (bytes,)
+ if isinstance(body, stringish_types):
+ body = (body,)
+ for chunk in body:
+ if not chunk:
+ continue
+ if not isinstance(chunk, bytes):
+ chunk = chunk.encode('utf8')
+ len_str = hex(len(chunk))[2:]
+ self.send(len_str.encode('utf-8'))
+ self.send(b'\r\n')
+ self.send(chunk)
+ self.send(b'\r\n')
+
+ # After the if clause, to always have a closed body
+ self.send(b'0\r\n\r\n')
+
+
+class HTTPSConnection(HTTPConnection):
+ default_port = port_by_scheme['https']
+
+ ssl_version = None
+
+ def __init__(self, host, port=None, key_file=None, cert_file=None,
+ key_password=None, strict=None,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ ssl_context=None, server_hostname=None, **kw):
+
+ HTTPConnection.__init__(self, host, port, strict=strict,
+ timeout=timeout, **kw)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.key_password = key_password
+ self.ssl_context = ssl_context
+ self.server_hostname = server_hostname
+
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
+ # HTTPS requests to go out as HTTP. (See Issue #356)
+ self._protocol = 'https'
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+
+        # Wrap the socket in TLS using the configured (or freshly created
+        # default) SSL context.
+ default_ssl_context = False
+ if self.ssl_context is None:
+ default_ssl_context = True
+ self.ssl_context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(self.ssl_version),
+ cert_reqs=resolve_cert_reqs(self.cert_reqs),
+ )
+
+ # Try to load OS default certs if none are given.
+ # Works well on Windows (requires Python3.4+)
+ context = self.ssl_context
+ if (not self.ca_certs and not self.ca_cert_dir and default_ssl_context
+ and hasattr(context, 'load_default_certs')):
+ context.load_default_certs()
+
+ self.sock = ssl_wrap_socket(
+ sock=conn,
+ keyfile=self.key_file,
+ certfile=self.cert_file,
+ key_password=self.key_password,
+ ssl_context=self.ssl_context,
+ server_hostname=self.server_hostname
+ )
+
+
+class VerifiedHTTPSConnection(HTTPSConnection):
+ """
+    Based on httplib.HTTPSConnection but wraps the socket with
+    SSL certificate verification.
+ """
+ cert_reqs = None
+ ca_certs = None
+ ca_cert_dir = None
+ ssl_version = None
+ assert_fingerprint = None
+
+ def set_cert(self, key_file=None, cert_file=None,
+ cert_reqs=None, key_password=None, ca_certs=None,
+ assert_hostname=None, assert_fingerprint=None,
+ ca_cert_dir=None):
+ """
+ This method should only be called once, before the connection is used.
+ """
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
+ # have an SSLContext object in which case we'll use its verify_mode.
+ if cert_reqs is None:
+ if self.ssl_context is not None:
+ cert_reqs = self.ssl_context.verify_mode
+ else:
+ cert_reqs = resolve_cert_reqs(None)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+
+ def connect(self):
+ # Add certificate verification
+ conn = self._new_conn()
+ hostname = self.host
+
+ # Google App Engine's httplib does not define _tunnel_host
+ if getattr(self, '_tunnel_host', None):
+ self.sock = conn
+ # Calls self._set_hostport(), so self.host is
+ # self._tunnel_host below.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ # Override the host with the one we're requesting data from.
+ hostname = self._tunnel_host
+
+ server_hostname = hostname
+ if self.server_hostname is not None:
+ server_hostname = self.server_hostname
+
+ is_time_off = datetime.date.today() < RECENT_DATE
+ if is_time_off:
+ warnings.warn((
+ 'System time is way off (before {0}). This will probably '
+ 'lead to SSL verification errors').format(RECENT_DATE),
+ SystemTimeWarning
+ )
+
+        # Wrap the socket in TLS, verifying the peer against the configured
+        # root certificates.
+ default_ssl_context = False
+ if self.ssl_context is None:
+ default_ssl_context = True
+ self.ssl_context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(self.ssl_version),
+ cert_reqs=resolve_cert_reqs(self.cert_reqs),
+ )
+
+ context = self.ssl_context
+ context.verify_mode = resolve_cert_reqs(self.cert_reqs)
+
+ # Try to load OS default certs if none are given.
+ # Works well on Windows (requires Python3.4+)
+ if (not self.ca_certs and not self.ca_cert_dir and default_ssl_context
+ and hasattr(context, 'load_default_certs')):
+ context.load_default_certs()
+
+ self.sock = ssl_wrap_socket(
+ sock=conn,
+ keyfile=self.key_file,
+ certfile=self.cert_file,
+ key_password=self.key_password,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ server_hostname=server_hostname,
+ ssl_context=context)
+
+ if self.assert_fingerprint:
+ assert_fingerprint(self.sock.getpeercert(binary_form=True),
+ self.assert_fingerprint)
+ elif context.verify_mode != ssl.CERT_NONE \
+ and not getattr(context, 'check_hostname', False) \
+ and self.assert_hostname is not False:
+ # While urllib3 attempts to always turn off hostname matching from
+ # the TLS library, this cannot always be done. So we check whether
+ # the TLS Library still thinks it's matching hostnames.
+ cert = self.sock.getpeercert()
+ if not cert.get('subjectAltName', ()):
+ warnings.warn((
+ 'Certificate for {0} has no `subjectAltName`, falling back to check for a '
+ '`commonName` for now. This feature is being removed by major browsers and '
+ 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
+ 'for details.)'.format(hostname)),
+ SubjectAltNameWarning
+ )
+ _match_hostname(cert, self.assert_hostname or server_hostname)
+
+ self.is_verified = (
+ context.verify_mode == ssl.CERT_REQUIRED or
+ self.assert_fingerprint is not None
+ )
+
+
+def _match_hostname(cert, asserted_hostname):
+ try:
+ match_hostname(cert, asserted_hostname)
+ except CertificateError as e:
+ log.error(
+ 'Certificate did not match expected hostname: %s. '
+ 'Certificate: %s', asserted_hostname, cert
+ )
+ # Add cert to exception and reraise so client code can inspect
+ # the cert when catching the exception, if they want to
+ e._peer_cert = cert
+ raise
+
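+# Usage sketch: callers catching the re-raised error can inspect the peer
+# certificate (the hostname below is illustrative):
+#
+#     try:
+#         _match_hostname(cert, 'example.com')
+#     except CertificateError as e:
+#         log.debug('Offending certificate: %r', e._peer_cert)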
+
+if ssl:
+ # Make a copy for testing.
+ UnverifiedHTTPSConnection = HTTPSConnection
+ HTTPSConnection = VerifiedHTTPSConnection
+else:
+ HTTPSConnection = DummyConnection
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connectionpool.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connectionpool.py
new file mode 100644
index 00000000..157568a3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/connectionpool.py
@@ -0,0 +1,897 @@
+from __future__ import absolute_import
+import errno
+import logging
+import sys
+import warnings
+
+from socket import error as SocketError, timeout as SocketTimeout
+import socket
+
+
+from .exceptions import (
+ ClosedPoolError,
+ ProtocolError,
+ EmptyPoolError,
+ HeaderParsingError,
+ HostChangedError,
+ LocationValueError,
+ MaxRetryError,
+ ProxyError,
+ ReadTimeoutError,
+ SSLError,
+ TimeoutError,
+ InsecureRequestWarning,
+ NewConnectionError,
+)
+from .packages.ssl_match_hostname import CertificateError
+from .packages import six
+from .packages.six.moves import queue
+from .packages.rfc3986.normalizers import normalize_host
+from .connection import (
+ port_by_scheme,
+ DummyConnection,
+ HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
+ HTTPException, BaseSSLError,
+)
+from .request import RequestMethods
+from .response import HTTPResponse
+
+from .util.connection import is_connection_dropped
+from .util.request import set_file_position
+from .util.response import assert_header_parsing
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host, Url, NORMALIZABLE_SCHEMES
+from .util.queue import LifoQueue
+
+
+xrange = six.moves.xrange
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+
+# Pool objects
+class ConnectionPool(object):
+ """
+ Base class for all connection pools, such as
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+ """
+
+ scheme = None
+ QueueCls = LifoQueue
+
+ def __init__(self, host, port=None):
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ self.host = _normalize_host(host, scheme=self.scheme)
+ self._proxy_host = host.lower()
+ self.port = port
+
+ def __str__(self):
+ return '%s(host=%r, port=%r)' % (type(self).__name__,
+ self.host, self.port)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ pass
+
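+# Usage sketch: pools are context managers and close their pooled connections
+# on exit (the host below is illustrative):
+#
+#     with HTTPConnectionPool('example.com', maxsize=2) as pool:
+#         r = pool.request('GET', '/')
+#         print(r.status)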
+
+# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
+_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`httplib.HTTPConnection`.
+
+ :param strict:
+ Causes BadStatusLine to be raised if the status line can't be parsed
+ as a valid HTTP/1.0 or 1.1 status line, passed into
+ :class:`httplib.HTTPConnection`.
+
+ .. note::
+ Only works in Python 2. This parameter is ignored in Python 3.
+
+ :param timeout:
+ Socket timeout in seconds for each individual connection. This can
+ be a float or integer, which sets the timeout for the HTTP request,
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
+        fine-grained control over request timeouts. Once the constructor has
+        run, this is always a `urllib3.util.Timeout` object.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to False, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param retries:
+ Retry configuration to use by default with requests in this pool.
+
+ :param _proxy:
+        Parsed proxy URL; should not be used directly. Instead, see
+        :class:`urllib3.connectionpool.ProxyManager`.
+
+ :param _proxy_headers:
+        A dictionary with proxy headers; should not be used directly.
+        Instead, see :class:`urllib3.connectionpool.ProxyManager`.
+
+ :param \\**conn_kw:
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+ :class:`urllib3.connection.HTTPSConnection` instances.
+ """
+
+ scheme = 'http'
+ ConnectionCls = HTTPConnection
+ ResponseCls = HTTPResponse
+
+ def __init__(self, host, port=None, strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
+ headers=None, retries=None,
+ _proxy=None, _proxy_headers=None,
+ **conn_kw):
+ ConnectionPool.__init__(self, host, port)
+ RequestMethods.__init__(self, headers)
+
+ self.strict = strict
+
+ if not isinstance(timeout, Timeout):
+ timeout = Timeout.from_float(timeout)
+
+ if retries is None:
+ retries = Retry.DEFAULT
+
+ self.timeout = timeout
+ self.retries = retries
+
+ self.pool = self.QueueCls(maxsize)
+ self.block = block
+
+ self.proxy = _proxy
+ self.proxy_headers = _proxy_headers or {}
+
+ # Fill the queue up so that doing get() on it will block properly
+ for _ in xrange(maxsize):
+ self.pool.put(None)
+
+ # These are mostly for testing and debugging purposes.
+ self.num_connections = 0
+ self.num_requests = 0
+ self.conn_kw = conn_kw
+
+ if self.proxy:
+            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+            # The user may have supplied their own socket options, so only fall
+            # back to an empty list (which leaves Nagle enabled) when none were
+            # given.
+ self.conn_kw.setdefault('socket_options', [])
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.debug("Starting new HTTP connection (%d): %s:%s",
+ self.num_connections, self.host, self.port or "80")
+
+ conn = self.ConnectionCls(host=self.host, port=self.port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict, **self.conn_kw)
+ return conn
+
+ def _get_conn(self, timeout=None):
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ except AttributeError: # self.pool is None
+ raise ClosedPoolError(self, "Pool is closed.")
+
+ except queue.Empty:
+ if self.block:
+ raise EmptyPoolError(self,
+ "Pool reached maximum size and no more "
+ "connections are allowed.")
+ pass # Oh well, we'll create a new connection then
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and is_connection_dropped(conn):
+ log.debug("Resetting dropped connection: %s", self.host)
+ conn.close()
+ if getattr(conn, 'auto_open', 1) == 0:
+ # This is a proxied connection that has been mutated by
+ # httplib._tunnel() and cannot be reused (since it would
+ # attempt to bypass the proxy)
+ conn = None
+
+ return conn or self._new_conn()
+
+ def _put_conn(self, conn):
+ """
+ Put a connection back into the pool.
+
+ :param conn:
+ Connection object for the current host and port as returned by
+ :meth:`._new_conn` or :meth:`._get_conn`.
+
+ If the pool is already full, the connection is closed and discarded
+ because we exceeded maxsize. If connections are discarded frequently,
+ then maxsize should be increased.
+
+ If the pool is closed, then the connection will be closed and discarded.
+ """
+ try:
+ self.pool.put(conn, block=False)
+ return # Everything is dandy, done.
+ except AttributeError:
+ # self.pool is None.
+ pass
+ except queue.Full:
+ # This should never happen if self.block == True
+ log.warning(
+ "Connection pool is full, discarding connection: %s",
+ self.host)
+
+ # Connection never got put back into the pool, close it.
+ if conn:
+ conn.close()
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ pass
+
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
+ def _get_timeout(self, timeout):
+ """ Helper that always returns a :class:`urllib3.util.Timeout` """
+ if timeout is _Default:
+ return self.timeout.clone()
+
+ if isinstance(timeout, Timeout):
+ return timeout.clone()
+ else:
+ # User passed us an int/float. This is for backwards compatibility,
+ # can be removed later
+ return Timeout.from_float(timeout)
+
+ def _raise_timeout(self, err, url, timeout_value):
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+        # See the comment in _make_request() about EAGAIN in Python 3. In
+        # Python 2 we have to catch EAGAIN explicitly and convert it to a
+        # read timeout.
+ if hasattr(err, 'errno') and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # Catch possible read timeouts thrown as SSL errors. If not the
+ # case, rethrow the original. We need to do this because of:
+ # http://bugs.python.org/issue10272
+ if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python < 2.7.4
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
+ **httplib_request_kw):
+ """
+ Perform a request on a given urllib connection object taken from our
+ pool.
+
+ :param conn:
+ a connection from one of our connection pools
+
+ :param timeout:
+ Socket timeout in seconds for the request. This can be a
+ float or integer, which will set the same timeout value for
+ the socket connect and the socket read, or an instance of
+ :class:`urllib3.util.Timeout`, which gives you more fine-grained
+ control over your timeouts.
+ """
+ self.num_requests += 1
+
+ timeout_obj = self._get_timeout(timeout)
+ timeout_obj.start_connect()
+ conn.timeout = timeout_obj.connect_timeout
+
+ # Trigger any extra validation we need to do.
+ try:
+ self._validate_conn(conn)
+ except (SocketTimeout, BaseSSLError) as e:
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+ raise
+
+ # conn.request() calls httplib.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ if chunked:
+ conn.request_chunked(method, url, **httplib_request_kw)
+ else:
+ conn.request(method, url, **httplib_request_kw)
+
+ # Reset the timeout for the recv() on the socket
+ read_timeout = timeout_obj.read_timeout
+
+ # App Engine doesn't have a sock attr
+ if getattr(conn, 'sock', None):
+ # In Python 3 socket.py will catch EAGAIN and return None when you
+ # try and read into the file pointer created by http.client, which
+ # instead raises a BadStatusLine exception. Instead of catching
+ # the exception and assuming all BadStatusLine exceptions are read
+ # timeouts, check for a zero timeout before making the request.
+ if read_timeout == 0:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout)
+ if read_timeout is Timeout.DEFAULT_TIMEOUT:
+ conn.sock.settimeout(socket.getdefaulttimeout())
+ else: # None or a value
+ conn.sock.settimeout(read_timeout)
+
+ # Receive the response from the server
+ try:
+ try:
+ # Python 2.7, use buffering of HTTP responses
+ httplib_response = conn.getresponse(buffering=True)
+ except TypeError:
+ # Python 3
+ try:
+ httplib_response = conn.getresponse()
+ except Exception as e:
+ # Remove the TypeError from the exception chain in Python 3;
+ # otherwise it looks like a programming error was the cause.
+ six.raise_from(e, None)
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+ raise
+
+ # AppEngine doesn't have a version attr.
+ http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
+ log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port,
+ method, url, http_version, httplib_response.status,
+ httplib_response.length)
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
+ log.warning(
+ 'Failed to parse headers (url=%s): %s',
+ self._absolute_url(url), hpe, exc_info=True)
+
+ return httplib_response
+
+ def _absolute_url(self, path):
+ return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ if self.pool is None:
+ return
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ try:
+ while True:
+ conn = old_pool.get(block=False)
+ if conn:
+ conn.close()
+
+ except queue.Empty:
+ pass # Done.
+
+ def is_same_host(self, url):
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
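+
+        Example (hosts are illustrative)::
+
+            >>> pool = HTTPConnectionPool('example.com')
+            >>> pool.is_same_host('http://example.com:80/index.html')
+            True
+            >>> pool.is_same_host('http://other.example/')
+            False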
+ """
+ if url.startswith('/'):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, host, port = get_host(url)
+ if host is not None:
+ host = _normalize_host(host, scheme=scheme)
+
+ # Use explicit default port for comparison when none is given
+ if self.port and not port:
+ port = port_by_scheme.get(scheme)
+ elif not self.port and port == port_by_scheme.get(scheme):
+ port = None
+
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
+
+ def urlopen(self, method, url, body=None, headers=None, retries=None,
+ redirect=True, assert_same_host=True, timeout=_Default,
+ pool_timeout=None, release_conn=None, chunked=False,
+ body_pos=None, **response_kw):
+ """
+ Get a connection from the pool and perform an HTTP request. This is the
+ lowest level call for making a request, so you'll need to specify all
+ the raw details.
+
+ .. note::
+
+ More commonly, it's appropriate to use a convenience method provided
+ by :class:`.RequestMethods`, such as :meth:`request`.
+
+ .. note::
+
+ `release_conn` will only behave as expected if
+ `preload_content=False` because we want to make
+ `preload_content=False` the default behaviour someday soon without
+ breaking backwards compatibility.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+        :param body:
+            Data to send in the request body (e.g. for POST requests); may
+            be str, bytes, a file-like object, or an iterable of bytes.
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ Pass ``None`` to retry until you receive a response. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
+ :param redirect:
+ If True, automatically handle redirects (status codes 301, 302,
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
+ will disable redirect, too.
+
+        :param assert_same_host:
+            If ``True``, ensure that the host of each request matches the
+            pool's host, raising HostChangedError otherwise. When ``False``,
+            you can use the pool on an HTTP proxy and request foreign hosts.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one
+ request. It may be a float (in seconds) or an instance of
+ :class:`urllib3.util.Timeout`.
+
+ :param pool_timeout:
+ If set and the pool is set to block=True, then this method will
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+ connection is available within the time period.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of
+ ``response_kw.get('preload_content', True)``.
+
+ :param chunked:
+ If True, urllib3 will send the body using chunked transfer
+ encoding. Otherwise, urllib3 will send the body using the standard
+ content-length form. Defaults to False.
+
+ :param int body_pos:
+ Position to seek to in file-like body in the event of a retry or
+ redirect. Typically this won't need to be set because urllib3 will
+ auto-populate the value when needed.
+
+ :param \\**response_kw:
+ Additional parameters are passed to
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
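+
+        Example (a sketch; the host and retry policy are illustrative)::
+
+            >>> pool = HTTPConnectionPool('example.com', maxsize=1)
+            >>> r = pool.urlopen('GET', '/', retries=Retry(3, redirect=2))
+            >>> r.status
+            200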
+ """
+ if headers is None:
+ headers = self.headers
+
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if release_conn is None:
+ release_conn = response_kw.get('preload_content', True)
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ raise HostChangedError(self, url, retries)
+
+ conn = None
+
+ # Track whether `conn` needs to be released before
+ # returning/raising/recursing. Update this variable if necessary, and
+ # leave `release_conn` constant throughout the function. That way, if
+ # the function recurses, the original value of `release_conn` will be
+ # passed down into the recursive call, and its value will be respected.
+ #
+ # See issue #651 [1] for details.
+ #
+ # [1] <https://github.com/shazow/urllib3/issues/651>
+ release_this_conn = release_conn
+
+ # Merge the proxy headers. Only do this in HTTP. We have to copy the
+ # headers dict so we can safely change it without those changes being
+ # reflected in anyone else's copy.
+ if self.scheme == 'http':
+ headers = headers.copy()
+ headers.update(self.proxy_headers)
+
+ # Must keep the exception bound to a separate variable or else Python 3
+ # complains about UnboundLocalError.
+ err = None
+
+ # Keep track of whether we cleanly exited the except block. This
+ # ensures we do proper cleanup in finally.
+ clean_exit = False
+
+ # Rewind body position, if needed. Record current position
+ # for future rewinds in the event of a redirect/retry.
+ body_pos = set_file_position(body, body_pos)
+
+ try:
+ # Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
+ if is_new_proxy_conn:
+ self._prepare_proxy(conn)
+
+ # Make the request on the httplib connection object.
+ httplib_response = self._make_request(conn, method, url,
+ timeout=timeout_obj,
+ body=body, headers=headers,
+ chunked=chunked)
+
+ # If we're going to release the connection in ``finally:``, then
+ # the response doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = conn if not release_conn else None
+
+ # Pass method to Response for length checking
+ response_kw['request_method'] = method
+
+ # Import httplib's response into our own wrapper object
+ response = self.ResponseCls.from_httplib(httplib_response,
+ pool=self,
+ connection=response_conn,
+ retries=retries,
+ **response_kw)
+
+ # Everything went great!
+ clean_exit = True
+
+ except queue.Empty:
+ # Timed out by queue.
+ raise EmptyPoolError(self, "No pool connections are available.")
+
+ except (TimeoutError, HTTPException, SocketError, ProtocolError,
+ BaseSSLError, SSLError, CertificateError) as e:
+ # Discard the connection for these exceptions. It will be
+ # replaced during the next _get_conn() call.
+ clean_exit = False
+ if isinstance(e, (BaseSSLError, CertificateError)):
+ e = SSLError(e)
+ elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
+ e = ProxyError('Cannot connect to proxy.', e)
+ elif isinstance(e, (SocketError, HTTPException)):
+ e = ProtocolError('Connection aborted.', e)
+
+ retries = retries.increment(method, url, error=e, _pool=self,
+ _stacktrace=sys.exc_info()[2])
+ retries.sleep()
+
+ # Keep track of the error for the retry warning.
+ err = e
+
+ finally:
+ if not clean_exit:
+ # We hit some kind of exception, handled or otherwise. We need
+ # to throw the connection away unless explicitly told not to.
+ # Close the connection, set the variable to None, and make sure
+ # we put the None back in the pool to avoid leaking it.
+ conn = conn and conn.close()
+ release_this_conn = True
+
+ if release_this_conn:
+ # Put the connection back to be reused. If the connection is
+ # expired then it will be None, which will get replaced with a
+ # fresh connection during _get_conn.
+ self._put_conn(conn)
+
+ if not conn:
+ # Try again
+ log.warning("Retrying (%r) after connection "
+ "broken by '%r': %s", retries, err, url)
+ return self.urlopen(method, url, body, headers, retries,
+ redirect, assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, body_pos=body_pos,
+ **response_kw)
+
+ def drain_and_release_conn(response):
+ try:
+ # discard any remaining response body, the connection will be
+ # released back to the pool once the entire response is read
+ response.read()
+ except (TimeoutError, HTTPException, SocketError, ProtocolError,
+ BaseSSLError, SSLError):
+ pass
+
+ # Handle redirect?
+ redirect_location = redirect and response.get_redirect_location()
+ if redirect_location:
+ if response.status == 303:
+ method = 'GET'
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ # Drain and release the connection for this response, since
+ # we're not returning it to be released manually.
+ drain_and_release_conn(response)
+ raise
+ return response
+
+ # drain and return the connection to the pool before recursing
+ drain_and_release_conn(response)
+
+ retries.sleep_for_retry(response)
+ log.debug("Redirecting %s -> %s", url, redirect_location)
+ return self.urlopen(
+ method, redirect_location, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, body_pos=body_pos,
+ **response_kw)
+
+ # Check if we should retry the HTTP response.
+ has_retry_after = bool(response.getheader('Retry-After'))
+ if retries.is_retry(method, response.status, has_retry_after):
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_status:
+ # Drain and release the connection for this response, since
+ # we're not returning it to be released manually.
+ drain_and_release_conn(response)
+ raise
+ return response
+
+ # drain and return the connection to the pool before recursing
+ drain_and_release_conn(response)
+
+ retries.sleep(response)
+ log.debug("Retry: %s", url)
+ return self.urlopen(
+ method, url, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ body_pos=body_pos, **response_kw)
+
+ return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ When Python is compiled with the :mod:`ssl` module, then
+ :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
+ instead of :class:`.HTTPSConnection`.
+
+ :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
+ ``assert_hostname`` and ``host`` in this order to verify connections.
+ If ``assert_hostname`` is False, no verification is done.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+ ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+ the connection socket into an SSL socket.
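+
+    Example (a sketch; the host and CA bundle path are illustrative)::
+
+        >>> pool = HTTPSConnectionPool('example.com', port=443,
+        ...                            cert_reqs='CERT_REQUIRED',
+        ...                            ca_certs='/etc/ssl/certs/ca-bundle.crt')
+        >>> r = pool.request('GET', '/')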
+ """
+
+ scheme = 'https'
+ ConnectionCls = HTTPSConnection
+
+ def __init__(self, host, port=None,
+ strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
+ block=False, headers=None, retries=None,
+ _proxy=None, _proxy_headers=None,
+ key_file=None, cert_file=None, cert_reqs=None,
+ key_password=None, ca_certs=None, ssl_version=None,
+ assert_hostname=None, assert_fingerprint=None,
+ ca_cert_dir=None, **conn_kw):
+
+ HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
+ block, headers, retries, _proxy, _proxy_headers,
+ **conn_kw)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.ca_certs = ca_certs
+ self.ca_cert_dir = ca_cert_dir
+ self.ssl_version = ssl_version
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+
+ def _prepare_conn(self, conn):
+ """
+ Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
+ and establish the tunnel if proxy is used.
+ """
+
+ if isinstance(conn, VerifiedHTTPSConnection):
+ conn.set_cert(key_file=self.key_file,
+ key_password=self.key_password,
+ cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint)
+ conn.ssl_version = self.ssl_version
+ return conn
+
+ def _prepare_proxy(self, conn):
+ """
+        Establish the tunnel connection early, because otherwise httplib
+        would improperly set the Host: header to the proxy's IP:port.
+ """
+ conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+ conn.connect()
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPSConnection`.
+ """
+ self.num_connections += 1
+ log.debug("Starting new HTTPS connection (%d): %s:%s",
+ self.num_connections, self.host, self.port or "443")
+
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
+ raise SSLError("Can't connect to HTTPS URL because the SSL "
+ "module is not available.")
+
+ actual_host = self.host
+ actual_port = self.port
+ if self.proxy is not None:
+ actual_host = self.proxy.host
+ actual_port = self.proxy.port
+
+ conn = self.ConnectionCls(host=actual_host, port=actual_port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict, cert_file=self.cert_file,
+ key_file=self.key_file, key_password=self.key_password,
+ **self.conn_kw)
+
+ return self._prepare_conn(conn)
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ super(HTTPSConnectionPool, self)._validate_conn(conn)
+
+ # Force connect early to allow us to validate the connection.
+ if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
+ conn.connect()
+
+ if not conn.is_verified:
+ warnings.warn((
+ 'Unverified HTTPS request is being made. '
+ 'Adding certificate verification is strongly advised. See: '
+ 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
+ '#ssl-warnings'),
+ InsecureRequestWarning)
+
+
+def connection_from_url(url, **kw):
+ """
+    Given a url, return a :class:`.ConnectionPool` instance for its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+    of the url before creating a :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \\**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, host, port = get_host(url)
+ port = port or port_by_scheme.get(scheme, 80)
+ if scheme == 'https':
+ return HTTPSConnectionPool(host, port=port, **kw)
+ else:
+ return HTTPConnectionPool(host, port=port, **kw)
+
+
+def _normalize_host(host, scheme):
+ """
+ Normalize hosts for comparisons and use with sockets.
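+
+    Example::
+
+        >>> _normalize_host('[2001:DB8::1]', scheme='http')
+        '2001:db8::1'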
+ """
+
+ # httplib doesn't like it when we include brackets in IPv6 addresses
+ # Specifically, if we include brackets but also pass the port then
+ # httplib crazily doubles up the square brackets on the Host header.
+ # Instead, we need to make sure we never pass ``None`` as the port.
+ # However, for backward compatibility reasons we can't actually
+ # *assert* that. See http://bugs.python.org/issue28539
+ if host.startswith('[') and host.endswith(']'):
+ host = host.strip('[]')
+ if scheme in NORMALIZABLE_SCHEMES:
+ host = normalize_host(host)
+ return host
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..cc3c1476
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc
new file mode 100644
index 00000000..089018cf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc
new file mode 100644
index 00000000..12138869
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc
new file mode 100644
index 00000000..69eaa274
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc
new file mode 100644
index 00000000..1ae8648f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc
new file mode 100644
index 00000000..65274966
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc
new file mode 100644
index 00000000..3cda2194
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py
new file mode 100644
index 00000000..f3e00942
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py
@@ -0,0 +1,30 @@
+"""
+This module provides means to detect the App Engine environment.
+"""
+
+import os
+
+
+def is_appengine():
+ return (is_local_appengine() or
+ is_prod_appengine() or
+ is_prod_appengine_mvms())
+
+
+def is_appengine_sandbox():
+ return is_appengine() and not is_prod_appengine_mvms()
+
+
+def is_local_appengine():
+ return ('APPENGINE_RUNTIME' in os.environ and
+ 'Development/' in os.environ['SERVER_SOFTWARE'])
+
+
+def is_prod_appengine():
+ return ('APPENGINE_RUNTIME' in os.environ and
+ 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
+ not is_prod_appengine_mvms())
+
+
+def is_prod_appengine_mvms():
+ return os.environ.get('GAE_VM', False) == 'true'
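+
+
+# Usage sketch: callers typically branch on these predicates, for example:
+#
+#     if is_appengine_sandbox():
+#         ...  # avoid socket APIs that are unavailable in the sandbox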
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..646ccac4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc
new file mode 100644
index 00000000..873fe675
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc
new file mode 100644
index 00000000..f9538aa9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
new file mode 100644
index 00000000..be342153
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
@@ -0,0 +1,593 @@
+"""
+This module uses ctypes to bind a whole bunch of functions and constants from
+SecureTransport. The goal here is to provide the low-level API to
+SecureTransport. These are essentially the C-level functions and constants, and
+they're pretty gross to work with.
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+ Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the "Software"),
+ to deal in the Software without restriction, including without limitation
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import platform
+from ctypes.util import find_library
+from ctypes import (
+ c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long,
+ c_bool
+)
+from ctypes import CDLL, POINTER, CFUNCTYPE
+
+
+security_path = find_library('Security')
+if not security_path:
+ raise ImportError('The library Security could not be found')
+
+
+core_foundation_path = find_library('CoreFoundation')
+if not core_foundation_path:
+ raise ImportError('The library CoreFoundation could not be found')
+
+
+version = platform.mac_ver()[0]
+version_info = tuple(map(int, version.split('.')))
+if version_info < (10, 8):
+ raise OSError(
+ 'Only OS X 10.8 and newer are supported, not %s.%s' % (
+ version_info[0], version_info[1]
+ )
+ )
+
+Security = CDLL(security_path, use_errno=True)
+CoreFoundation = CDLL(core_foundation_path, use_errno=True)
+
+Boolean = c_bool
+CFIndex = c_long
+CFStringEncoding = c_uint32
+CFData = c_void_p
+CFString = c_void_p
+CFArray = c_void_p
+CFMutableArray = c_void_p
+CFDictionary = c_void_p
+CFError = c_void_p
+CFType = c_void_p
+CFTypeID = c_ulong
+
+CFTypeRef = POINTER(CFType)
+CFAllocatorRef = c_void_p
+
+OSStatus = c_int32
+
+CFDataRef = POINTER(CFData)
+CFStringRef = POINTER(CFString)
+CFArrayRef = POINTER(CFArray)
+CFMutableArrayRef = POINTER(CFMutableArray)
+CFDictionaryRef = POINTER(CFDictionary)
+CFArrayCallBacks = c_void_p
+CFDictionaryKeyCallBacks = c_void_p
+CFDictionaryValueCallBacks = c_void_p
+
+SecCertificateRef = POINTER(c_void_p)
+SecExternalFormat = c_uint32
+SecExternalItemType = c_uint32
+SecIdentityRef = POINTER(c_void_p)
+SecItemImportExportFlags = c_uint32
+SecItemImportExportKeyParameters = c_void_p
+SecKeychainRef = POINTER(c_void_p)
+SSLProtocol = c_uint32
+SSLCipherSuite = c_uint32
+SSLContextRef = POINTER(c_void_p)
+SecTrustRef = POINTER(c_void_p)
+SSLConnectionRef = c_uint32
+SecTrustResultType = c_uint32
+SecTrustOptionFlags = c_uint32
+SSLProtocolSide = c_uint32
+SSLConnectionType = c_uint32
+SSLSessionOption = c_uint32
+
+
+try:
+ Security.SecItemImport.argtypes = [
+ CFDataRef,
+ CFStringRef,
+ POINTER(SecExternalFormat),
+ POINTER(SecExternalItemType),
+ SecItemImportExportFlags,
+ POINTER(SecItemImportExportKeyParameters),
+ SecKeychainRef,
+ POINTER(CFArrayRef),
+ ]
+ Security.SecItemImport.restype = OSStatus
+
+ Security.SecCertificateGetTypeID.argtypes = []
+ Security.SecCertificateGetTypeID.restype = CFTypeID
+
+ Security.SecIdentityGetTypeID.argtypes = []
+ Security.SecIdentityGetTypeID.restype = CFTypeID
+
+ Security.SecKeyGetTypeID.argtypes = []
+ Security.SecKeyGetTypeID.restype = CFTypeID
+
+ Security.SecCertificateCreateWithData.argtypes = [
+ CFAllocatorRef,
+ CFDataRef
+ ]
+ Security.SecCertificateCreateWithData.restype = SecCertificateRef
+
+ Security.SecCertificateCopyData.argtypes = [
+ SecCertificateRef
+ ]
+ Security.SecCertificateCopyData.restype = CFDataRef
+
+ Security.SecCopyErrorMessageString.argtypes = [
+ OSStatus,
+ c_void_p
+ ]
+ Security.SecCopyErrorMessageString.restype = CFStringRef
+
+ Security.SecIdentityCreateWithCertificate.argtypes = [
+ CFTypeRef,
+ SecCertificateRef,
+ POINTER(SecIdentityRef)
+ ]
+ Security.SecIdentityCreateWithCertificate.restype = OSStatus
+
+ Security.SecKeychainCreate.argtypes = [
+ c_char_p,
+ c_uint32,
+ c_void_p,
+ Boolean,
+ c_void_p,
+ POINTER(SecKeychainRef)
+ ]
+ Security.SecKeychainCreate.restype = OSStatus
+
+ Security.SecKeychainDelete.argtypes = [
+ SecKeychainRef
+ ]
+ Security.SecKeychainDelete.restype = OSStatus
+
+ Security.SecPKCS12Import.argtypes = [
+ CFDataRef,
+ CFDictionaryRef,
+ POINTER(CFArrayRef)
+ ]
+ Security.SecPKCS12Import.restype = OSStatus
+
+ SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
+ SSLWriteFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t))
+
+ Security.SSLSetIOFuncs.argtypes = [
+ SSLContextRef,
+ SSLReadFunc,
+ SSLWriteFunc
+ ]
+ Security.SSLSetIOFuncs.restype = OSStatus
+
+ Security.SSLSetPeerID.argtypes = [
+ SSLContextRef,
+ c_char_p,
+ c_size_t
+ ]
+ Security.SSLSetPeerID.restype = OSStatus
+
+ Security.SSLSetCertificate.argtypes = [
+ SSLContextRef,
+ CFArrayRef
+ ]
+ Security.SSLSetCertificate.restype = OSStatus
+
+ Security.SSLSetCertificateAuthorities.argtypes = [
+ SSLContextRef,
+ CFTypeRef,
+ Boolean
+ ]
+ Security.SSLSetCertificateAuthorities.restype = OSStatus
+
+ Security.SSLSetConnection.argtypes = [
+ SSLContextRef,
+ SSLConnectionRef
+ ]
+ Security.SSLSetConnection.restype = OSStatus
+
+ Security.SSLSetPeerDomainName.argtypes = [
+ SSLContextRef,
+ c_char_p,
+ c_size_t
+ ]
+ Security.SSLSetPeerDomainName.restype = OSStatus
+
+ Security.SSLHandshake.argtypes = [
+ SSLContextRef
+ ]
+ Security.SSLHandshake.restype = OSStatus
+
+ Security.SSLRead.argtypes = [
+ SSLContextRef,
+ c_char_p,
+ c_size_t,
+ POINTER(c_size_t)
+ ]
+ Security.SSLRead.restype = OSStatus
+
+ Security.SSLWrite.argtypes = [
+ SSLContextRef,
+ c_char_p,
+ c_size_t,
+ POINTER(c_size_t)
+ ]
+ Security.SSLWrite.restype = OSStatus
+
+ Security.SSLClose.argtypes = [
+ SSLContextRef
+ ]
+ Security.SSLClose.restype = OSStatus
+
+ Security.SSLGetNumberSupportedCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(c_size_t)
+ ]
+ Security.SSLGetNumberSupportedCiphers.restype = OSStatus
+
+ Security.SSLGetSupportedCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ POINTER(c_size_t)
+ ]
+ Security.SSLGetSupportedCiphers.restype = OSStatus
+
+ Security.SSLSetEnabledCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ c_size_t
+ ]
+ Security.SSLSetEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetNumberEnabledCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(c_size_t)
+ ]
+ Security.SSLGetNumberEnabledCiphers.restype = OSStatus
+
+ Security.SSLGetEnabledCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ POINTER(c_size_t)
+ ]
+ Security.SSLGetEnabledCiphers.restype = OSStatus
+
+ Security.SSLGetNegotiatedCipher.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite)
+ ]
+ Security.SSLGetNegotiatedCipher.restype = OSStatus
+
+ Security.SSLGetNegotiatedProtocolVersion.argtypes = [
+ SSLContextRef,
+ POINTER(SSLProtocol)
+ ]
+ Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
+
+ Security.SSLCopyPeerTrust.argtypes = [
+ SSLContextRef,
+ POINTER(SecTrustRef)
+ ]
+ Security.SSLCopyPeerTrust.restype = OSStatus
+
+ Security.SecTrustSetAnchorCertificates.argtypes = [
+ SecTrustRef,
+ CFArrayRef
+ ]
+ Security.SecTrustSetAnchorCertificates.restype = OSStatus
+
+    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [
+ SecTrustRef,
+ Boolean
+ ]
+ Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
+
+ Security.SecTrustEvaluate.argtypes = [
+ SecTrustRef,
+ POINTER(SecTrustResultType)
+ ]
+ Security.SecTrustEvaluate.restype = OSStatus
+
+ Security.SecTrustGetCertificateCount.argtypes = [
+ SecTrustRef
+ ]
+ Security.SecTrustGetCertificateCount.restype = CFIndex
+
+ Security.SecTrustGetCertificateAtIndex.argtypes = [
+ SecTrustRef,
+ CFIndex
+ ]
+ Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
+
+ Security.SSLCreateContext.argtypes = [
+ CFAllocatorRef,
+ SSLProtocolSide,
+ SSLConnectionType
+ ]
+ Security.SSLCreateContext.restype = SSLContextRef
+
+ Security.SSLSetSessionOption.argtypes = [
+ SSLContextRef,
+ SSLSessionOption,
+ Boolean
+ ]
+ Security.SSLSetSessionOption.restype = OSStatus
+
+ Security.SSLSetProtocolVersionMin.argtypes = [
+ SSLContextRef,
+ SSLProtocol
+ ]
+ Security.SSLSetProtocolVersionMin.restype = OSStatus
+
+ Security.SSLSetProtocolVersionMax.argtypes = [
+ SSLContextRef,
+ SSLProtocol
+ ]
+ Security.SSLSetProtocolVersionMax.restype = OSStatus
+
+ Security.SecCopyErrorMessageString.argtypes = [
+ OSStatus,
+ c_void_p
+ ]
+ Security.SecCopyErrorMessageString.restype = CFStringRef
+
+ Security.SSLReadFunc = SSLReadFunc
+ Security.SSLWriteFunc = SSLWriteFunc
+ Security.SSLContextRef = SSLContextRef
+ Security.SSLProtocol = SSLProtocol
+ Security.SSLCipherSuite = SSLCipherSuite
+ Security.SecIdentityRef = SecIdentityRef
+ Security.SecKeychainRef = SecKeychainRef
+ Security.SecTrustRef = SecTrustRef
+ Security.SecTrustResultType = SecTrustResultType
+ Security.SecExternalFormat = SecExternalFormat
+ Security.OSStatus = OSStatus
+
+ Security.kSecImportExportPassphrase = CFStringRef.in_dll(
+ Security, 'kSecImportExportPassphrase'
+ )
+ Security.kSecImportItemIdentity = CFStringRef.in_dll(
+ Security, 'kSecImportItemIdentity'
+ )
+
+ # CoreFoundation time!
+ CoreFoundation.CFRetain.argtypes = [
+ CFTypeRef
+ ]
+ CoreFoundation.CFRetain.restype = CFTypeRef
+
+ CoreFoundation.CFRelease.argtypes = [
+ CFTypeRef
+ ]
+ CoreFoundation.CFRelease.restype = None
+
+ CoreFoundation.CFGetTypeID.argtypes = [
+ CFTypeRef
+ ]
+ CoreFoundation.CFGetTypeID.restype = CFTypeID
+
+ CoreFoundation.CFStringCreateWithCString.argtypes = [
+ CFAllocatorRef,
+ c_char_p,
+ CFStringEncoding
+ ]
+ CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
+
+ CoreFoundation.CFStringGetCStringPtr.argtypes = [
+ CFStringRef,
+ CFStringEncoding
+ ]
+ CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
+
+ CoreFoundation.CFStringGetCString.argtypes = [
+ CFStringRef,
+ c_char_p,
+ CFIndex,
+ CFStringEncoding
+ ]
+ CoreFoundation.CFStringGetCString.restype = c_bool
+
+ CoreFoundation.CFDataCreate.argtypes = [
+ CFAllocatorRef,
+ c_char_p,
+ CFIndex
+ ]
+ CoreFoundation.CFDataCreate.restype = CFDataRef
+
+ CoreFoundation.CFDataGetLength.argtypes = [
+ CFDataRef
+ ]
+ CoreFoundation.CFDataGetLength.restype = CFIndex
+
+ CoreFoundation.CFDataGetBytePtr.argtypes = [
+ CFDataRef
+ ]
+ CoreFoundation.CFDataGetBytePtr.restype = c_void_p
+
+ CoreFoundation.CFDictionaryCreate.argtypes = [
+ CFAllocatorRef,
+ POINTER(CFTypeRef),
+ POINTER(CFTypeRef),
+ CFIndex,
+ CFDictionaryKeyCallBacks,
+ CFDictionaryValueCallBacks
+ ]
+ CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
+
+ CoreFoundation.CFDictionaryGetValue.argtypes = [
+ CFDictionaryRef,
+ CFTypeRef
+ ]
+ CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
+
+ CoreFoundation.CFArrayCreate.argtypes = [
+ CFAllocatorRef,
+ POINTER(CFTypeRef),
+ CFIndex,
+ CFArrayCallBacks,
+ ]
+ CoreFoundation.CFArrayCreate.restype = CFArrayRef
+
+ CoreFoundation.CFArrayCreateMutable.argtypes = [
+ CFAllocatorRef,
+ CFIndex,
+ CFArrayCallBacks
+ ]
+ CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
+
+ CoreFoundation.CFArrayAppendValue.argtypes = [
+ CFMutableArrayRef,
+ c_void_p
+ ]
+ CoreFoundation.CFArrayAppendValue.restype = None
+
+ CoreFoundation.CFArrayGetCount.argtypes = [
+ CFArrayRef
+ ]
+ CoreFoundation.CFArrayGetCount.restype = CFIndex
+
+ CoreFoundation.CFArrayGetValueAtIndex.argtypes = [
+ CFArrayRef,
+ CFIndex
+ ]
+ CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
+
+ CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
+ CoreFoundation, 'kCFAllocatorDefault'
+ )
+ CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks')
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
+ CoreFoundation, 'kCFTypeDictionaryKeyCallBacks'
+ )
+ CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
+ CoreFoundation, 'kCFTypeDictionaryValueCallBacks'
+ )
+
+ CoreFoundation.CFTypeRef = CFTypeRef
+ CoreFoundation.CFArrayRef = CFArrayRef
+ CoreFoundation.CFStringRef = CFStringRef
+ CoreFoundation.CFDictionaryRef = CFDictionaryRef
+
+except AttributeError:
+ raise ImportError('Error initializing ctypes')
+
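+# Usage sketch (macOS only; uses the bindings and constants defined in this
+# module): once bound, calls go through ctypes with the declared signatures:
+#
+#     ctx = Security.SSLCreateContext(None, SecurityConst.kSSLClientSide,
+#                                     SecurityConst.kSSLStreamType)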
+
+class CFConst(object):
+ """
+ A class object that acts as essentially a namespace for CoreFoundation
+ constants.
+ """
+ kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
+
+
+class SecurityConst(object):
+ """
+    A class object that acts essentially as a namespace for Security constants.
+ """
+ kSSLSessionOptionBreakOnServerAuth = 0
+
+ kSSLProtocol2 = 1
+ kSSLProtocol3 = 2
+ kTLSProtocol1 = 4
+ kTLSProtocol11 = 7
+ kTLSProtocol12 = 8
+ kTLSProtocol13 = 10
+ kTLSProtocolMaxSupported = 999
+
+ kSSLClientSide = 1
+ kSSLStreamType = 0
+
+ kSecFormatPEMSequence = 10
+
+ kSecTrustResultInvalid = 0
+ kSecTrustResultProceed = 1
+ # This gap is present on purpose: this was kSecTrustResultConfirm, which
+ # is deprecated.
+ kSecTrustResultDeny = 3
+ kSecTrustResultUnspecified = 4
+ kSecTrustResultRecoverableTrustFailure = 5
+ kSecTrustResultFatalTrustFailure = 6
+ kSecTrustResultOtherError = 7
+
+ errSSLProtocol = -9800
+ errSSLWouldBlock = -9803
+ errSSLClosedGraceful = -9805
+ errSSLClosedNoNotify = -9816
+ errSSLClosedAbort = -9806
+
+ errSSLXCertChainInvalid = -9807
+ errSSLCrypto = -9809
+ errSSLInternal = -9810
+ errSSLCertExpired = -9814
+ errSSLCertNotYetValid = -9815
+ errSSLUnknownRootCert = -9812
+ errSSLNoRootCert = -9813
+ errSSLHostNameMismatch = -9843
+ errSSLPeerHandshakeFail = -9824
+ errSSLPeerUserCancelled = -9839
+ errSSLWeakPeerEphemeralDHKey = -9850
+ errSSLServerAuthCompleted = -9841
+ errSSLRecordOverflow = -9847
+
+ errSecVerifyFailed = -67808
+ errSecNoTrustSettings = -25263
+ errSecItemNotFound = -25300
+ errSecInvalidTrustSettings = -25262
+
+ # Cipher suites. We only pick the ones our default cipher string allows.
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
+ TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
+ TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
+ TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
+ TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
+ TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
+ TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
+ TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
+ TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
+ TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
+ TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
+ TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
+ TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
+ TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
+ TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
+ TLS_AES_128_GCM_SHA256 = 0x1301
+ TLS_AES_256_GCM_SHA384 = 0x1302
+ TLS_AES_128_CCM_8_SHA256 = 0x1305
+ TLS_AES_128_CCM_SHA256 = 0x1304
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
new file mode 100644
index 00000000..b13cd9e7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
@@ -0,0 +1,346 @@
+"""
+Low-level helpers for the SecureTransport bindings.
+
+These are Python functions that are not directly related to the high-level APIs
+but are necessary to get them to work. They include a whole bunch of low-level
+CoreFoundation messing about and memory management. The concerns in this module
+are almost entirely about trying to avoid memory leaks and providing
+appropriate and useful assistance to the higher-level code.
+"""
+import base64
+import ctypes
+import itertools
+import re
+import os
+import ssl
+import tempfile
+
+from .bindings import Security, CoreFoundation, CFConst
+
+
+# This regular expression is used to grab PEM data out of a PEM bundle.
+_PEM_CERTS_RE = re.compile(
+ b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
+)
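+
+# A minimal doctest-style sketch of what the regex captures; the base64 body
+# below is a made-up placeholder, not a real certificate:
+#
+#   >>> bundle = (b"-----BEGIN CERTIFICATE-----\nTUlJQg==\n"
+#   ...           b"-----END CERTIFICATE-----\n")
+#   >>> [m.group(1) for m in _PEM_CERTS_RE.finditer(bundle)]
+#   [b'TUlJQg==']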
+
+
+def _cf_data_from_bytes(bytestring):
+ """
+ Given a bytestring, create a CFData object from it. This CFData object must
+ be CFReleased by the caller.
+ """
+ return CoreFoundation.CFDataCreate(
+ CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
+ )
+
+
+def _cf_dictionary_from_tuples(tuples):
+ """
+ Given a list of Python tuples, create an associated CFDictionary.
+ """
+ dictionary_size = len(tuples)
+
+ # We need to get the dictionary keys and values out in the same order.
+ keys = (t[0] for t in tuples)
+ values = (t[1] for t in tuples)
+ cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
+ cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
+
+ return CoreFoundation.CFDictionaryCreate(
+ CoreFoundation.kCFAllocatorDefault,
+ cf_keys,
+ cf_values,
+ dictionary_size,
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks,
+ CoreFoundation.kCFTypeDictionaryValueCallBacks,
+ )
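+
+# A hedged usage sketch for the two helpers above: every key and value must
+# already be a CF object, and the caller owns (and must eventually CFRelease)
+# the inputs as well as the resulting dictionary. The literal key/value bytes
+# are illustrative only.
+#
+#   cf_key = CoreFoundation.CFStringCreateWithCString(
+#       CoreFoundation.kCFAllocatorDefault, b'key',
+#       CFConst.kCFStringEncodingUTF8)
+#   cf_value = _cf_data_from_bytes(b'value')
+#   cf_dict = _cf_dictionary_from_tuples([(cf_key, cf_value)])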
+
+
+def _cf_string_to_unicode(value):
+ """
+ Creates a Unicode string from a CFString object. Used entirely for error
+ reporting.
+
+ Yes, it annoys me quite a lot that this function is this complex.
+ """
+ value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
+
+ string = CoreFoundation.CFStringGetCStringPtr(
+ value_as_void_p,
+ CFConst.kCFStringEncodingUTF8
+ )
+ if string is None:
+ buffer = ctypes.create_string_buffer(1024)
+ result = CoreFoundation.CFStringGetCString(
+ value_as_void_p,
+ buffer,
+ 1024,
+ CFConst.kCFStringEncodingUTF8
+ )
+ if not result:
+ raise OSError('Error copying C string from CFStringRef')
+ string = buffer.value
+ if string is not None:
+ string = string.decode('utf-8')
+ return string
+
+
+def _assert_no_error(error, exception_class=None):
+ """
+    Checks the return code and raises an exception if there is an error to
+    report.
+ """
+ if error == 0:
+ return
+
+ cf_error_string = Security.SecCopyErrorMessageString(error, None)
+ output = _cf_string_to_unicode(cf_error_string)
+ CoreFoundation.CFRelease(cf_error_string)
+
+ if output is None or output == u'':
+ output = u'OSStatus %s' % error
+
+ if exception_class is None:
+ exception_class = ssl.SSLError
+
+ raise exception_class(output)
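+
+# A sketch of the calling convention used throughout this module: any
+# OSStatus-returning Security call is immediately checked, e.g.
+#
+#   status = Security.SecKeychainCreate(...)
+#   _assert_no_error(status)  # raises ssl.SSLError (by default) if non-zero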
+
+
+def _cert_array_from_pem(pem_bundle):
+ """
+ Given a bundle of certs in PEM format, turns them into a CFArray of certs
+ that can be used to validate a cert chain.
+ """
+ # Normalize the PEM bundle's line endings.
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
+
+ der_certs = [
+ base64.b64decode(match.group(1))
+ for match in _PEM_CERTS_RE.finditer(pem_bundle)
+ ]
+ if not der_certs:
+ raise ssl.SSLError("No root certificates specified")
+
+ cert_array = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks)
+ )
+ if not cert_array:
+ raise ssl.SSLError("Unable to allocate memory!")
+
+ try:
+ for der_bytes in der_certs:
+ certdata = _cf_data_from_bytes(der_bytes)
+ if not certdata:
+ raise ssl.SSLError("Unable to allocate memory!")
+ cert = Security.SecCertificateCreateWithData(
+ CoreFoundation.kCFAllocatorDefault, certdata
+ )
+ CoreFoundation.CFRelease(certdata)
+ if not cert:
+ raise ssl.SSLError("Unable to build cert object!")
+
+ CoreFoundation.CFArrayAppendValue(cert_array, cert)
+ CoreFoundation.CFRelease(cert)
+    except Exception:
+        # We need to free the array before the exception bubbles further.
+        # We only want to do that if an error occurs: otherwise, the caller
+        # should free.
+        CoreFoundation.CFRelease(cert_array)
+        raise
+
+ return cert_array
+
+
+def _is_cert(item):
+ """
+ Returns True if a given CFTypeRef is a certificate.
+ """
+ expected = Security.SecCertificateGetTypeID()
+ return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _is_identity(item):
+ """
+ Returns True if a given CFTypeRef is an identity.
+ """
+ expected = Security.SecIdentityGetTypeID()
+ return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _temporary_keychain():
+ """
+ This function creates a temporary Mac keychain that we can use to work with
+ credentials. This keychain uses a one-time password and a temporary file to
+ store the data. We expect to have one keychain per socket. The returned
+ SecKeychainRef must be freed by the caller, including calling
+ SecKeychainDelete.
+
+ Returns a tuple of the SecKeychainRef and the path to the temporary
+ directory that contains it.
+ """
+    # Unfortunately, SecKeychainCreate requires a path to a keychain. This
+    # means we cannot use mkstemp to get a generic temporary file. Instead,
+    # we're going to create a temporary directory and a filename to use there.
+    # This filename will be 8 random bytes expanded into hexadecimal. We also
+    # need some random bytes to password-protect the keychain we're creating,
+    # so we ask for 40 random bytes.
+ random_bytes = os.urandom(40)
+ filename = base64.b16encode(random_bytes[:8]).decode('utf-8')
+ password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
+ tempdirectory = tempfile.mkdtemp()
+
+ keychain_path = os.path.join(tempdirectory, filename).encode('utf-8')
+
+ # We now want to create the keychain itself.
+ keychain = Security.SecKeychainRef()
+ status = Security.SecKeychainCreate(
+ keychain_path,
+ len(password),
+ password,
+ False,
+ None,
+ ctypes.byref(keychain)
+ )
+ _assert_no_error(status)
+
+ # Having created the keychain, we want to pass it off to the caller.
+ return keychain, tempdirectory
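+
+# A hedged lifecycle sketch for the helper above; the caller owns all three
+# cleanup steps (`import shutil` is assumed):
+#
+#   keychain, tempdir = _temporary_keychain()
+#   try:
+#       ...  # SecItemImport credentials, build identities
+#   finally:
+#       Security.SecKeychainDelete(keychain)
+#       CoreFoundation.CFRelease(keychain)
+#       shutil.rmtree(tempdir, ignore_errors=True)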
+
+
+def _load_items_from_file(keychain, path):
+ """
+ Given a single file, loads all the trust objects from it into arrays and
+ the keychain.
+ Returns a tuple of lists: the first list is a list of identities, the
+ second a list of certs.
+ """
+ certificates = []
+ identities = []
+ result_array = None
+
+ with open(path, 'rb') as f:
+ raw_filedata = f.read()
+
+ try:
+ filedata = CoreFoundation.CFDataCreate(
+ CoreFoundation.kCFAllocatorDefault,
+ raw_filedata,
+ len(raw_filedata)
+ )
+ result_array = CoreFoundation.CFArrayRef()
+ result = Security.SecItemImport(
+ filedata, # cert data
+ None, # Filename, leaving it out for now
+ None, # What the type of the file is, we don't care
+ None, # what's in the file, we don't care
+ 0, # import flags
+ None, # key params, can include passphrase in the future
+ keychain, # The keychain to insert into
+ ctypes.byref(result_array) # Results
+ )
+ _assert_no_error(result)
+
+ # A CFArray is not very useful to us as an intermediary
+ # representation, so we are going to extract the objects we want
+ # and then free the array. We don't need to keep hold of keys: the
+ # keychain already has them!
+ result_count = CoreFoundation.CFArrayGetCount(result_array)
+ for index in range(result_count):
+ item = CoreFoundation.CFArrayGetValueAtIndex(
+ result_array, index
+ )
+ item = ctypes.cast(item, CoreFoundation.CFTypeRef)
+
+ if _is_cert(item):
+ CoreFoundation.CFRetain(item)
+ certificates.append(item)
+ elif _is_identity(item):
+ CoreFoundation.CFRetain(item)
+ identities.append(item)
+ finally:
+ if result_array:
+ CoreFoundation.CFRelease(result_array)
+
+ CoreFoundation.CFRelease(filedata)
+
+ return (identities, certificates)
+
+
+def _load_client_cert_chain(keychain, *paths):
+ """
+ Load certificates and maybe keys from a number of files. Has the end goal
+ of returning a CFArray containing one SecIdentityRef, and then zero or more
+ SecCertificateRef objects, suitable for use as a client certificate trust
+ chain.
+ """
+ # Ok, the strategy.
+ #
+ # This relies on knowing that macOS will not give you a SecIdentityRef
+ # unless you have imported a key into a keychain. This is a somewhat
+ # artificial limitation of macOS (for example, it doesn't necessarily
+ # affect iOS), but there is nothing inside Security.framework that lets you
+ # get a SecIdentityRef without having a key in a keychain.
+ #
+ # So the policy here is we take all the files and iterate them in order.
+ # Each one will use SecItemImport to have one or more objects loaded from
+ # it. We will also point at a keychain that macOS can use to work with the
+ # private key.
+ #
+ # Once we have all the objects, we'll check what we actually have. If we
+ # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
+ # we'll take the first certificate (which we assume to be our leaf) and
+ # ask the keychain to give us a SecIdentityRef with that cert's associated
+ # key.
+ #
+ # We'll then return a CFArray containing the trust chain: one
+ # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
+ # responsibility for freeing this CFArray will be with the caller. This
+ # CFArray must remain alive for the entire connection, so in practice it
+ # will be stored with a single SSLSocket, along with the reference to the
+ # keychain.
+ certificates = []
+ identities = []
+
+ # Filter out bad paths.
+ paths = (path for path in paths if path)
+
+ try:
+ for file_path in paths:
+ new_identities, new_certs = _load_items_from_file(
+ keychain, file_path
+ )
+ identities.extend(new_identities)
+ certificates.extend(new_certs)
+
+ # Ok, we have everything. The question is: do we have an identity? If
+ # not, we want to grab one from the first cert we have.
+ if not identities:
+ new_identity = Security.SecIdentityRef()
+ status = Security.SecIdentityCreateWithCertificate(
+ keychain,
+ certificates[0],
+ ctypes.byref(new_identity)
+ )
+ _assert_no_error(status)
+ identities.append(new_identity)
+
+ # We now want to release the original certificate, as we no longer
+ # need it.
+ CoreFoundation.CFRelease(certificates.pop(0))
+
+ # We now need to build a new CFArray that holds the trust chain.
+ trust_chain = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ for item in itertools.chain(identities, certificates):
+ # ArrayAppendValue does a CFRetain on the item. That's fine,
+ # because the finally block will release our other refs to them.
+ CoreFoundation.CFArrayAppendValue(trust_chain, item)
+
+ return trust_chain
+ finally:
+ for obj in itertools.chain(identities, certificates):
+ CoreFoundation.CFRelease(obj)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py
new file mode 100644
index 00000000..9b42952d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/appengine.py
@@ -0,0 +1,289 @@
+"""
+This module provides a pool manager that uses Google App Engine's
+`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+
+Example usage::
+
+ from pip._vendor.urllib3 import PoolManager
+ from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
+
+ if is_appengine_sandbox():
+ # AppEngineManager uses AppEngine's URLFetch API behind the scenes
+ http = AppEngineManager()
+ else:
+ # PoolManager uses a socket-level API behind the scenes
+ http = PoolManager()
+
+ r = http.request('GET', 'https://google.com/')
+
+There are `limitations <https://cloud.google.com/appengine/docs/python/\
+urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
+the best choice for your application. There are three options for using
+urllib3 on Google App Engine:
+
+1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
+ cost-effective in many circumstances as long as your usage is within the
+ limitations.
+2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
+ Sockets also have `limitations and restrictions
+ <https://cloud.google.com/appengine/docs/python/sockets/\
+ #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
+ To use sockets, be sure to specify the following in your ``app.yaml``::
+
+ env_variables:
+ GAE_USE_SOCKETS_HTTPLIB : 'true'
+
+3. If you are using `App Engine Flexible
+<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
+:class:`PoolManager` without any configuration or special environment variables.
+"""
+
+from __future__ import absolute_import
+import io
+import logging
+import warnings
+from ..packages.six.moves.urllib.parse import urljoin
+
+from ..exceptions import (
+ HTTPError,
+ HTTPWarning,
+ MaxRetryError,
+ ProtocolError,
+ TimeoutError,
+ SSLError
+)
+
+from ..request import RequestMethods
+from ..response import HTTPResponse
+from ..util.timeout import Timeout
+from ..util.retry import Retry
+from . import _appengine_environ
+
+try:
+ from google.appengine.api import urlfetch
+except ImportError:
+ urlfetch = None
+
+
+log = logging.getLogger(__name__)
+
+
+class AppEnginePlatformWarning(HTTPWarning):
+ pass
+
+
+class AppEnginePlatformError(HTTPError):
+ pass
+
+
+class AppEngineManager(RequestMethods):
+ """
+ Connection manager for Google App Engine sandbox applications.
+
+ This manager uses the URLFetch service directly instead of using the
+ emulated httplib, and is subject to URLFetch limitations as described in
+ the App Engine documentation `here
+ <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+
+    Notably it will raise an :class:`AppEnginePlatformError` if:
+    * URLFetch is not available.
+    * You attempt to use this on App Engine Flexible, as full socket
+      support is available there.
+    * A request size is more than 10 megabytes.
+    * A response size is more than 32 megabytes.
+    * You use an unsupported request method such as OPTIONS.
+
+ Beyond those cases, it will raise normal urllib3 errors.
+ """
+
+ def __init__(self, headers=None, retries=None, validate_certificate=True,
+ urlfetch_retries=True):
+ if not urlfetch:
+ raise AppEnginePlatformError(
+ "URLFetch is not available in this environment.")
+
+ if is_prod_appengine_mvms():
+            raise AppEnginePlatformError(
+                "Use normal urllib3.PoolManager instead of AppEngineManager "
+                "on Managed VMs, as using URLFetch is not necessary in "
+                "this environment.")
+
+ warnings.warn(
+ "urllib3 is using URLFetch on Google App Engine sandbox instead "
+ "of sockets. To use sockets directly instead of URLFetch see "
+ "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
+ AppEnginePlatformWarning)
+
+ RequestMethods.__init__(self, headers)
+ self.validate_certificate = validate_certificate
+ self.urlfetch_retries = urlfetch_retries
+
+ self.retries = retries or Retry.DEFAULT
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def urlopen(self, method, url, body=None, headers=None,
+ retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
+ **response_kw):
+
+ retries = self._get_retries(retries, redirect)
+
+ try:
+ follow_redirects = (
+ redirect and
+ retries.redirect != 0 and
+ retries.total)
+ response = urlfetch.fetch(
+ url,
+ payload=body,
+ method=method,
+ headers=headers or {},
+ allow_truncated=False,
+ follow_redirects=self.urlfetch_retries and follow_redirects,
+ deadline=self._get_absolute_timeout(timeout),
+ validate_certificate=self.validate_certificate,
+ )
+ except urlfetch.DeadlineExceededError as e:
+ raise TimeoutError(self, e)
+
+ except urlfetch.InvalidURLError as e:
+ if 'too large' in str(e):
+ raise AppEnginePlatformError(
+ "URLFetch request too large, URLFetch only "
+ "supports requests up to 10mb in size.", e)
+ raise ProtocolError(e)
+
+ except urlfetch.DownloadError as e:
+ if 'Too many redirects' in str(e):
+ raise MaxRetryError(self, url, reason=e)
+ raise ProtocolError(e)
+
+ except urlfetch.ResponseTooLargeError as e:
+            raise AppEnginePlatformError(
+                "URLFetch response too large, URLFetch only supports "
+                "responses up to 32mb in size.", e)
+
+ except urlfetch.SSLCertificateError as e:
+ raise SSLError(e)
+
+ except urlfetch.InvalidMethodError as e:
+ raise AppEnginePlatformError(
+ "URLFetch does not support method: %s" % method, e)
+
+ http_response = self._urlfetch_response_to_http_response(
+ response, retries=retries, **response_kw)
+
+ # Handle redirect?
+ redirect_location = redirect and http_response.get_redirect_location()
+ if redirect_location:
+ # Check for redirect response
+            if self.urlfetch_retries and retries.raise_on_redirect:
+ raise MaxRetryError(self, url, "too many redirects")
+ else:
+ if http_response.status == 303:
+ method = 'GET'
+
+ try:
+ retries = retries.increment(method, url, response=http_response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise MaxRetryError(self, url, "too many redirects")
+ return http_response
+
+ retries.sleep_for_retry(http_response)
+ log.debug("Redirecting %s -> %s", url, redirect_location)
+ redirect_url = urljoin(url, redirect_location)
+ return self.urlopen(
+ method, redirect_url, body, headers,
+ retries=retries, redirect=redirect,
+ timeout=timeout, **response_kw)
+
+ # Check if we should retry the HTTP response.
+ has_retry_after = bool(http_response.getheader('Retry-After'))
+ if retries.is_retry(method, http_response.status, has_retry_after):
+ retries = retries.increment(
+ method, url, response=http_response, _pool=self)
+ log.debug("Retry: %s", url)
+ retries.sleep(http_response)
+ return self.urlopen(
+ method, url,
+ body=body, headers=headers,
+ retries=retries, redirect=redirect,
+ timeout=timeout, **response_kw)
+
+ return http_response
+
+ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
+
+ if is_prod_appengine():
+ # Production GAE handles deflate encoding automatically, but does
+ # not remove the encoding header.
+ content_encoding = urlfetch_resp.headers.get('content-encoding')
+
+ if content_encoding == 'deflate':
+ del urlfetch_resp.headers['content-encoding']
+
+ transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
+ # We have a full response's content,
+ # so let's make sure we don't report ourselves as chunked data.
+ if transfer_encoding == 'chunked':
+ encodings = transfer_encoding.split(",")
+ encodings.remove('chunked')
+ urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)
+
+ original_response = HTTPResponse(
+ # In order for decoding to work, we must present the content as
+ # a file-like object.
+ body=io.BytesIO(urlfetch_resp.content),
+ msg=urlfetch_resp.header_msg,
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ **response_kw
+ )
+
+ return HTTPResponse(
+ body=io.BytesIO(urlfetch_resp.content),
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ original_response=original_response,
+ **response_kw
+ )
+
+ def _get_absolute_timeout(self, timeout):
+ if timeout is Timeout.DEFAULT_TIMEOUT:
+ return None # Defer to URLFetch's default.
+ if isinstance(timeout, Timeout):
+ if timeout._read is not None or timeout._connect is not None:
+ warnings.warn(
+ "URLFetch does not support granular timeout settings, "
+ "reverting to total or default URLFetch timeout.",
+ AppEnginePlatformWarning)
+ return timeout.total
+ return timeout
+
+ def _get_retries(self, retries, redirect):
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(
+ retries, redirect=redirect, default=self.retries)
+
+ if retries.connect or retries.read or retries.redirect:
+ warnings.warn(
+ "URLFetch only supports total retries and does not "
+ "recognize connect, read, or redirect retry parameters.",
+ AppEnginePlatformWarning)
+
+ return retries
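+
+# A hedged usage sketch for AppEngineManager (example.com is a placeholder);
+# note that granular per-phase retry counts would trigger the
+# AppEnginePlatformWarning emitted by _get_retries above:
+#
+#   http = AppEngineManager(retries=Retry(total=3))
+#   r = http.request('GET', 'https://example.com/')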
+
+
+# Alias methods from _appengine_environ to maintain public API interface.
+
+is_appengine = _appengine_environ.is_appengine
+is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
+is_local_appengine = _appengine_environ.is_local_appengine
+is_prod_appengine = _appengine_environ.is_prod_appengine
+is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py
new file mode 100644
index 00000000..8ea127c5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py
@@ -0,0 +1,111 @@
+"""
+NTLM authenticating pool, contributed by erikcederstran
+
+Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+"""
+from __future__ import absolute_import
+
+from logging import getLogger
+from ntlm import ntlm
+
+from .. import HTTPSConnectionPool
+from ..packages.six.moves.http_client import HTTPSConnection
+
+
+log = getLogger(__name__)
+
+
+class NTLMConnectionPool(HTTPSConnectionPool):
+ """
+ Implements an NTLM authentication version of an urllib3 connection pool
+ """
+
+ scheme = 'https'
+
+ def __init__(self, user, pw, authurl, *args, **kwargs):
+ """
+ authurl is a random URL on the server that is protected by NTLM.
+ user is the Windows user, probably in the DOMAIN\\username format.
+ pw is the password for the user.
+ """
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
+ self.authurl = authurl
+ self.rawuser = user
+ user_parts = user.split('\\', 1)
+ self.domain = user_parts[0].upper()
+ self.user = user_parts[1]
+ self.pw = pw
+
+ def _new_conn(self):
+ # Performs the NTLM handshake that secures the connection. The socket
+ # must be kept open while requests are performed.
+ self.num_connections += 1
+ log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
+ self.num_connections, self.host, self.authurl)
+
+ headers = {'Connection': 'Keep-Alive'}
+ req_header = 'Authorization'
+ resp_header = 'www-authenticate'
+
+ conn = HTTPSConnection(host=self.host, port=self.port)
+
+ # Send negotiation message
+ headers[req_header] = (
+ 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
+ log.debug('Request headers: %s', headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ reshdr = dict(res.getheaders())
+ log.debug('Response status: %s %s', res.status, res.reason)
+ log.debug('Response headers: %s', reshdr)
+ log.debug('Response data: %s [...]', res.read(100))
+
+        # Remove the reference to the socket, so that it cannot be closed by
+        # the response object (we want to keep the socket open).
+ res.fp = None
+
+ # Server should respond with a challenge message
+ auth_header_values = reshdr[resp_header].split(', ')
+ auth_header_value = None
+ for s in auth_header_values:
+ if s[:5] == 'NTLM ':
+ auth_header_value = s[5:]
+ if auth_header_value is None:
+ raise Exception('Unexpected %s response header: %s' %
+ (resp_header, reshdr[resp_header]))
+
+ # Send authentication message
+ ServerChallenge, NegotiateFlags = \
+ ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
+ self.user,
+ self.domain,
+ self.pw,
+ NegotiateFlags)
+ headers[req_header] = 'NTLM %s' % auth_msg
+ log.debug('Request headers: %s', headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ log.debug('Response status: %s %s', res.status, res.reason)
+ log.debug('Response headers: %s', dict(res.getheaders()))
+ log.debug('Response data: %s [...]', res.read()[:100])
+ if res.status != 200:
+ if res.status == 401:
+ raise Exception('Server rejected request: wrong '
+ 'username or password')
+ raise Exception('Wrong server response: %s %s' %
+ (res.status, res.reason))
+
+ res.fp = None
+ log.debug('Connection established')
+ return conn
+
+ def urlopen(self, method, url, body=None, headers=None, retries=3,
+ redirect=True, assert_same_host=True):
+ if headers is None:
+ headers = {}
+ headers['Connection'] = 'Keep-Alive'
+ return super(NTLMConnectionPool, self).urlopen(method, url, body,
+ headers, retries,
+ redirect,
+ assert_same_host)
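+
+# A hedged usage sketch (host, port and credentials are placeholders):
+#
+#   pool = NTLMConnectionPool('DOMAIN\\user', 'secret', '/protected',
+#                             host='server.example', port=443)
+#   response = pool.urlopen('GET', '/protected')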
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py
new file mode 100644
index 00000000..abfc3191
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py
@@ -0,0 +1,485 @@
+"""
+SSL with SNI_-support for Python 2. Follow these instructions if you would
+like to verify SSL certificates in Python 2. Note, the default libraries do
+*not* do certificate checking; you need to do additional work to validate
+certificates yourself.
+
+This needs the following packages installed:
+
+* pyOpenSSL (tested with 16.0.0)
+* cryptography (minimum 1.3.4, from pyopenssl)
+* idna (minimum 2.0, from cryptography)
+
+However, pyopenssl depends on cryptography, which depends on idna, so while we
+use all three directly here, relatively few extra packages are required.
+
+You can install them with the following command:
+
+ pip install pyopenssl cryptography idna
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this::
+
+ try:
+ import urllib3.contrib.pyopenssl
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ except ImportError:
+ pass
+
+Now you can use :mod:`urllib3` as you normally would, and it will support SNI
+when the required modules are installed.
+
+Activating this module also has the positive side effect of disabling SSL/TLS
+compression in Python 2 (see `CRIME attack`_).
+
+If you want to configure the default list of supported cipher suites, you can
+set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
+
+.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+"""
+from __future__ import absolute_import
+
+import OpenSSL.SSL
+from cryptography import x509
+from cryptography.hazmat.backends.openssl import backend as openssl_backend
+from cryptography.hazmat.backends.openssl.x509 import _Certificate
+try:
+ from cryptography.x509 import UnsupportedExtension
+except ImportError:
+ # UnsupportedExtension is gone in cryptography >= 2.1.0
+ class UnsupportedExtension(Exception):
+ pass
+
+from socket import timeout, error as SocketError
+from io import BytesIO
+
+try: # Platform-specific: Python 2
+ from socket import _fileobject
+except ImportError: # Platform-specific: Python 3
+ _fileobject = None
+ from ..packages.backports.makefile import backport_makefile
+
+import logging
+import ssl
+from ..packages import six
+import sys
+
+from .. import util
+
+
+__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
+
+# SNI always works.
+HAS_SNI = True
+
+# Map from urllib3 to PyOpenSSL compatible parameter-values.
+_openssl_versions = {
+ util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+ ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
+}
+
+if hasattr(ssl, 'PROTOCOL_SSLv3') and hasattr(OpenSSL.SSL, 'SSLv3_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
+
+_stdlib_to_openssl_verify = {
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
+ ssl.CERT_REQUIRED:
+ OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+}
+_openssl_to_stdlib_verify = dict(
+ (v, k) for k, v in _stdlib_to_openssl_verify.items()
+)
+
+# OpenSSL will only write 16K at a time
+SSL_WRITE_BLOCKSIZE = 16384
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+
+log = logging.getLogger(__name__)
+
+
+def inject_into_urllib3():
+ 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'
+
+ _validate_dependencies_met()
+
+ util.SSLContext = PyOpenSSLContext
+ util.ssl_.SSLContext = PyOpenSSLContext
+ util.HAS_SNI = HAS_SNI
+ util.ssl_.HAS_SNI = HAS_SNI
+ util.IS_PYOPENSSL = True
+ util.ssl_.IS_PYOPENSSL = True
+
+
+def extract_from_urllib3():
+ 'Undo monkey-patching by :func:`inject_into_urllib3`.'
+
+ util.SSLContext = orig_util_SSLContext
+ util.ssl_.SSLContext = orig_util_SSLContext
+ util.HAS_SNI = orig_util_HAS_SNI
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
+ util.IS_PYOPENSSL = False
+ util.ssl_.IS_PYOPENSSL = False
+
+
+def _validate_dependencies_met():
+ """
+ Verifies that PyOpenSSL's package-level dependencies have been met.
+ Throws `ImportError` if they are not met.
+ """
+ # Method added in `cryptography==1.1`; not available in older versions
+ from cryptography.x509.extensions import Extensions
+ if getattr(Extensions, "get_extension_for_class", None) is None:
+ raise ImportError("'cryptography' module missing required functionality. "
+ "Try upgrading to v1.3.4 or newer.")
+
+ # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
+ # attribute is only present on those versions.
+ from OpenSSL.crypto import X509
+ x509 = X509()
+ if getattr(x509, "_x509", None) is None:
+ raise ImportError("'pyOpenSSL' module missing required functionality. "
+ "Try upgrading to v0.14 or newer.")
+
+
+def _dnsname_to_stdlib(name):
+ """
+ Converts a dNSName SubjectAlternativeName field to the form used by the
+ standard library on the given Python version.
+
+ Cryptography produces a dNSName as a unicode string that was idna-decoded
+ from ASCII bytes. We need to idna-encode that string to get it back, and
+ then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
+ uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
+
+ If the name cannot be idna-encoded then we return None signalling that
+ the name given should be skipped.
+ """
+ def idna_encode(name):
+ """
+ Borrowed wholesale from the Python Cryptography Project. It turns out
+ that we can't just safely call `idna.encode`: it can explode for
+ wildcard names. This avoids that problem.
+ """
+ from pip._vendor import idna
+
+ try:
+ for prefix in [u'*.', u'.']:
+ if name.startswith(prefix):
+ name = name[len(prefix):]
+ return prefix.encode('ascii') + idna.encode(name)
+ return idna.encode(name)
+ except idna.core.IDNAError:
+ return None
+
+ # Don't send IPv6 addresses through the IDNA encoder.
+ if ':' in name:
+ return name
+
+ name = idna_encode(name)
+ if name is None:
+ return None
+ elif sys.version_info >= (3, 0):
+ name = name.decode('utf-8')
+ return name
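+
+# An illustrative sketch of the conversion on Python 3, assuming the usual
+# behaviour of the vendored `idna` package:
+#
+#   >>> _dnsname_to_stdlib(u'*.b\xfccher.example')
+#   '*.xn--bcher-kva.example'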
+
+
+def get_subj_alt_name(peer_cert):
+ """
+ Given an PyOpenSSL certificate, provides all the subject alternative names.
+ """
+ # Pass the cert to cryptography, which has much better APIs for this.
+ if hasattr(peer_cert, "to_cryptography"):
+ cert = peer_cert.to_cryptography()
+ else:
+ # This is technically using private APIs, but should work across all
+ # relevant versions before PyOpenSSL got a proper API for this.
+ cert = _Certificate(openssl_backend, peer_cert._x509)
+
+ # We want to find the SAN extension. Ask Cryptography to locate it (it's
+ # faster than looping in Python)
+ try:
+ ext = cert.extensions.get_extension_for_class(
+ x509.SubjectAlternativeName
+ ).value
+ except x509.ExtensionNotFound:
+ # No such extension, return the empty list.
+ return []
+ except (x509.DuplicateExtension, UnsupportedExtension,
+ x509.UnsupportedGeneralNameType, UnicodeError) as e:
+ # A problem has been found with the quality of the certificate. Assume
+ # no SAN field is present.
+ log.warning(
+ "A problem was encountered with the certificate that prevented "
+ "urllib3 from finding the SubjectAlternativeName field. This can "
+ "affect certificate validation. The error was %s",
+ e,
+ )
+ return []
+
+ # We want to return dNSName and iPAddress fields. We need to cast the IPs
+ # back to strings because the match_hostname function wants them as
+ # strings.
+ # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
+ # decoded. This is pretty frustrating, but that's what the standard library
+ # does with certificates, and so we need to attempt to do the same.
+ # We also want to skip over names which cannot be idna encoded.
+ names = [
+ ('DNS', name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
+ if name is not None
+ ]
+ names.extend(
+ ('IP Address', str(name))
+ for name in ext.get_values_for_type(x509.IPAddress)
+ )
+
+ return names
+
+
+class WrappedSocket(object):
+ '''API-compatibility wrapper for Python OpenSSL's Connection-class.
+
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
+ collector of pypy.
+ '''
+
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
+ self.connection = connection
+ self.socket = socket
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self._makefile_refs = 0
+ self._closed = False
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ # Copy-pasted from Python 3.5 source code
+ def _decref_socketios(self):
+ if self._makefile_refs > 0:
+ self._makefile_refs -= 1
+ if self._closed:
+ self.close()
+
+ def recv(self, *args, **kwargs):
+ try:
+ data = self.connection.recv(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
+ return b''
+ else:
+ raise SocketError(str(e))
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b''
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout('The read operation timed out')
+ else:
+ return self.recv(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
+ else:
+ return data
+
+ def recv_into(self, *args, **kwargs):
+ try:
+ return self.connection.recv_into(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
+ return 0
+ else:
+ raise SocketError(str(e))
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return 0
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout('The read operation timed out')
+ else:
+ return self.recv_into(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
+
+ def settimeout(self, timeout):
+ return self.socket.settimeout(timeout)
+
+ def _send_until_done(self, data):
+ while True:
+ try:
+ return self.connection.send(data)
+ except OpenSSL.SSL.WantWriteError:
+ if not util.wait_for_write(self.socket, self.socket.gettimeout()):
+ raise timeout()
+ continue
+ except OpenSSL.SSL.SysCallError as e:
+ raise SocketError(str(e))
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
+ total_sent += sent
+
+ def shutdown(self):
+ # FIXME rethrow compatible exceptions should we ever use this
+ self.connection.shutdown()
+
+ def close(self):
+ if self._makefile_refs < 1:
+ try:
+ self._closed = True
+ return self.connection.close()
+ except OpenSSL.SSL.Error:
+ return
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ x509 = self.connection.get_peer_certificate()
+
+ if not x509:
+ return x509
+
+ if binary_form:
+ return OpenSSL.crypto.dump_certificate(
+ OpenSSL.crypto.FILETYPE_ASN1,
+ x509)
+
+ return {
+ 'subject': (
+ (('commonName', x509.get_subject().CN),),
+ ),
+ 'subjectAltName': get_subj_alt_name(x509)
+ }
+
+ def version(self):
+ return self.connection.get_protocol_version_name()
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+if _fileobject: # Platform-specific: Python 2
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+else: # Platform-specific: Python 3
+ makefile = backport_makefile
+
+WrappedSocket.makefile = makefile
+
+
+class PyOpenSSLContext(object):
+ """
+ I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
+ for translating the interface of the standard library ``SSLContext`` object
+ to calls into PyOpenSSL.
+ """
+ def __init__(self, protocol):
+ self.protocol = _openssl_versions[protocol]
+ self._ctx = OpenSSL.SSL.Context(self.protocol)
+ self._options = 0
+ self.check_hostname = False
+
+ @property
+ def options(self):
+ return self._options
+
+ @options.setter
+ def options(self, value):
+ self._options = value
+ self._ctx.set_options(value)
+
+ @property
+ def verify_mode(self):
+ return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
+
+ @verify_mode.setter
+ def verify_mode(self, value):
+ self._ctx.set_verify(
+ _stdlib_to_openssl_verify[value],
+ _verify_callback
+ )
+
+ def set_default_verify_paths(self):
+ self._ctx.set_default_verify_paths()
+
+ def set_ciphers(self, ciphers):
+ if isinstance(ciphers, six.text_type):
+ ciphers = ciphers.encode('utf-8')
+ self._ctx.set_cipher_list(ciphers)
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ if cafile is not None:
+ cafile = cafile.encode('utf-8')
+ if capath is not None:
+ capath = capath.encode('utf-8')
+ self._ctx.load_verify_locations(cafile, capath)
+ if cadata is not None:
+ self._ctx.load_verify_locations(BytesIO(cadata))
+
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
+ self._ctx.use_certificate_chain_file(certfile)
+ if password is not None:
+ if not isinstance(password, six.binary_type):
+ password = password.encode('utf-8')
+ self._ctx.set_passwd_cb(lambda *_: password)
+ self._ctx.use_privatekey_file(keyfile or certfile)
+
+ def wrap_socket(self, sock, server_side=False,
+ do_handshake_on_connect=True, suppress_ragged_eofs=True,
+ server_hostname=None):
+ cnx = OpenSSL.SSL.Connection(self._ctx, sock)
+
+ if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
+ server_hostname = server_hostname.encode('utf-8')
+
+ if server_hostname is not None:
+ cnx.set_tlsext_host_name(server_hostname)
+
+ cnx.set_connect_state()
+
+ while True:
+ try:
+ cnx.do_handshake()
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(sock, sock.gettimeout()):
+ raise timeout('select timed out')
+ continue
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError('bad handshake: %r' % e)
+ break
+
+ return WrappedSocket(cnx, sock)
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+ return err_no == 0
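+
+# A hedged usage sketch mirroring the stdlib SSLContext surface this class
+# emulates (example.com is a placeholder, and `import socket` is assumed):
+#
+#   ctx = PyOpenSSLContext(util.PROTOCOL_TLS)
+#   ctx.verify_mode = ssl.CERT_REQUIRED
+#   ctx.set_default_verify_paths()
+#   raw = socket.create_connection(('example.com', 443))
+#   tls = ctx.wrap_socket(raw, server_hostname='example.com')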
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py
new file mode 100644
index 00000000..4dc48484
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.py
@@ -0,0 +1,853 @@
+"""
+SecureTransport support for urllib3 via ctypes.
+
+This makes platform-native TLS available to urllib3 users on macOS without the
+use of a compiler. This is an important feature because the Python Package
+Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
+that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
+this is to give macOS users an alternative solution to the problem, and that
+solution is to use SecureTransport.
+
+We use ctypes here because this solution must not require a compiler. That's
+because pip is not allowed to require a compiler either.
+
+This is not intended to be a seriously long-term solution to this problem.
+The hope is that PEP 543 will eventually solve this issue for us, at which
+point we can retire this contrib module. But in the short term, we need to
+solve the impending tire fire that is Python on Mac without this kind of
+contrib module. So...here we are.
+
+To use this module, simply import and inject it::
+
+ import urllib3.contrib.securetransport
+ urllib3.contrib.securetransport.inject_into_urllib3()
+
+Happy TLSing!
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+ Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the "Software"),
+ to deal in the Software without restriction, including without limitation
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import contextlib
+import ctypes
+import errno
+import os.path
+import shutil
+import socket
+import ssl
+import threading
+import weakref
+
+from .. import util
+from ._securetransport.bindings import (
+ Security, SecurityConst, CoreFoundation
+)
+from ._securetransport.low_level import (
+ _assert_no_error, _cert_array_from_pem, _temporary_keychain,
+ _load_client_cert_chain
+)
+
+try: # Platform-specific: Python 2
+ from socket import _fileobject
+except ImportError: # Platform-specific: Python 3
+ _fileobject = None
+ from ..packages.backports.makefile import backport_makefile
+
+__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
+
+# SNI always works
+HAS_SNI = True
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+# This dictionary is used by the read callback to obtain a handle to the
+# calling wrapped socket. This is a pretty silly approach, but for now it'll
+# do. I feel like I should be able to smuggle a handle to the wrapped socket
+# directly in the SSLConnectionRef, but for now this approach will work I
+# guess.
+#
+# We need to lock around this structure for inserts, but we don't do it for
+# reads/writes in the callbacks. The reasoning here goes as follows:
+#
+# 1. It is not possible to call into the callbacks before the dictionary is
+# populated, so once in the callback the id must be in the dictionary.
+# 2. The callbacks don't mutate the dictionary, they only read from it, and
+# so cannot conflict with any of the insertions.
+#
+# This is good: if we had to lock in the callbacks we'd drastically slow down
+# the performance of this code.
+_connection_refs = weakref.WeakValueDictionary()
+_connection_ref_lock = threading.Lock()
+
+# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
+# for no better reason than we need *a* limit, and this one is right there.
+SSL_WRITE_BLOCKSIZE = 16384
+
+# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
+# individual cipher suites. We need to do this because this is how
+# SecureTransport wants them.
+CIPHER_SUITES = [
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_AES_256_GCM_SHA384,
+ SecurityConst.TLS_AES_128_GCM_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_AES_128_CCM_8_SHA256,
+ SecurityConst.TLS_AES_128_CCM_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
+]
+
+# Basically this is simple: for PROTOCOL_TLS (the old PROTOCOL_SSLv23) we turn
+# it into a low of TLSv1 and a high of TLSv1.3. For everything else, we pin to
+# that version.
+# TLSv1 to TLSv1.2 are supported on macOS 10.8+, and TLSv1.3 on macOS 10.13+.
+_protocol_to_min_max = {
+ util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocolMaxSupported),
+}
+
+if hasattr(ssl, "PROTOCOL_SSLv2"):
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
+ SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2
+ )
+if hasattr(ssl, "PROTOCOL_SSLv3"):
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
+ SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
+ SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1_1"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
+ SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1_2"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
+ SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12
+ )
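+
+# Illustrative lookups into the map above (a sketch; the second entry exists
+# only when the stdlib exposes PROTOCOL_TLSv1_2):
+#
+#   _protocol_to_min_max[util.PROTOCOL_TLS]
+#   # -> (kTLSProtocol1, kTLSProtocolMaxSupported), i.e. TLSv1 through TLSv1.3
+#   _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2]
+#   # -> (kTLSProtocol12, kTLSProtocol12), pinned to exactly TLSv1.2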
+
+
+def inject_into_urllib3():
+ """
+ Monkey-patch urllib3 with SecureTransport-backed SSL-support.
+ """
+ util.SSLContext = SecureTransportContext
+ util.ssl_.SSLContext = SecureTransportContext
+ util.HAS_SNI = HAS_SNI
+ util.ssl_.HAS_SNI = HAS_SNI
+ util.IS_SECURETRANSPORT = True
+ util.ssl_.IS_SECURETRANSPORT = True
+
+
+def extract_from_urllib3():
+ """
+ Undo monkey-patching by :func:`inject_into_urllib3`.
+ """
+ util.SSLContext = orig_util_SSLContext
+ util.ssl_.SSLContext = orig_util_SSLContext
+ util.HAS_SNI = orig_util_HAS_SNI
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
+ util.IS_SECURETRANSPORT = False
+ util.ssl_.IS_SECURETRANSPORT = False
+
+
+def _read_callback(connection_id, data_buffer, data_length_pointer):
+ """
+ SecureTransport read callback. This is called by ST to request that data
+ be returned from the socket.
+ """
+ wrapped_socket = None
+ try:
+ wrapped_socket = _connection_refs.get(connection_id)
+ if wrapped_socket is None:
+ return SecurityConst.errSSLInternal
+ base_socket = wrapped_socket.socket
+
+ requested_length = data_length_pointer[0]
+
+ timeout = wrapped_socket.gettimeout()
+ error = None
+ read_count = 0
+
+ try:
+ while read_count < requested_length:
+ if timeout is None or timeout >= 0:
+ if not util.wait_for_read(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, 'timed out')
+
+ remaining = requested_length - read_count
+ buffer = (ctypes.c_char * remaining).from_address(
+ data_buffer + read_count
+ )
+ chunk_size = base_socket.recv_into(buffer, remaining)
+ read_count += chunk_size
+ if not chunk_size:
+ if not read_count:
+ return SecurityConst.errSSLClosedGraceful
+ break
+        except socket.error as e:
+ error = e.errno
+
+ if error is not None and error != errno.EAGAIN:
+ data_length_pointer[0] = read_count
+ if error == errno.ECONNRESET or error == errno.EPIPE:
+ return SecurityConst.errSSLClosedAbort
+ raise
+
+ data_length_pointer[0] = read_count
+
+ if read_count != requested_length:
+ return SecurityConst.errSSLWouldBlock
+
+ return 0
+ except Exception as e:
+ if wrapped_socket is not None:
+ wrapped_socket._exception = e
+ return SecurityConst.errSSLInternal
+
+
+def _write_callback(connection_id, data_buffer, data_length_pointer):
+ """
+ SecureTransport write callback. This is called by ST to request that data
+ actually be sent on the network.
+ """
+ wrapped_socket = None
+ try:
+ wrapped_socket = _connection_refs.get(connection_id)
+ if wrapped_socket is None:
+ return SecurityConst.errSSLInternal
+ base_socket = wrapped_socket.socket
+
+ bytes_to_write = data_length_pointer[0]
+ data = ctypes.string_at(data_buffer, bytes_to_write)
+
+ timeout = wrapped_socket.gettimeout()
+ error = None
+ sent = 0
+
+ try:
+ while sent < bytes_to_write:
+ if timeout is None or timeout >= 0:
+ if not util.wait_for_write(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, 'timed out')
+ chunk_sent = base_socket.send(data)
+ sent += chunk_sent
+
+ # This has some needless copying here, but I'm not sure there's
+ # much value in optimising this data path.
+ data = data[chunk_sent:]
+        except socket.error as e:
+ error = e.errno
+
+ if error is not None and error != errno.EAGAIN:
+ data_length_pointer[0] = sent
+ if error == errno.ECONNRESET or error == errno.EPIPE:
+ return SecurityConst.errSSLClosedAbort
+ raise
+
+ data_length_pointer[0] = sent
+
+ if sent != bytes_to_write:
+ return SecurityConst.errSSLWouldBlock
+
+ return 0
+ except Exception as e:
+ if wrapped_socket is not None:
+ wrapped_socket._exception = e
+ return SecurityConst.errSSLInternal
+
+
+# We need to keep these two objects references alive: if they get GC'd while
+# in use then SecureTransport could attempt to call a function that is in freed
+# memory. That would be...uh...bad. Yeah, that's the word. Bad.
+_read_callback_pointer = Security.SSLReadFunc(_read_callback)
+_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
+
+
+class WrappedSocket(object):
+ """
+ API-compatibility wrapper for Python's OpenSSL wrapped socket object.
+
+ Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
+ collector of PyPy.
+ """
+ def __init__(self, socket):
+ self.socket = socket
+ self.context = None
+ self._makefile_refs = 0
+ self._closed = False
+ self._exception = None
+ self._keychain = None
+ self._keychain_dir = None
+ self._client_cert_chain = None
+
+ # We save off the previously-configured timeout and then set it to
+ # zero. This is done because we use select and friends to handle the
+ # timeouts, but if we leave the timeout set on the lower socket then
+ # Python will "kindly" call select on that socket again for us. Avoid
+ # that by forcing the timeout to zero.
+ self._timeout = self.socket.gettimeout()
+ self.socket.settimeout(0)
+
+ @contextlib.contextmanager
+ def _raise_on_error(self):
+ """
+ A context manager that can be used to wrap calls that do I/O from
+ SecureTransport. If any of the I/O callbacks hit an exception, this
+ context manager will correctly propagate the exception after the fact.
+ This avoids silently swallowing those exceptions.
+
+ It also correctly forces the socket closed.
+ """
+ self._exception = None
+
+ # We explicitly don't catch around this yield because in the unlikely
+ # event that an exception was hit in the block we don't want to swallow
+ # it.
+ yield
+ if self._exception is not None:
+ exception, self._exception = self._exception, None
+ self.close()
+ raise exception
+
+ def _set_ciphers(self):
+ """
+ Sets up the allowed ciphers. By default this matches the set in
+ util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
+ custom and doesn't allow changing at this time, mostly because parsing
+ OpenSSL cipher strings is going to be a freaking nightmare.
+ """
+ ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
+ result = Security.SSLSetEnabledCiphers(
+ self.context, ciphers, len(CIPHER_SUITES)
+ )
+ _assert_no_error(result)
+
+ def _custom_validate(self, verify, trust_bundle):
+ """
+ Called when we have set custom validation. We do this in two cases:
+ first, when cert validation is entirely disabled; and second, when
+ using a custom trust DB.
+ """
+ # If we disabled cert validation, just say: cool.
+ if not verify:
+ return
+
+ # We want data in memory, so load it up.
+ if os.path.isfile(trust_bundle):
+ with open(trust_bundle, 'rb') as f:
+ trust_bundle = f.read()
+
+ cert_array = None
+ trust = Security.SecTrustRef()
+
+ try:
+ # Get a CFArray that contains the certs we want.
+ cert_array = _cert_array_from_pem(trust_bundle)
+
+ # Ok, now the hard part. We want to get the SecTrustRef that ST has
+ # created for this connection, shove our CAs into it, tell ST to
+ # ignore everything else it knows, and then ask if it can build a
+ # chain. This is a buuuunch of code.
+ result = Security.SSLCopyPeerTrust(
+ self.context, ctypes.byref(trust)
+ )
+ _assert_no_error(result)
+ if not trust:
+ raise ssl.SSLError("Failed to copy trust reference")
+
+ result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
+ _assert_no_error(result)
+
+ result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
+ _assert_no_error(result)
+
+ trust_result = Security.SecTrustResultType()
+ result = Security.SecTrustEvaluate(
+ trust, ctypes.byref(trust_result)
+ )
+ _assert_no_error(result)
+ finally:
+ if trust:
+ CoreFoundation.CFRelease(trust)
+
+ if cert_array is not None:
+ CoreFoundation.CFRelease(cert_array)
+
+ # Ok, now we can look at what the result was.
+ successes = (
+ SecurityConst.kSecTrustResultUnspecified,
+ SecurityConst.kSecTrustResultProceed
+ )
+ if trust_result.value not in successes:
+ raise ssl.SSLError(
+ "certificate verify failed, error code: %d" %
+ trust_result.value
+ )
+
+ def handshake(self,
+ server_hostname,
+ verify,
+ trust_bundle,
+ min_version,
+ max_version,
+ client_cert,
+ client_key,
+ client_key_passphrase):
+ """
+        Actually performs the TLS handshake. This is run automatically by the
+        wrapped socket, and shouldn't be called directly from user code.
+ """
+ # First, we do the initial bits of connection setup. We need to create
+ # a context, set its I/O funcs, and set the connection reference.
+ self.context = Security.SSLCreateContext(
+ None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
+ )
+ result = Security.SSLSetIOFuncs(
+ self.context, _read_callback_pointer, _write_callback_pointer
+ )
+ _assert_no_error(result)
+
+ # Here we need to compute the handle to use. We do this by taking the
+ # id of self modulo 2**31 - 1. If this is already in the dictionary, we
+ # just keep incrementing by one until we find a free space.
+ with _connection_ref_lock:
+ handle = id(self) % 2147483647
+ while handle in _connection_refs:
+ handle = (handle + 1) % 2147483647
+ _connection_refs[handle] = self
+
+ result = Security.SSLSetConnection(self.context, handle)
+ _assert_no_error(result)
+
+ # If we have a server hostname, we should set that too.
+ if server_hostname:
+ if not isinstance(server_hostname, bytes):
+ server_hostname = server_hostname.encode('utf-8')
+
+ result = Security.SSLSetPeerDomainName(
+ self.context, server_hostname, len(server_hostname)
+ )
+ _assert_no_error(result)
+
+ # Setup the ciphers.
+ self._set_ciphers()
+
+ # Set the minimum and maximum TLS versions.
+ result = Security.SSLSetProtocolVersionMin(self.context, min_version)
+ _assert_no_error(result)
+
+ # TLS 1.3 isn't necessarily enabled by the OS
+ # so we have to detect when we error out and try
+ # setting TLS 1.3 if it's allowed. kTLSProtocolMaxSupported
+ # was added in macOS 10.13 along with kTLSProtocol13.
+ result = Security.SSLSetProtocolVersionMax(self.context, max_version)
+ if result != 0 and max_version == SecurityConst.kTLSProtocolMaxSupported:
+ result = Security.SSLSetProtocolVersionMax(self.context, SecurityConst.kTLSProtocol12)
+ _assert_no_error(result)
+
+ # If there's a trust DB, we need to use it. We do that by telling
+ # SecureTransport to break on server auth. We also do that if we don't
+ # want to validate the certs at all: we just won't actually do any
+ # authing in that case.
+ if not verify or trust_bundle is not None:
+ result = Security.SSLSetSessionOption(
+ self.context,
+ SecurityConst.kSSLSessionOptionBreakOnServerAuth,
+ True
+ )
+ _assert_no_error(result)
+
+ # If there's a client cert, we need to use it.
+ if client_cert:
+ self._keychain, self._keychain_dir = _temporary_keychain()
+ self._client_cert_chain = _load_client_cert_chain(
+ self._keychain, client_cert, client_key
+ )
+ result = Security.SSLSetCertificate(
+ self.context, self._client_cert_chain
+ )
+ _assert_no_error(result)
+
+ while True:
+ with self._raise_on_error():
+ result = Security.SSLHandshake(self.context)
+
+ if result == SecurityConst.errSSLWouldBlock:
+ raise socket.timeout("handshake timed out")
+ elif result == SecurityConst.errSSLServerAuthCompleted:
+ self._custom_validate(verify, trust_bundle)
+ continue
+ else:
+ _assert_no_error(result)
+ break
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ # Copy-pasted from Python 3.5 source code
+ def _decref_socketios(self):
+ if self._makefile_refs > 0:
+ self._makefile_refs -= 1
+ if self._closed:
+ self.close()
+
+ def recv(self, bufsiz):
+ buffer = ctypes.create_string_buffer(bufsiz)
+ bytes_read = self.recv_into(buffer, bufsiz)
+ data = buffer[:bytes_read]
+ return data
+
+ def recv_into(self, buffer, nbytes=None):
+ # Read short on EOF.
+ if self._closed:
+ return 0
+
+ if nbytes is None:
+ nbytes = len(buffer)
+
+ buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
+ processed_bytes = ctypes.c_size_t(0)
+
+ with self._raise_on_error():
+ result = Security.SSLRead(
+ self.context, buffer, nbytes, ctypes.byref(processed_bytes)
+ )
+
+ # There are some result codes that we want to treat as "not always
+ # errors". Specifically, those are errSSLWouldBlock,
+ # errSSLClosedGraceful, and errSSLClosedNoNotify.
+        if result == SecurityConst.errSSLWouldBlock:
+ # If we didn't process any bytes, then this was just a time out.
+ # However, we can get errSSLWouldBlock in situations when we *did*
+ # read some data, and in those cases we should just read "short"
+ # and return.
+ if processed_bytes.value == 0:
+ # Timed out, no data read.
+ raise socket.timeout("recv timed out")
+ elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify):
+ # The remote peer has closed this connection. We should do so as
+ # well. Note that we don't actually return here because in
+ # principle this could actually be fired along with return data.
+ # It's unlikely though.
+ self.close()
+ else:
+ _assert_no_error(result)
+
+ # Ok, we read and probably succeeded. We should return whatever data
+ # was actually read.
+ return processed_bytes.value
+
+ def settimeout(self, timeout):
+ self._timeout = timeout
+
+ def gettimeout(self):
+ return self._timeout
+
+ def send(self, data):
+ processed_bytes = ctypes.c_size_t(0)
+
+ with self._raise_on_error():
+ result = Security.SSLWrite(
+ self.context, data, len(data), ctypes.byref(processed_bytes)
+ )
+
+ if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
+ # Timed out
+ raise socket.timeout("send timed out")
+ else:
+ _assert_no_error(result)
+
+ # We sent, and probably succeeded. Tell them how much we sent.
+ return processed_bytes.value
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
+ total_sent += sent
+
+ def shutdown(self):
+ with self._raise_on_error():
+ Security.SSLClose(self.context)
+
+ def close(self):
+ # TODO: should I do clean shutdown here? Do I have to?
+ if self._makefile_refs < 1:
+ self._closed = True
+ if self.context:
+ CoreFoundation.CFRelease(self.context)
+ self.context = None
+ if self._client_cert_chain:
+ CoreFoundation.CFRelease(self._client_cert_chain)
+ self._client_cert_chain = None
+ if self._keychain:
+ Security.SecKeychainDelete(self._keychain)
+ CoreFoundation.CFRelease(self._keychain)
+ shutil.rmtree(self._keychain_dir)
+ self._keychain = self._keychain_dir = None
+ return self.socket.close()
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ # Urgh, annoying.
+ #
+ # Here's how we do this:
+ #
+ # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
+ # connection.
+ # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
+ # 3. To get the CN, call SecCertificateCopyCommonName and process that
+ # string so that it's of the appropriate type.
+ # 4. To get the SAN, we need to do something a bit more complex:
+ # a. Call SecCertificateCopyValues to get the data, requesting
+ # kSecOIDSubjectAltName.
+ # b. Mess about with this dictionary to try to get the SANs out.
+ #
+ # This is gross. Really gross. It's going to be a few hundred LoC extra
+ # just to repeat something that SecureTransport can *already do*. So my
+ # operating assumption at this time is that what we want to do is
+ # instead to just flag to urllib3 that it shouldn't do its own hostname
+ # validation when using SecureTransport.
+ if not binary_form:
+ raise ValueError(
+ "SecureTransport only supports dumping binary certs"
+ )
+ trust = Security.SecTrustRef()
+ certdata = None
+ der_bytes = None
+
+ try:
+ # Grab the trust store.
+ result = Security.SSLCopyPeerTrust(
+ self.context, ctypes.byref(trust)
+ )
+ _assert_no_error(result)
+ if not trust:
+ # Probably we haven't done the handshake yet. No biggie.
+ return None
+
+ cert_count = Security.SecTrustGetCertificateCount(trust)
+ if not cert_count:
+ # Also a case that might happen if we haven't handshaked.
+ # Handshook? Handshaken?
+ return None
+
+ leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
+ assert leaf
+
+ # Ok, now we want the DER bytes.
+ certdata = Security.SecCertificateCopyData(leaf)
+ assert certdata
+
+ data_length = CoreFoundation.CFDataGetLength(certdata)
+ data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
+ der_bytes = ctypes.string_at(data_buffer, data_length)
+ finally:
+ if certdata:
+ CoreFoundation.CFRelease(certdata)
+ if trust:
+ CoreFoundation.CFRelease(trust)
+
+ return der_bytes
+
+ def version(self):
+ protocol = Security.SSLProtocol()
+ result = Security.SSLGetNegotiatedProtocolVersion(self.context, ctypes.byref(protocol))
+ _assert_no_error(result)
+ if protocol.value == SecurityConst.kTLSProtocol13:
+ return 'TLSv1.3'
+ elif protocol.value == SecurityConst.kTLSProtocol12:
+ return 'TLSv1.2'
+ elif protocol.value == SecurityConst.kTLSProtocol11:
+ return 'TLSv1.1'
+ elif protocol.value == SecurityConst.kTLSProtocol1:
+ return 'TLSv1'
+ elif protocol.value == SecurityConst.kSSLProtocol3:
+ return 'SSLv3'
+ elif protocol.value == SecurityConst.kSSLProtocol2:
+ return 'SSLv2'
+ else:
+ raise ssl.SSLError('Unknown TLS version: %r' % protocol)
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+if _fileobject: # Platform-specific: Python 2
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+else: # Platform-specific: Python 3
+ def makefile(self, mode="r", buffering=None, *args, **kwargs):
+ # We disable buffering with SecureTransport because it conflicts with
+ # the buffering that ST does internally (see issue #1153 for more).
+ buffering = 0
+ return backport_makefile(self, mode, buffering, *args, **kwargs)
+
+WrappedSocket.makefile = makefile
+
+
+class SecureTransportContext(object):
+ """
+ I am a wrapper class for the SecureTransport library, to translate the
+ interface of the standard library ``SSLContext`` object to calls into
+ SecureTransport.
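+
+    A minimal usage sketch (``sock`` stands for an already-connected
+    ``socket.socket``; the protocol constant must be one the shim's
+    ``_protocol_to_min_max`` mapping supports)::
+
+        context = SecureTransportContext(ssl.PROTOCOL_SSLv23)
+        tls_sock = context.wrap_socket(sock, server_hostname='example.com')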
+ """
+ def __init__(self, protocol):
+ self._min_version, self._max_version = _protocol_to_min_max[protocol]
+ self._options = 0
+ self._verify = False
+ self._trust_bundle = None
+ self._client_cert = None
+ self._client_key = None
+ self._client_key_passphrase = None
+
+ @property
+ def check_hostname(self):
+ """
+ SecureTransport cannot have its hostname checking disabled. For more,
+ see the comment on getpeercert() in this file.
+ """
+ return True
+
+ @check_hostname.setter
+ def check_hostname(self, value):
+ """
+ SecureTransport cannot have its hostname checking disabled. For more,
+ see the comment on getpeercert() in this file.
+ """
+ pass
+
+ @property
+ def options(self):
+ # TODO: Well, crap.
+ #
+ # So this is the bit of the code that is the most likely to cause us
+ # trouble. Essentially we need to enumerate all of the SSL options that
+ # users might want to use and try to see if we can sensibly translate
+ # them, or whether we should just ignore them.
+ return self._options
+
+ @options.setter
+ def options(self, value):
+ # TODO: Update in line with above.
+ self._options = value
+
+ @property
+ def verify_mode(self):
+ return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
+
+ @verify_mode.setter
+ def verify_mode(self, value):
+ self._verify = True if value == ssl.CERT_REQUIRED else False
+
+ def set_default_verify_paths(self):
+ # So, this has to do something a bit weird. Specifically, what it does
+ # is nothing.
+ #
+        # This means that, if load_verify_locations was previously called,
+        # this method does not undo that. We behave this way because the rest
+        # of the urllib3 code will attempt to load the default verify paths
+        # if it hasn't been told about any paths, even if the context itself
+        # was configured sometime earlier. We resolve that by just ignoring
+        # it.
+ pass
+
+ def load_default_certs(self):
+ return self.set_default_verify_paths()
+
+ def set_ciphers(self, ciphers):
+ # For now, we just require the default cipher string.
+ if ciphers != util.ssl_.DEFAULT_CIPHERS:
+ raise ValueError(
+ "SecureTransport doesn't support custom cipher strings"
+ )
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ # OK, we only really support cadata and cafile.
+ if capath is not None:
+ raise ValueError(
+ "SecureTransport does not support cert directories"
+ )
+
+ self._trust_bundle = cafile or cadata
+
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
+ self._client_cert = certfile
+ self._client_key = keyfile
+        # __init__ and wrap_socket use _client_key_passphrase, so store the
+        # password under that name rather than a never-read attribute.
+        self._client_key_passphrase = password
+
+ def wrap_socket(self, sock, server_side=False,
+ do_handshake_on_connect=True, suppress_ragged_eofs=True,
+ server_hostname=None):
+ # So, what do we do here? Firstly, we assert some properties. This is a
+ # stripped down shim, so there is some functionality we don't support.
+ # See PEP 543 for the real deal.
+ assert not server_side
+ assert do_handshake_on_connect
+ assert suppress_ragged_eofs
+
+ # Ok, we're good to go. Now we want to create the wrapped socket object
+ # and store it in the appropriate place.
+ wrapped_socket = WrappedSocket(sock)
+
+ # Now we can handshake
+ wrapped_socket.handshake(
+ server_hostname, self._verify, self._trust_bundle,
+ self._min_version, self._max_version, self._client_cert,
+ self._client_key, self._client_key_passphrase
+ )
+ return wrapped_socket
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/socks.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/socks.py
new file mode 100644
index 00000000..636d261f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/contrib/socks.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+"""
+This module contains provisional support for SOCKS proxies from within
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+SOCKS5. To enable its functionality, either install PySocks or install this
+module with the ``socks`` extra.
+
+The SOCKS implementation supports the full range of urllib3 features. It also
+supports the following SOCKS features:
+
+- SOCKS4A (``proxy_url='socks4a://...'``)
+- SOCKS4 (``proxy_url='socks4://...'``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...'``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...'``)
+- Usernames and passwords for the SOCKS proxy
+
+ .. note::
+ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+ your ``proxy_url`` to ensure that DNS resolution is done from the remote
+ server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request::
+
+ proxy_url="socks4a://<userid>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy::
+
+ proxy_url="socks5h://<username>:<password>@proxy-host"
+
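+A minimal usage sketch (proxy address and target URL are placeholders)::
+
+    from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
+
+    proxy = SOCKSProxyManager('socks5h://localhost:1080/')
+    response = proxy.request('GET', 'http://example.com/')
+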
+"""
+from __future__ import absolute_import
+
+try:
+ import socks
+except ImportError:
+ import warnings
+ from ..exceptions import DependencyWarning
+
+ warnings.warn((
+ 'SOCKS support in urllib3 requires the installation of optional '
+ 'dependencies: specifically, PySocks. For more information, see '
+ 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies'
+ ),
+ DependencyWarning
+ )
+ raise
+
+from socket import error as SocketError, timeout as SocketTimeout
+
+from ..connection import (
+ HTTPConnection, HTTPSConnection
+)
+from ..connectionpool import (
+ HTTPConnectionPool, HTTPSConnectionPool
+)
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+
+class SOCKSConnection(HTTPConnection):
+ """
+ A plain-text HTTP connection that connects via a SOCKS proxy.
+ """
+ def __init__(self, *args, **kwargs):
+ self._socks_options = kwargs.pop('_socks_options')
+ super(SOCKSConnection, self).__init__(*args, **kwargs)
+
+ def _new_conn(self):
+ """
+ Establish a new connection via the SOCKS proxy.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw['source_address'] = self.source_address
+
+ if self.socket_options:
+ extra_kw['socket_options'] = self.socket_options
+
+ try:
+ conn = socks.create_connection(
+ (self.host, self.port),
+ proxy_type=self._socks_options['socks_version'],
+ proxy_addr=self._socks_options['proxy_host'],
+ proxy_port=self._socks_options['proxy_port'],
+ proxy_username=self._socks_options['username'],
+ proxy_password=self._socks_options['password'],
+ proxy_rdns=self._socks_options['rdns'],
+ timeout=self.timeout,
+ **extra_kw
+ )
+
+ except SocketTimeout:
+ raise ConnectTimeoutError(
+ self, "Connection to %s timed out. (connect timeout=%s)" %
+ (self.host, self.timeout))
+
+ except socks.ProxyError as e:
+ # This is fragile as hell, but it seems to be the only way to raise
+ # useful errors here.
+ if e.socket_err:
+ error = e.socket_err
+ if isinstance(error, SocketTimeout):
+ raise ConnectTimeoutError(
+ self,
+ "Connection to %s timed out. (connect timeout=%s)" %
+ (self.host, self.timeout)
+ )
+ else:
+ raise NewConnectionError(
+ self,
+ "Failed to establish a new connection: %s" % error
+ )
+ else:
+ raise NewConnectionError(
+ self,
+ "Failed to establish a new connection: %s" % e
+ )
+
+ except SocketError as e: # Defensive: PySocks should catch all these.
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e)
+
+ return conn
+
+
+# We don't need to duplicate the Verified/Unverified distinction from
+# urllib3/connection.py here because the HTTPSConnection will already have been
+# correctly set to either the Verified or Unverified form by that module. This
+# means the SOCKSHTTPSConnection will automatically be the correct type.
+class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
+ pass
+
+
+class SOCKSHTTPConnectionPool(HTTPConnectionPool):
+ ConnectionCls = SOCKSConnection
+
+
+class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
+ ConnectionCls = SOCKSHTTPSConnection
+
+
+class SOCKSProxyManager(PoolManager):
+ """
+ A version of the urllib3 ProxyManager that routes connections via the
+ defined SOCKS proxy.
+ """
+ pool_classes_by_scheme = {
+ 'http': SOCKSHTTPConnectionPool,
+ 'https': SOCKSHTTPSConnectionPool,
+ }
+
+ def __init__(self, proxy_url, username=None, password=None,
+ num_pools=10, headers=None, **connection_pool_kw):
+ parsed = parse_url(proxy_url)
+
+ if username is None and password is None and parsed.auth is not None:
+ split = parsed.auth.split(':')
+ if len(split) == 2:
+ username, password = split
+ if parsed.scheme == 'socks5':
+ socks_version = socks.PROXY_TYPE_SOCKS5
+ rdns = False
+ elif parsed.scheme == 'socks5h':
+ socks_version = socks.PROXY_TYPE_SOCKS5
+ rdns = True
+ elif parsed.scheme == 'socks4':
+ socks_version = socks.PROXY_TYPE_SOCKS4
+ rdns = False
+ elif parsed.scheme == 'socks4a':
+ socks_version = socks.PROXY_TYPE_SOCKS4
+ rdns = True
+ else:
+ raise ValueError(
+ "Unable to determine SOCKS version from %s" % proxy_url
+ )
+
+ self.proxy_url = proxy_url
+
+ socks_options = {
+ 'socks_version': socks_version,
+ 'proxy_host': parsed.host,
+ 'proxy_port': parsed.port,
+ 'username': username,
+ 'password': password,
+ 'rdns': rdns
+ }
+ connection_pool_kw['_socks_options'] = socks_options
+
+ super(SOCKSProxyManager, self).__init__(
+ num_pools, headers, **connection_pool_kw
+ )
+
+ self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/exceptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/exceptions.py
new file mode 100644
index 00000000..7bbaa987
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/exceptions.py
@@ -0,0 +1,246 @@
+from __future__ import absolute_import
+from .packages.six.moves.http_client import (
+ IncompleteRead as httplib_IncompleteRead
+)
+# Base Exceptions
+
+
+class HTTPError(Exception):
+ "Base exception used by this module."
+ pass
+
+
+class HTTPWarning(Warning):
+ "Base warning used by this module."
+ pass
+
+
+class PoolError(HTTPError):
+ "Base exception for errors caused within a pool."
+ def __init__(self, pool, message):
+ self.pool = pool
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, None)
+
+
+class RequestError(PoolError):
+ "Base exception for PoolErrors that have associated URLs."
+ def __init__(self, pool, url, message):
+ self.url = url
+ PoolError.__init__(self, pool, message)
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+ "Raised when SSL certificate fails in an HTTPS connection."
+ pass
+
+
+class ProxyError(HTTPError):
+ "Raised when the connection to a proxy fails."
+ pass
+
+
+class DecodeError(HTTPError):
+ "Raised when automatic decoding based on Content-Type fails."
+ pass
+
+
+class ProtocolError(HTTPError):
+ "Raised when something unexpected happens mid-request/response."
+ pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+class MaxRetryError(RequestError):
+ """Raised when the maximum number of retries is exceeded.
+
+ :param pool: The connection pool
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+ :param exceptions.Exception reason: The underlying error
+
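+    A minimal handling sketch (``pool`` stands for any configured
+    :class:`~urllib3.connectionpool.HTTPConnectionPool`)::
+
+        try:
+            pool.urlopen('GET', '/')
+        except MaxRetryError as e:
+            print(e.reason)
+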
+ """
+
+ def __init__(self, pool, url, reason=None):
+ self.reason = reason
+
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (
+ url, reason)
+
+ RequestError.__init__(self, pool, url, message)
+
+
+class HostChangedError(RequestError):
+ "Raised when an existing pool gets a request for a foreign host."
+
+ def __init__(self, pool, url, retries=3):
+ message = "Tried to open a foreign host with url: %s" % url
+ RequestError.__init__(self, pool, url, message)
+ self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+ """ Raised when passing an invalid state to a timeout """
+ pass
+
+
+class TimeoutError(HTTPError):
+ """ Raised when a socket timeout error occurs.
+
+ Catching this error will catch both :exc:`ReadTimeoutErrors
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+ """
+ pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+ "Raised when a socket timeout occurs while receiving data from a server"
+ pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+ "Raised when a socket timeout occurs while connecting to a server"
+ pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+ "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
+ pass
+
+
+class EmptyPoolError(PoolError):
+ "Raised when a pool runs out of connections and no more are allowed."
+ pass
+
+
+class ClosedPoolError(PoolError):
+ "Raised when a request enters a pool after the pool has been closed."
+ pass
+
+
+class LocationValueError(ValueError, HTTPError):
+ "Raised when there is something wrong with a given URL input."
+ pass
+
+
+class LocationParseError(LocationValueError):
+ "Raised when get_host or similar fails to parse the URL input."
+
+ def __init__(self, location):
+ message = "Failed to parse: %s" % location
+ HTTPError.__init__(self, message)
+
+ self.location = location
+
+
+class ResponseError(HTTPError):
+ "Used as a container for an error reason supplied in a MaxRetryError."
+ GENERIC_ERROR = 'too many error responses'
+ SPECIFIC_ERROR = 'too many {status_code} error responses'
+
+
+class SecurityWarning(HTTPWarning):
+    "Warned when performing security-reducing actions."
+ pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+ "Warned when connecting to a host with a certificate missing a SAN."
+ pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+ "Warned when making an unverified HTTPS request."
+ pass
+
+
+class SystemTimeWarning(SecurityWarning):
+ "Warned when system time is suspected to be wrong"
+ pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+ "Warned when certain SSL configuration is not available on a platform."
+ pass
+
+
+class SNIMissingWarning(HTTPWarning):
+    "Warned when making an HTTPS request without SNI available."
+ pass
+
+
+class DependencyWarning(HTTPWarning):
+ """
+ Warned when an attempt is made to import a module with missing optional
+ dependencies.
+ """
+ pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+ "Response needs to be chunked in order to read it as chunks."
+ pass
+
+
+class BodyNotHttplibCompatible(HTTPError):
+ """
+ Body should be httplib.HTTPResponse like (have an fp attribute which
+ returns raw chunks) for read_chunked().
+ """
+ pass
+
+
+class IncompleteRead(HTTPError, httplib_IncompleteRead):
+ """
+ Response length doesn't match expected Content-Length
+
+ Subclass of http_client.IncompleteRead to allow int value
+ for `partial` to avoid creating large objects on streamed
+ reads.
+ """
+ def __init__(self, partial, expected):
+ super(IncompleteRead, self).__init__(partial, expected)
+
+ def __repr__(self):
+ return ('IncompleteRead(%i bytes read, '
+ '%i more expected)' % (self.partial, self.expected))
+
+
+class InvalidHeader(HTTPError):
+ "The header provided was somehow invalid."
+ pass
+
+
+class ProxySchemeUnknown(AssertionError, ValueError):
+ "ProxyManager does not support the supplied scheme"
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+ def __init__(self, scheme):
+ message = "Not supported proxy scheme %s" % scheme
+ super(ProxySchemeUnknown, self).__init__(message)
+
+
+class HeaderParsingError(HTTPError):
+ "Raised by assert_header_parsing, but we convert it to a log.warning statement."
+ def __init__(self, defects, unparsed_data):
+ message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
+ super(HeaderParsingError, self).__init__(message)
+
+
+class UnrewindableBodyError(HTTPError):
+ "urllib3 encountered an error when trying to rewind a body"
+ pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/fields.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/fields.py
new file mode 100644
index 00000000..6a9a5a7f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/fields.py
@@ -0,0 +1,272 @@
+from __future__ import absolute_import
+import email.utils
+import mimetypes
+import re
+
+from .packages import six
+
+
+def guess_content_type(filename, default='application/octet-stream'):
+ """
+ Guess the "Content-Type" of a file.
+
+ :param filename:
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+ :param default:
+ If no "Content-Type" can be guessed, default to `default`.
+ """
+ if filename:
+ return mimetypes.guess_type(filename)[0] or default
+ return default
+
+
+def format_header_param_rfc2231(name, value):
+ """
+ Helper function to format and quote a single header parameter using the
+ strategy defined in RFC 2231.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :returns:
+ An RFC-2231-formatted unicode string.
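+
+    For example, a value containing non-ASCII characters such as
+    ``u'n\xe4ive.txt'`` is emitted as ``u"filename*=utf-8''n%C3%A4ive.txt"``
+    when the parameter name is ``filename``.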
+ """
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
+ if not any(ch in value for ch in '"\\\r\n'):
+ result = u'%s="%s"' % (name, value)
+ try:
+ result.encode('ascii')
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ pass
+ else:
+ return result
+
+ if not six.PY3: # Python 2:
+ value = value.encode('utf-8')
+
+ # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
+ # string in Python 2 but accepts and returns unicode strings in Python 3
+ value = email.utils.encode_rfc2231(value, 'utf-8')
+ value = '%s*=%s' % (name, value)
+
+ if not six.PY3: # Python 2:
+ value = value.decode('utf-8')
+
+ return value
+
+
+_HTML5_REPLACEMENTS = {
+    # Replace '"' with "%22".
+    u"\u0022": u"%22",
+    # Replace "\" with "\\".
+    u"\u005C": u"\u005C\u005C",
+}
+
+# All control characters from 0x00 to 0x1F *except* 0x1B.
+_HTML5_REPLACEMENTS.update({
+ six.unichr(cc): u"%{:02X}".format(cc)
+ for cc
+ in range(0x00, 0x1F+1)
+ if cc not in (0x1B,)
+})
+
+
+def _replace_multiple(value, needles_and_replacements):
+
+ def replacer(match):
+ return needles_and_replacements[match.group(0)]
+
+ pattern = re.compile(
+ r"|".join([
+ re.escape(needle) for needle in needles_and_replacements.keys()
+ ])
+ )
+
+ result = pattern.sub(replacer, value)
+
+ return result
+
+
+def format_header_param_html5(name, value):
+ """
+ Helper function to format and quote a single header parameter using the
+ HTML5 strategy.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows the `HTML5 Working Draft
+ Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+
+ .. _HTML5 Working Draft Section 4.10.22.7:
+ https://w3c.github.io/html/sec-forms.html#multipart-form-data
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :returns:
+ A unicode string, stripped of troublesome characters.
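+
+    For example, ``format_header_param_html5(u'filename', u'a"b.txt')``
+    returns ``u'filename="a%22b.txt"'``.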
+ """
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
+ value = _replace_multiple(value, _HTML5_REPLACEMENTS)
+
+ return u'%s="%s"' % (name, value)
+
+
+# For backwards-compatibility.
+format_header_param = format_header_param_html5
+
+
+class RequestField(object):
+ """
+ A data container for request body parameters.
+
+ :param name:
+ The name of this request field. Must be unicode.
+ :param data:
+ The data/value body.
+ :param filename:
+ An optional filename of the request field. Must be unicode.
+ :param headers:
+ An optional dict-like object of headers to initially use for the field.
+ :param header_formatter:
+ An optional callable that is used to encode and format the headers. By
+ default, this is :func:`format_header_param_html5`.
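+
+    A minimal usage sketch (names and data are illustrative)::
+
+        field = RequestField(u'upload', b'raw bytes', filename=u'data.bin')
+        field.make_multipart(content_type=u'application/octet-stream')
+        headers = field.render_headers()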
+ """
+ def __init__(
+ self,
+ name,
+ data,
+ filename=None,
+ headers=None,
+ header_formatter=format_header_param_html5):
+ self._name = name
+ self._filename = filename
+ self.data = data
+ self.headers = {}
+ if headers:
+ self.headers = dict(headers)
+ self.header_formatter = header_formatter
+
+ @classmethod
+ def from_tuples(
+ cls,
+ fieldname,
+ value,
+ header_formatter=format_header_param_html5):
+ """
+ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+ Supports constructing :class:`~urllib3.fields.RequestField` from
+ parameter of key/value strings AND key/filetuple. A filetuple is a
+ (filename, data, MIME type) tuple where the MIME type is optional.
+ For example::
+
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+
+ Field names and filenames must be unicode.
+ """
+ if isinstance(value, tuple):
+ if len(value) == 3:
+ filename, data, content_type = value
+ else:
+ filename, data = value
+ content_type = guess_content_type(filename)
+ else:
+ filename = None
+ content_type = None
+ data = value
+
+ request_param = cls(
+ fieldname, data, filename=filename, header_formatter=header_formatter)
+ request_param.make_multipart(content_type=content_type)
+
+ return request_param
+
+ def _render_part(self, name, value):
+ """
+ Overridable helper function to format a single header parameter. By
+ default, this calls ``self.header_formatter``.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as a unicode string.
+ """
+
+ return self.header_formatter(name, value)
+
+ def _render_parts(self, header_parts):
+ """
+ Helper function to format and quote a single header.
+
+ Useful for single headers that are composed of multiple items. E.g.,
+ 'Content-Disposition' fields.
+
+ :param header_parts:
+ A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+ as `k1="v1"; k2="v2"; ...`.
+ """
+ parts = []
+ iterable = header_parts
+ if isinstance(header_parts, dict):
+ iterable = header_parts.items()
+
+ for name, value in iterable:
+ if value is not None:
+ parts.append(self._render_part(name, value))
+
+ return u'; '.join(parts)
+
+ def render_headers(self):
+ """
+ Renders the headers for this request field.
+ """
+ lines = []
+
+ sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
+ for sort_key in sort_keys:
+ if self.headers.get(sort_key, False):
+ lines.append(u'%s: %s' % (sort_key, self.headers[sort_key]))
+
+ for header_name, header_value in self.headers.items():
+ if header_name not in sort_keys:
+ if header_value:
+ lines.append(u'%s: %s' % (header_name, header_value))
+
+ lines.append(u'\r\n')
+ return u'\r\n'.join(lines)
+
+ def make_multipart(self, content_disposition=None, content_type=None,
+ content_location=None):
+ """
+ Makes this request field into a multipart request field.
+
+        This method sets the "Content-Disposition", "Content-Type" and
+        "Content-Location" headers on the request field.
+
+        :param content_disposition:
+            The 'Content-Disposition' of the request body; defaults to
+            'form-data'.
+        :param content_type:
+ The 'Content-Type' of the request body.
+ :param content_location:
+ The 'Content-Location' of the request body.
+
+ """
+ self.headers['Content-Disposition'] = content_disposition or u'form-data'
+ self.headers['Content-Disposition'] += u'; '.join([
+ u'', self._render_parts(
+ ((u'name', self._name), (u'filename', self._filename))
+ )
+ ])
+ self.headers['Content-Type'] = content_type
+ self.headers['Content-Location'] = content_location
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/filepost.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/filepost.py
new file mode 100644
index 00000000..78f1e19b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/filepost.py
@@ -0,0 +1,98 @@
+from __future__ import absolute_import
+import binascii
+import codecs
+import os
+
+from io import BytesIO
+
+from .packages import six
+from .packages.six import b
+from .fields import RequestField
+
+writer = codecs.lookup('utf-8')[3]
+
+
+def choose_boundary():
+ """
+ Our embarrassingly-simple replacement for mimetools.choose_boundary.
+ """
+ boundary = binascii.hexlify(os.urandom(16))
+ if six.PY3:
+ boundary = boundary.decode('ascii')
+ return boundary
+
+
+def iter_field_objects(fields):
+ """
+ Iterate over fields.
+
+ Supports list of (k, v) tuples and dicts, and lists of
+ :class:`~urllib3.fields.RequestField`.
+
+ """
+ if isinstance(fields, dict):
+ i = six.iteritems(fields)
+ else:
+ i = iter(fields)
+
+ for field in i:
+ if isinstance(field, RequestField):
+ yield field
+ else:
+ yield RequestField.from_tuples(*field)
+
+
+def iter_fields(fields):
+ """
+ .. deprecated:: 1.6
+
+ Iterate over fields.
+
+ The addition of :class:`~urllib3.fields.RequestField` makes this function
+ obsolete. Instead, use :func:`iter_field_objects`, which returns
+ :class:`~urllib3.fields.RequestField` objects.
+
+ Supports list of (k, v) tuples and dicts.
+ """
+ if isinstance(fields, dict):
+ return ((k, v) for k, v in six.iteritems(fields))
+
+ return ((k, v) for k, v in fields)
+
+
+def encode_multipart_formdata(fields, boundary=None):
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
+
+ :param fields:
+ Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+ :func:`urllib3.filepost.choose_boundary`.
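+
+    A minimal usage sketch (field names and contents are illustrative)::
+
+        body, content_type = encode_multipart_formdata({
+            'field': 'value',
+            'attachment': ('report.txt', b'file contents', 'text/plain'),
+        })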
+ """
+ body = BytesIO()
+ if boundary is None:
+ boundary = choose_boundary()
+
+ for field in iter_field_objects(fields):
+ body.write(b('--%s\r\n' % (boundary)))
+
+ writer(body).write(field.render_headers())
+ data = field.data
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, six.text_type):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write(b'\r\n')
+
+ body.write(b('--%s--\r\n' % (boundary)))
+
+ content_type = str('multipart/form-data; boundary=%s' % boundary)
+
+ return body.getvalue(), content_type
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__init__.py
new file mode 100644
index 00000000..170e974c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
+__all__ = ('ssl_match_hostname', )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..cc9ded82
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc
new file mode 100644
index 00000000..a549e6df
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..50ab4d17
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc
new file mode 100644
index 00000000..053988eb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py
new file mode 100644
index 00000000..740db377
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/backports/makefile.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+"""
+backports.makefile
+~~~~~~~~~~~~~~~~~~
+
+Backports the Python 3 ``socket.makefile`` method for use with anything that
+wants to create a "fake" socket object.
+"""
+import io
+
+from socket import SocketIO
+
+
+def backport_makefile(self, mode="r", buffering=None, encoding=None,
+ errors=None, newline=None):
+ """
+ Backport of ``socket.makefile`` from Python 3.5.
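+
+    A minimal usage sketch (``sock`` stands for an object exposing the
+    socket interface plus a ``_makefile_refs`` counter, as urllib3's
+    WrappedSocket does)::
+
+        fileobj = backport_makefile(sock, mode="rb", buffering=0)
+        line = fileobj.readline()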
+ """
+ if not set(mode) <= {"r", "w", "b"}:
+ raise ValueError(
+ "invalid mode %r (only r, w, b allowed)" % (mode,)
+ )
+ writing = "w" in mode
+ reading = "r" in mode or not writing
+ assert reading or writing
+ binary = "b" in mode
+ rawmode = ""
+ if reading:
+ rawmode += "r"
+ if writing:
+ rawmode += "w"
+ raw = SocketIO(self, rawmode)
+ self._makefile_refs += 1
+ if buffering is None:
+ buffering = -1
+ if buffering < 0:
+ buffering = io.DEFAULT_BUFFER_SIZE
+ if buffering == 0:
+ if not binary:
+ raise ValueError("unbuffered streams must be binary")
+ return raw
+ if reading and writing:
+ buffer = io.BufferedRWPair(raw, raw, buffering)
+ elif reading:
+ buffer = io.BufferedReader(raw, buffering)
+ else:
+ assert writing
+ buffer = io.BufferedWriter(raw, buffering)
+ if binary:
+ return buffer
+ text = io.TextIOWrapper(buffer, encoding, errors, newline)
+ text.mode = mode
+ return text
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.py
new file mode 100644
index 00000000..371c6dd5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__init__.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+An implementation of semantics and validations described in RFC 3986.
+
+See http://rfc3986.readthedocs.io/ for detailed documentation.
+
+:copyright: (c) 2014 Rackspace
+:license: Apache v2.0, see LICENSE for details
+"""
+
+from .api import iri_reference
+from .api import IRIReference
+from .api import is_valid_uri
+from .api import normalize_uri
+from .api import uri_reference
+from .api import URIReference
+from .api import urlparse
+from .parseresult import ParseResult
+
+__title__ = 'rfc3986'
+__author__ = 'Ian Stapleton Cordasco'
+__author_email__ = 'graffatcolmingov@gmail.com'
+__license__ = 'Apache v2.0'
+__copyright__ = 'Copyright 2014 Rackspace'
+__version__ = '1.3.2'
+
+__all__ = (
+ 'ParseResult',
+ 'URIReference',
+ 'IRIReference',
+ 'is_valid_uri',
+ 'normalize_uri',
+ 'uri_reference',
+ 'iri_reference',
+ 'urlparse',
+ '__title__',
+ '__author__',
+ '__author_email__',
+ '__license__',
+ '__copyright__',
+ '__version__',
+)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..1e4a7b71
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/_mixin.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/_mixin.cpython-37.pyc
new file mode 100644
index 00000000..3462ebb8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/_mixin.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/abnf_regexp.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/abnf_regexp.cpython-37.pyc
new file mode 100644
index 00000000..2f58f53e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/abnf_regexp.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/api.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/api.cpython-37.pyc
new file mode 100644
index 00000000..1f0db47d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/api.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/builder.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/builder.cpython-37.pyc
new file mode 100644
index 00000000..850a0378
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/builder.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..bf8d2453
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/exceptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 00000000..f8465ed7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/iri.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/iri.cpython-37.pyc
new file mode 100644
index 00000000..aabe4476
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/iri.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/misc.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/misc.cpython-37.pyc
new file mode 100644
index 00000000..4e546cb3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/misc.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/normalizers.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/normalizers.cpython-37.pyc
new file mode 100644
index 00000000..0a897283
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/normalizers.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/parseresult.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/parseresult.cpython-37.pyc
new file mode 100644
index 00000000..7a559e71
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/parseresult.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/uri.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/uri.cpython-37.pyc
new file mode 100644
index 00000000..7b0cd396
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/uri.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/validators.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/validators.cpython-37.pyc
new file mode 100644
index 00000000..945e3e76
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/__pycache__/validators.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.py
new file mode 100644
index 00000000..543925cd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/_mixin.py
@@ -0,0 +1,353 @@
+"""Module containing the implementation of the URIMixin class."""
+import warnings
+
+from . import exceptions as exc
+from . import misc
+from . import normalizers
+from . import validators
+
+
+class URIMixin(object):
+ """Mixin with all shared methods for URIs and IRIs."""
+
+ __hash__ = tuple.__hash__
+
+ def authority_info(self):
+ """Return a dictionary with the ``userinfo``, ``host``, and ``port``.
+
+ If the authority is not valid, it will raise a
+ :class:`~rfc3986.exceptions.InvalidAuthority` Exception.
+
+ :returns:
+ ``{'userinfo': 'username:password', 'host': 'www.example.com',
+ 'port': '80'}``
+ :rtype: dict
+ :raises rfc3986.exceptions.InvalidAuthority:
+ If the authority is not ``None`` and can not be parsed.
+ """
+ if not self.authority:
+ return {'userinfo': None, 'host': None, 'port': None}
+
+ match = self._match_subauthority()
+
+ if match is None:
+ # In this case, we have an authority that was parsed from the URI
+ # Reference, but it cannot be further parsed by our
+ # misc.SUBAUTHORITY_MATCHER. In this case it must not be a valid
+ # authority.
+ raise exc.InvalidAuthority(self.authority.encode(self.encoding))
+
+ # We had a match, now let's ensure that it is actually a valid host
+ # address if it is IPv4
+ matches = match.groupdict()
+ host = matches.get('host')
+
+ if (host and misc.IPv4_MATCHER.match(host) and not
+ validators.valid_ipv4_host_address(host)):
+ # If we have a host, it appears to be IPv4 and it does not have
+ # valid bytes, it is an InvalidAuthority.
+ raise exc.InvalidAuthority(self.authority.encode(self.encoding))
+
+ return matches
+
+ def _match_subauthority(self):
+ return misc.SUBAUTHORITY_MATCHER.match(self.authority)
+
+ @property
+ def host(self):
+ """If present, a string representing the host."""
+ try:
+ authority = self.authority_info()
+ except exc.InvalidAuthority:
+ return None
+ return authority['host']
+
+ @property
+ def port(self):
+ """If present, the port extracted from the authority."""
+ try:
+ authority = self.authority_info()
+ except exc.InvalidAuthority:
+ return None
+ return authority['port']
+
+ @property
+ def userinfo(self):
+ """If present, the userinfo extracted from the authority."""
+ try:
+ authority = self.authority_info()
+ except exc.InvalidAuthority:
+ return None
+ return authority['userinfo']
+
+ def is_absolute(self):
+ """Determine if this URI Reference is an absolute URI.
+
+ See http://tools.ietf.org/html/rfc3986#section-4.3 for explanation.
+
+ :returns: ``True`` if it is an absolute URI, ``False`` otherwise.
+ :rtype: bool
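+
+        For example, ``uri_reference('http://example.com/').is_absolute()``
+        is ``True``, while a reference carrying a fragment is not an
+        absolute URI under RFC 3986 Section 4.3.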
+ """
+ return bool(misc.ABSOLUTE_URI_MATCHER.match(self.unsplit()))
+
+ def is_valid(self, **kwargs):
+ """Determine if the URI is valid.
+
+ .. deprecated:: 1.1.0
+
+ Use the :class:`~rfc3986.validators.Validator` object instead.
+
+ :param bool require_scheme: Set to ``True`` if you wish to require the
+ presence of the scheme component.
+ :param bool require_authority: Set to ``True`` if you wish to require
+ the presence of the authority component.
+ :param bool require_path: Set to ``True`` if you wish to require the
+ presence of the path component.
+ :param bool require_query: Set to ``True`` if you wish to require the
+ presence of the query component.
+ :param bool require_fragment: Set to ``True`` if you wish to require
+ the presence of the fragment component.
+ :returns: ``True`` if the URI is valid. ``False`` otherwise.
+ :rtype: bool
+ """
+ warnings.warn("Please use rfc3986.validators.Validator instead. "
+ "This method will be eventually removed.",
+ DeprecationWarning)
+ validators = [
+ (self.scheme_is_valid, kwargs.get('require_scheme', False)),
+ (self.authority_is_valid, kwargs.get('require_authority', False)),
+ (self.path_is_valid, kwargs.get('require_path', False)),
+ (self.query_is_valid, kwargs.get('require_query', False)),
+ (self.fragment_is_valid, kwargs.get('require_fragment', False)),
+ ]
+ return all(v(r) for v, r in validators)
+
+ def authority_is_valid(self, require=False):
+ """Determine if the authority component is valid.
+
+ .. deprecated:: 1.1.0
+
+ Use the :class:`~rfc3986.validators.Validator` object instead.
+
+ :param bool require:
+ Set to ``True`` to require the presence of this component.
+ :returns:
+ ``True`` if the authority is valid. ``False`` otherwise.
+ :rtype:
+ bool
+ """
+ warnings.warn("Please use rfc3986.validators.Validator instead. "
+ "This method will be eventually removed.",
+ DeprecationWarning)
+ try:
+ self.authority_info()
+ except exc.InvalidAuthority:
+ return False
+
+ return validators.authority_is_valid(
+ self.authority,
+ host=self.host,
+ require=require,
+ )
+
+ def scheme_is_valid(self, require=False):
+ """Determine if the scheme component is valid.
+
+ .. deprecated:: 1.1.0
+
+ Use the :class:`~rfc3986.validators.Validator` object instead.
+
+ :param bool require: Set to ``True`` to require the presence of this
+ component.
+ :returns: ``True`` if the scheme is valid. ``False`` otherwise.
+ :rtype: bool
+ """
+ warnings.warn("Please use rfc3986.validators.Validator instead. "
+ "This method will be eventually removed.",
+ DeprecationWarning)
+ return validators.scheme_is_valid(self.scheme, require)
+
+ def path_is_valid(self, require=False):
+ """Determine if the path component is valid.
+
+ .. deprecated:: 1.1.0
+
+ Use the :class:`~rfc3986.validators.Validator` object instead.
+
+ :param bool require: Set to ``True`` to require the presence of this
+ component.
+ :returns: ``True`` if the path is valid. ``False`` otherwise.
+ :rtype: bool
+ """
+ warnings.warn("Please use rfc3986.validators.Validator instead. "
+ "This method will be eventually removed.",
+ DeprecationWarning)
+ return validators.path_is_valid(self.path, require)
+
+ def query_is_valid(self, require=False):
+ """Determine if the query component is valid.
+
+ .. deprecated:: 1.1.0
+
+ Use the :class:`~rfc3986.validators.Validator` object instead.
+
+ :param bool require: Set to ``True`` to require the presence of this
+ component.
+ :returns: ``True`` if the query is valid. ``False`` otherwise.
+ :rtype: bool
+ """
+ warnings.warn("Please use rfc3986.validators.Validator instead. "
+ "This method will be eventually removed.",
+ DeprecationWarning)
+ return validators.query_is_valid(self.query, require)
+
+ def fragment_is_valid(self, require=False):
+ """Determine if the fragment component is valid.
+
+ .. deprecated:: 1.1.0
+
+ Use the :class:`~rfc3986.validators.Validator` object instead.
+
+ :param bool require: Set to ``True`` to require the presence of this
+ component.
+ :returns: ``True`` if the fragment is valid. ``False`` otherwise.
+ :rtype: bool
+ """
+ warnings.warn("Please use rfc3986.validators.Validator instead. "
+ "This method will be eventually removed.",
+ DeprecationWarning)
+ return validators.fragment_is_valid(self.fragment, require)
+
+ def normalized_equality(self, other_ref):
+ """Compare this URIReference to another URIReference.
+
+ :param URIReference other_ref: (required), The reference with which
+ we're comparing.
+ :returns: ``True`` if the references are equal, ``False`` otherwise.
+ :rtype: bool
+ """
+ return tuple(self.normalize()) == tuple(other_ref.normalize())
+
+ def resolve_with(self, base_uri, strict=False):
+ """Use an absolute URI Reference to resolve this relative reference.
+
+ Assuming this is a relative reference that you would like to resolve,
+ use the provided base URI to resolve it.
+
+ See http://tools.ietf.org/html/rfc3986#section-5 for more information.
+
+ :param base_uri: Either a string or URIReference. It must be an
+ absolute URI or it will raise an exception.
+ :returns: A new URIReference which is the result of resolving this
+ reference using ``base_uri``.
+ :rtype: :class:`URIReference`
+ :raises rfc3986.exceptions.ResolutionError:
+ If the ``base_uri`` is not an absolute URI.
+ """
+ if not isinstance(base_uri, URIMixin):
+ base_uri = type(self).from_string(base_uri)
+
+ if not base_uri.is_absolute():
+ raise exc.ResolutionError(base_uri)
+
+ # This is optional per
+ # http://tools.ietf.org/html/rfc3986#section-5.2.1
+ base_uri = base_uri.normalize()
+
+ # The reference we're resolving
+ resolving = self
+
+ if not strict and resolving.scheme == base_uri.scheme:
+ resolving = resolving.copy_with(scheme=None)
+
+ # http://tools.ietf.org/html/rfc3986#page-32
+ if resolving.scheme is not None:
+ target = resolving.copy_with(
+ path=normalizers.normalize_path(resolving.path)
+ )
+ else:
+ if resolving.authority is not None:
+ target = resolving.copy_with(
+ scheme=base_uri.scheme,
+ path=normalizers.normalize_path(resolving.path)
+ )
+ else:
+ if resolving.path is None:
+ if resolving.query is not None:
+ query = resolving.query
+ else:
+ query = base_uri.query
+ target = resolving.copy_with(
+ scheme=base_uri.scheme,
+ authority=base_uri.authority,
+ path=base_uri.path,
+ query=query
+ )
+ else:
+ if resolving.path.startswith('/'):
+ path = normalizers.normalize_path(resolving.path)
+ else:
+ path = normalizers.normalize_path(
+ misc.merge_paths(base_uri, resolving.path)
+ )
+ target = resolving.copy_with(
+ scheme=base_uri.scheme,
+ authority=base_uri.authority,
+ path=path,
+ query=resolving.query
+ )
+ return target
+
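# Illustrative sketch (not part of the patch): reference resolution per
# RFC 3986 section 5, assuming the vendored package path added above is
# importable.
from pip._vendor.urllib3.packages.rfc3986.uri import URIReference

base = URIReference.from_string('http://a/b/c/d;p?q')
relative = URIReference.from_string('../g')
# '../g' against this base is a worked example from RFC 3986, section 5.4.1
print(relative.resolve_with(base).unsplit())  # http://a/b/g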
+ def unsplit(self):
+ """Create a URI string from the components.
+
+ :returns: The URI Reference reconstituted as a string.
+ :rtype: str
+ """
+ # See http://tools.ietf.org/html/rfc3986#section-5.3
+ result_list = []
+ if self.scheme:
+ result_list.extend([self.scheme, ':'])
+ if self.authority:
+ result_list.extend(['//', self.authority])
+ if self.path:
+ result_list.append(self.path)
+ if self.query is not None:
+ result_list.extend(['?', self.query])
+ if self.fragment is not None:
+ result_list.extend(['#', self.fragment])
+ return ''.join(result_list)
+
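# Illustrative sketch (not part of the patch): unsplit() distinguishes an
# absent component (None) from an empty one, so a bare '?' or '#' survives
# a parse/unsplit round trip.
from pip._vendor.urllib3.packages.rfc3986.uri import URIReference

print(URIReference.from_string('http://a/?#').unsplit())  # http://a/?#
print(URIReference.from_string('http://a/').unsplit())    # http://a/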
+ def copy_with(self, scheme=misc.UseExisting, authority=misc.UseExisting,
+ path=misc.UseExisting, query=misc.UseExisting,
+ fragment=misc.UseExisting):
+ """Create a copy of this reference with the new components.
+
+ :param str scheme:
+ (optional) The scheme to use for the new reference.
+ :param str authority:
+ (optional) The authority to use for the new reference.
+ :param str path:
+ (optional) The path to use for the new reference.
+ :param str query:
+ (optional) The query to use for the new reference.
+ :param str fragment:
+ (optional) The fragment to use for the new reference.
+ :returns:
+ New URIReference with provided components.
+ :rtype:
+ URIReference
+ """
+ attributes = {
+ 'scheme': scheme,
+ 'authority': authority,
+ 'path': path,
+ 'query': query,
+ 'fragment': fragment,
+ }
+ for key, value in list(attributes.items()):
+ if value is misc.UseExisting:
+ del attributes[key]
+ uri = self._replace(**attributes)
+ uri.encoding = self.encoding
+ return uri
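# Illustrative sketch (not part of the patch): copy_with() leaves the
# original immutable reference untouched; omitted arguments keep their
# existing values via the misc.UseExisting sentinel.
from pip._vendor.urllib3.packages.rfc3986.uri import URIReference

ref = URIReference.from_string('https://example.com/a?k=v')
print(ref.copy_with(scheme='http', fragment='top').unsplit())
# -> http://example.com/a?k=v#top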
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py
new file mode 100644
index 00000000..24c9c3d0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/abnf_regexp.py
@@ -0,0 +1,267 @@
+# -*- coding: utf-8 -*-
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module for the regular expressions crafted from ABNF."""
+
+import sys
+
+# https://tools.ietf.org/html/rfc3986#page-13
+GEN_DELIMS = GENERIC_DELIMITERS = ":/?#[]@"
+GENERIC_DELIMITERS_SET = set(GENERIC_DELIMITERS)
+# https://tools.ietf.org/html/rfc3986#page-13
+SUB_DELIMS = SUB_DELIMITERS = "!$&'()*+,;="
+SUB_DELIMITERS_SET = set(SUB_DELIMITERS)
+# Escape the '*' for use in regular expressions
+SUB_DELIMITERS_RE = r"!$&'()\*+,;="
+RESERVED_CHARS_SET = GENERIC_DELIMITERS_SET.union(SUB_DELIMITERS_SET)
+ALPHA = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
+DIGIT = '0123456789'
+# https://tools.ietf.org/html/rfc3986#section-2.3
+UNRESERVED = UNRESERVED_CHARS = ALPHA + DIGIT + r'._~-'
+UNRESERVED_CHARS_SET = set(UNRESERVED_CHARS)
+NON_PCT_ENCODED_SET = RESERVED_CHARS_SET.union(UNRESERVED_CHARS_SET)
+# We need to escape the '-' in this case:
+UNRESERVED_RE = r'A-Za-z0-9._~\-'
+
+# Percent encoded character values
+PERCENT_ENCODED = PCT_ENCODED = '%[A-Fa-f0-9]{2}'
+PCHAR = '([' + UNRESERVED_RE + SUB_DELIMITERS_RE + ':@]|%s)' % PCT_ENCODED
+
+# NOTE(sigmavirus24): We're going to use more strict regular expressions
+# than appear in Appendix B for scheme. This will prevent over-eager
+# consuming of items that aren't schemes.
+SCHEME_RE = '[a-zA-Z][a-zA-Z0-9+.-]*'
+_AUTHORITY_RE = '[^/?#]*'
+_PATH_RE = '[^?#]*'
+_QUERY_RE = '[^#]*'
+_FRAGMENT_RE = '.*'
+
+# Extracted from http://tools.ietf.org/html/rfc3986#appendix-B
+COMPONENT_PATTERN_DICT = {
+ 'scheme': SCHEME_RE,
+ 'authority': _AUTHORITY_RE,
+ 'path': _PATH_RE,
+ 'query': _QUERY_RE,
+ 'fragment': _FRAGMENT_RE,
+}
+
+# See http://tools.ietf.org/html/rfc3986#appendix-B
+# In this case, we name each of the important matches so we can use
+# SRE_Match#groupdict to parse the values out if we so choose. This is also
+# modified to ignore other matches that are not important to the parsing of
+# the reference so we can also simply use SRE_Match#groups.
+URL_PARSING_RE = (
+ r'(?:(?P<scheme>{scheme}):)?(?://(?P<authority>{authority}))?'
+ r'(?P<path>{path})(?:\?(?P<query>{query}))?'
+ r'(?:#(?P<fragment>{fragment}))?'
+).format(**COMPONENT_PATTERN_DICT)
+
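# Illustrative sketch (not part of the patch): the Appendix B expression
# compiles as-is, and the named groups expose all five components.
import re
from pip._vendor.urllib3.packages.rfc3986 import abnf_regexp

matcher = re.compile(abnf_regexp.URL_PARSING_RE)
parts = matcher.match('http://example.com/p?q=1#frag').groupdict()
# {'scheme': 'http', 'authority': 'example.com', 'path': '/p',
#  'query': 'q=1', 'fragment': 'frag'}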
+
+# #########################
+# Authority Matcher Section
+# #########################
+
+# Host patterns, see: http://tools.ietf.org/html/rfc3986#section-3.2.2
+# The pattern for a regular name, e.g., www.google.com, api.github.com
+REGULAR_NAME_RE = REG_NAME = '((?:{0}|[{1}])*)'.format(
+ '%[0-9A-Fa-f]{2}', SUB_DELIMITERS_RE + UNRESERVED_RE
+)
+# The pattern for an IPv4 address, e.g., 192.168.255.255, 127.0.0.1,
+IPv4_RE = r'([0-9]{1,3}\.){3}[0-9]{1,3}'
+# Hexadecimal characters used in each piece of an IPv6 address
+HEXDIG_RE = '[0-9A-Fa-f]{1,4}'
+# Least-significant 32 bits of an IPv6 address
+LS32_RE = '({hex}:{hex}|{ipv4})'.format(hex=HEXDIG_RE, ipv4=IPv4_RE)
+# Substitutions into the following patterns for IPv6 patterns defined
+# http://tools.ietf.org/html/rfc3986#page-20
+_subs = {'hex': HEXDIG_RE, 'ls32': LS32_RE}
+
+# Below: h16 = hexdig, see: https://tools.ietf.org/html/rfc5234 for details
+# about ABNF (Augmented Backus-Naur Form) use in the comments
+variations = [
+ # 6( h16 ":" ) ls32
+ '(%(hex)s:){6}%(ls32)s' % _subs,
+ # "::" 5( h16 ":" ) ls32
+ '::(%(hex)s:){5}%(ls32)s' % _subs,
+ # [ h16 ] "::" 4( h16 ":" ) ls32
+ '(%(hex)s)?::(%(hex)s:){4}%(ls32)s' % _subs,
+ # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+ '((%(hex)s:)?%(hex)s)?::(%(hex)s:){3}%(ls32)s' % _subs,
+ # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+ '((%(hex)s:){0,2}%(hex)s)?::(%(hex)s:){2}%(ls32)s' % _subs,
+ # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+ '((%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s' % _subs,
+ # [ *4( h16 ":" ) h16 ] "::" ls32
+ '((%(hex)s:){0,4}%(hex)s)?::%(ls32)s' % _subs,
+ # [ *5( h16 ":" ) h16 ] "::" h16
+ '((%(hex)s:){0,5}%(hex)s)?::%(hex)s' % _subs,
+ # [ *6( h16 ":" ) h16 ] "::"
+ '((%(hex)s:){0,6}%(hex)s)?::' % _subs,
+]
+
+IPv6_RE = '(({0})|({1})|({2})|({3})|({4})|({5})|({6})|({7})|({8}))'.format(
+ *variations
+)
+
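# Illustrative sketch (not part of the patch): the assembled alternation
# accepts the usual compressed forms once anchored.
import re
from pip._vendor.urllib3.packages.rfc3986 import abnf_regexp

ipv6 = re.compile('^%s$' % abnf_regexp.IPv6_RE)
assert ipv6.match('::1')
assert ipv6.match('2001:db8::8:800:200c:417a')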
+IPv_FUTURE_RE = r'v[0-9A-Fa-f]+\.[%s]+' % (
+ UNRESERVED_RE + SUB_DELIMITERS_RE + ':'
+)
+
+# RFC 6874 Zone ID ABNF
+ZONE_ID = '(?:[' + UNRESERVED_RE + ']|' + PCT_ENCODED + ')+'
+
+IPv6_ADDRZ_RFC4007_RE = IPv6_RE + '(?:(?:%25|%)' + ZONE_ID + ')?'
+IPv6_ADDRZ_RE = IPv6_RE + '(?:%25' + ZONE_ID + ')?'
+
+IP_LITERAL_RE = r'\[({0}|{1})\]'.format(
+ IPv6_ADDRZ_RFC4007_RE,
+ IPv_FUTURE_RE,
+)
+
+# Pattern for matching the host piece of the authority
+HOST_RE = HOST_PATTERN = '({0}|{1}|{2})'.format(
+ REG_NAME,
+ IPv4_RE,
+ IP_LITERAL_RE,
+)
+USERINFO_RE = '^([' + UNRESERVED_RE + SUB_DELIMITERS_RE + ':]|%s)+' % (
+ PCT_ENCODED
+)
+PORT_RE = '[0-9]{1,5}'
+
+# ####################
+# Path Matcher Section
+# ####################
+
+# See http://tools.ietf.org/html/rfc3986#section-3.3 for more information
+# about the path patterns defined below.
+segments = {
+ 'segment': PCHAR + '*',
+ # Non-zero length segment
+ 'segment-nz': PCHAR + '+',
+ # Non-zero length segment without ":"
+ 'segment-nz-nc': PCHAR.replace(':', '') + '+'
+}
+
+# Path types taken from Section 3.3 (linked above)
+PATH_EMPTY = '^$'
+PATH_ROOTLESS = '%(segment-nz)s(/%(segment)s)*' % segments
+PATH_NOSCHEME = '%(segment-nz-nc)s(/%(segment)s)*' % segments
+PATH_ABSOLUTE = '/(%s)?' % PATH_ROOTLESS
+PATH_ABEMPTY = '(/%(segment)s)*' % segments
+PATH_RE = '^(%s|%s|%s|%s|%s)$' % (
+ PATH_ABEMPTY, PATH_ABSOLUTE, PATH_NOSCHEME, PATH_ROOTLESS, PATH_EMPTY
+)
+
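# Illustrative sketch (not part of the patch): the unanchored path forms
# can be checked individually with re.fullmatch.
import re
from pip._vendor.urllib3.packages.rfc3986 import abnf_regexp

assert re.fullmatch(abnf_regexp.PATH_ABSOLUTE, '/a/b')
assert re.fullmatch(abnf_regexp.PATH_ROOTLESS, 'a/b')
# ':' is forbidden in the first segment of a noscheme path
assert not re.fullmatch(abnf_regexp.PATH_NOSCHEME, 'a:b')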
+FRAGMENT_RE = QUERY_RE = (
+ '^([/?:@' + UNRESERVED_RE + SUB_DELIMITERS_RE + ']|%s)*$' % PCT_ENCODED
+)
+
+# ##########################
+# Relative reference matcher
+# ##########################
+
+# See http://tools.ietf.org/html/rfc3986#section-4.2 for details
+RELATIVE_PART_RE = '(//%s%s|%s|%s|%s)' % (
+ COMPONENT_PATTERN_DICT['authority'],
+ PATH_ABEMPTY,
+ PATH_ABSOLUTE,
+ PATH_NOSCHEME,
+ PATH_EMPTY,
+)
+
+# See http://tools.ietf.org/html/rfc3986#section-3 for definition
+HIER_PART_RE = '(//%s%s|%s|%s|%s)' % (
+ COMPONENT_PATTERN_DICT['authority'],
+ PATH_ABEMPTY,
+ PATH_ABSOLUTE,
+ PATH_ROOTLESS,
+ PATH_EMPTY,
+)
+
+# ###############
+# IRIs / RFC 3987
+# ###############
+
+# Only wide-unicode gets the high-ranges of UCSCHAR
+if sys.maxunicode > 0xFFFF: # pragma: no cover
+ IPRIVATE = u'\uE000-\uF8FF\U000F0000-\U000FFFFD\U00100000-\U0010FFFD'
+ UCSCHAR_RE = (
+ u'\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF'
+ u'\U00010000-\U0001FFFD\U00020000-\U0002FFFD'
+ u'\U00030000-\U0003FFFD\U00040000-\U0004FFFD'
+ u'\U00050000-\U0005FFFD\U00060000-\U0006FFFD'
+ u'\U00070000-\U0007FFFD\U00080000-\U0008FFFD'
+ u'\U00090000-\U0009FFFD\U000A0000-\U000AFFFD'
+ u'\U000B0000-\U000BFFFD\U000C0000-\U000CFFFD'
+ u'\U000D0000-\U000DFFFD\U000E1000-\U000EFFFD'
+ )
+else: # pragma: no cover
+ IPRIVATE = u'\uE000-\uF8FF'
+ UCSCHAR_RE = (
+ u'\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF'
+ )
+
+IUNRESERVED_RE = u'A-Za-z0-9\\._~\\-' + UCSCHAR_RE
+IPCHAR = u'([' + IUNRESERVED_RE + SUB_DELIMITERS_RE + u':@]|%s)' % PCT_ENCODED
+
+isegments = {
+ 'isegment': IPCHAR + u'*',
+ # Non-zero length segment
+ 'isegment-nz': IPCHAR + u'+',
+ # Non-zero length segment without ":"
+ 'isegment-nz-nc': IPCHAR.replace(':', '') + u'+'
+}
+
+IPATH_ROOTLESS = u'%(isegment-nz)s(/%(isegment)s)*' % isegments
+IPATH_NOSCHEME = u'%(isegment-nz-nc)s(/%(isegment)s)*' % isegments
+IPATH_ABSOLUTE = u'/(?:%s)?' % IPATH_ROOTLESS
+IPATH_ABEMPTY = u'(?:/%(isegment)s)*' % isegments
+IPATH_RE = u'^(?:%s|%s|%s|%s|%s)$' % (
+ IPATH_ABEMPTY, IPATH_ABSOLUTE, IPATH_NOSCHEME, IPATH_ROOTLESS, PATH_EMPTY
+)
+
+IREGULAR_NAME_RE = IREG_NAME = u'(?:{0}|[{1}])*'.format(
+ u'%[0-9A-Fa-f]{2}', SUB_DELIMITERS_RE + IUNRESERVED_RE
+)
+
+IHOST_RE = IHOST_PATTERN = u'({0}|{1}|{2})'.format(
+ IREG_NAME,
+ IPv4_RE,
+ IP_LITERAL_RE,
+)
+
+IUSERINFO_RE = u'^(?:[' + IUNRESERVED_RE + SUB_DELIMITERS_RE + u':]|%s)+' % (
+ PCT_ENCODED
+)
+
+IFRAGMENT_RE = (u'^(?:[/?:@' + IUNRESERVED_RE + SUB_DELIMITERS_RE
+ + u']|%s)*$' % PCT_ENCODED)
+IQUERY_RE = (u'^(?:[/?:@' + IUNRESERVED_RE + SUB_DELIMITERS_RE
+ + IPRIVATE + u']|%s)*$' % PCT_ENCODED)
+
+IRELATIVE_PART_RE = u'(//%s%s|%s|%s|%s)' % (
+ COMPONENT_PATTERN_DICT['authority'],
+ IPATH_ABEMPTY,
+ IPATH_ABSOLUTE,
+ IPATH_NOSCHEME,
+ PATH_EMPTY,
+)
+
+IHIER_PART_RE = u'(//%s%s|%s|%s|%s)' % (
+ COMPONENT_PATTERN_DICT['authority'],
+ IPATH_ABEMPTY,
+ IPATH_ABSOLUTE,
+ IPATH_ROOTLESS,
+ PATH_EMPTY,
+)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.py
new file mode 100644
index 00000000..ddc4a1cd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/api.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Module containing the simple and functional API for rfc3986.
+
+This module defines functions and provides access to the public attributes
+and classes of rfc3986.
+"""
+
+from .iri import IRIReference
+from .parseresult import ParseResult
+from .uri import URIReference
+
+
+def uri_reference(uri, encoding='utf-8'):
+ """Parse a URI string into a URIReference.
+
+ This is a convenience function. You could achieve the same end by using
+ ``URIReference.from_string(uri)``.
+
+ :param str uri: The URI which needs to be parsed into a reference.
+ :param str encoding: The encoding of the string provided
+ :returns: A parsed URI
+ :rtype: :class:`URIReference`
+ """
+ return URIReference.from_string(uri, encoding)
+
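# Illustrative sketch (not part of the patch): parsing via the functional
# API, using the vendored package path added by this patch.
from pip._vendor.urllib3.packages.rfc3986 import api

ref = api.uri_reference('https://example.com/path?k=v')
print(ref.scheme, ref.authority, ref.path, ref.query)
# -> https example.com /path k=v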
+
+def iri_reference(iri, encoding='utf-8'):
+ """Parse a IRI string into an IRIReference.
+
+ This is a convenience function. You could achieve the same end by using
+ ``IRIReference.from_string(iri)``.
+
+ :param str iri: The IRI which needs to be parsed into a reference.
+ :param str encoding: The encoding of the string provided
+ :returns: A parsed IRI
+ :rtype: :class:`IRIReference`
+ """
+ return IRIReference.from_string(iri, encoding)
+
+
+def is_valid_uri(uri, encoding='utf-8', **kwargs):
+ """Determine if the URI given is valid.
+
+ This is a convenience function. You could use either
+ ``uri_reference(uri).is_valid()`` or
+ ``URIReference.from_string(uri).is_valid()`` to achieve the same result.
+
+ :param str uri: The URI to be validated.
+ :param str encoding: The encoding of the string provided
+ :param bool require_scheme: Set to ``True`` if you wish to require the
+ presence of the scheme component.
+ :param bool require_authority: Set to ``True`` if you wish to require the
+ presence of the authority component.
+ :param bool require_path: Set to ``True`` if you wish to require the
+ presence of the path component.
+ :param bool require_query: Set to ``True`` if you wish to require the
+ presence of the query component.
+ :param bool require_fragment: Set to ``True`` if you wish to require the
+ presence of the fragment component.
+ :returns: ``True`` if the URI is valid, ``False`` otherwise.
+ :rtype: bool
+ """
+ return URIReference.from_string(uri, encoding).is_valid(**kwargs)
+
+
+def normalize_uri(uri, encoding='utf-8'):
+ """Normalize the given URI.
+
+ This is a convenience function. You could use either
+ ``uri_reference(uri).normalize().unsplit()`` or
+ ``URIReference.from_string(uri).normalize().unsplit()`` instead.
+
+ :param str uri: The URI to be normalized.
+ :param str encoding: The encoding of the string provided
+ :returns: The normalized URI.
+ :rtype: str
+ """
+ normalized_reference = URIReference.from_string(uri, encoding).normalize()
+ return normalized_reference.unsplit()
+
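# Illustrative sketch (not part of the patch): normalization lower-cases
# the scheme and host and upper-cases percent-encodings; it does not
# decode them.
from pip._vendor.urllib3.packages.rfc3986 import api

print(api.normalize_uri('HTTP://Example.COM/%7efoo'))
# -> http://example.com/%7Efoo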
+
+def urlparse(uri, encoding='utf-8'):
+ """Parse a given URI and return a ParseResult.
+
+ This is a partial replacement of the standard library's urlparse function.
+
+ :param str uri: The URI to be parsed.
+ :param str encoding: The encoding of the string provided.
+ :returns: A parsed URI
+ :rtype: :class:`~rfc3986.parseresult.ParseResult`
+ """
+ return ParseResult.from_string(uri, encoding, strict=False)
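# Illustrative sketch (not part of the patch): the urlparse shim mirrors
# the standard library's accessor names.
from pip._vendor.urllib3.packages.rfc3986 import api

result = api.urlparse('https://example.com/x?q=1')
print(result.hostname)  # example.com
print(result.geturl())  # https://example.com/x?q=1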
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.py
new file mode 100644
index 00000000..79342799
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/builder.py
@@ -0,0 +1,298 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 Ian Stapleton Cordasco
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module containing the logic for the URIBuilder object."""
+from . import compat
+from . import normalizers
+from . import uri
+
+
+class URIBuilder(object):
+ """Object to aid in building up a URI Reference from parts.
+
+ .. note::
+
+ This object should be instantiated by the user, but it's recommended
+ that it is not provided with arguments. Instead, use the available
+ methods to populate the fields.
+
+ """
+
+ def __init__(self, scheme=None, userinfo=None, host=None, port=None,
+ path=None, query=None, fragment=None):
+ """Initialize our URI builder.
+
+ :param str scheme:
+ (optional)
+ :param str userinfo:
+ (optional)
+ :param str host:
+ (optional)
+ :param int port:
+ (optional)
+ :param str path:
+ (optional)
+ :param str query:
+ (optional)
+ :param str fragment:
+ (optional)
+ """
+ self.scheme = scheme
+ self.userinfo = userinfo
+ self.host = host
+ self.port = port
+ self.path = path
+ self.query = query
+ self.fragment = fragment
+
+ def __repr__(self):
+ """Provide a convenient view of our builder object."""
+ formatstr = ('URIBuilder(scheme={b.scheme}, userinfo={b.userinfo}, '
+ 'host={b.host}, port={b.port}, path={b.path}, '
+ 'query={b.query}, fragment={b.fragment})')
+ return formatstr.format(b=self)
+
+ def add_scheme(self, scheme):
+ """Add a scheme to our builder object.
+
+ After normalizing, this will generate a new URIBuilder instance with
+ the specified scheme and all other attributes the same.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_scheme('HTTPS')
+ URIBuilder(scheme='https', userinfo=None, host=None, port=None,
+ path=None, query=None, fragment=None)
+
+ """
+ scheme = normalizers.normalize_scheme(scheme)
+ return URIBuilder(
+ scheme=scheme,
+ userinfo=self.userinfo,
+ host=self.host,
+ port=self.port,
+ path=self.path,
+ query=self.query,
+ fragment=self.fragment,
+ )
+
+ def add_credentials(self, username, password):
+ """Add credentials as the userinfo portion of the URI.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_credentials('root', 's3crete')
+ URIBuilder(scheme=None, userinfo='root:s3crete', host=None,
+ port=None, path=None, query=None, fragment=None)
+
+ >>> URIBuilder().add_credentials('root', None)
+ URIBuilder(scheme=None, userinfo='root', host=None,
+ port=None, path=None, query=None, fragment=None)
+ """
+ if username is None:
+ raise ValueError('Username cannot be None')
+ userinfo = normalizers.normalize_username(username)
+
+ if password is not None:
+ userinfo = '{}:{}'.format(
+ userinfo,
+ normalizers.normalize_password(password),
+ )
+
+ return URIBuilder(
+ scheme=self.scheme,
+ userinfo=userinfo,
+ host=self.host,
+ port=self.port,
+ path=self.path,
+ query=self.query,
+ fragment=self.fragment,
+ )
+
+ def add_host(self, host):
+ """Add hostname to the URI.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_host('google.com')
+ URIBuilder(scheme=None, userinfo=None, host='google.com',
+ port=None, path=None, query=None, fragment=None)
+
+ """
+ return URIBuilder(
+ scheme=self.scheme,
+ userinfo=self.userinfo,
+ host=normalizers.normalize_host(host),
+ port=self.port,
+ path=self.path,
+ query=self.query,
+ fragment=self.fragment,
+ )
+
+ def add_port(self, port):
+ """Add port to the URI.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_port(80)
+ URIBuilder(scheme=None, userinfo=None, host=None, port='80',
+ path=None, query=None, fragment=None)
+
+ >>> URIBuilder().add_port(443)
+ URIBuilder(scheme=None, userinfo=None, host=None, port='443',
+ path=None, query=None, fragment=None)
+
+ """
+ port_int = int(port)
+ if port_int < 0:
+ raise ValueError(
+ 'ports are not allowed to be negative. You provided {}'.format(
+ port_int,
+ )
+ )
+ if port_int > 65535:
+ raise ValueError(
+ 'ports are not allowed to be larger than 65535. '
+ 'You provided {}'.format(
+ port_int,
+ )
+ )
+
+ return URIBuilder(
+ scheme=self.scheme,
+ userinfo=self.userinfo,
+ host=self.host,
+ port='{}'.format(port_int),
+ path=self.path,
+ query=self.query,
+ fragment=self.fragment,
+ )
+
+ def add_path(self, path):
+ """Add a path to the URI.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_path('sigmavirus24/rfc3986')
+ URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+ path='/sigmavirus24/rfc3986', query=None, fragment=None)
+
+ >>> URIBuilder().add_path('/checkout.php')
+ URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+ path='/checkout.php', query=None, fragment=None)
+
+ """
+ if not path.startswith('/'):
+ path = '/{}'.format(path)
+
+ return URIBuilder(
+ scheme=self.scheme,
+ userinfo=self.userinfo,
+ host=self.host,
+ port=self.port,
+ path=normalizers.normalize_path(path),
+ query=self.query,
+ fragment=self.fragment,
+ )
+
+ def add_query_from(self, query_items):
+ """Generate and add a query a dictionary or list of tuples.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_query_from({'a': 'b c'})
+ URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+ path=None, query='a=b+c', fragment=None)
+
+ >>> URIBuilder().add_query_from([('a', 'b c')])
+ URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+ path=None, query='a=b+c', fragment=None)
+
+ """
+ query = normalizers.normalize_query(compat.urlencode(query_items))
+
+ return URIBuilder(
+ scheme=self.scheme,
+ userinfo=self.userinfo,
+ host=self.host,
+ port=self.port,
+ path=self.path,
+ query=query,
+ fragment=self.fragment,
+ )
+
+ def add_query(self, query):
+ """Add a pre-formated query string to the URI.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_query('a=b&c=d')
+ URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+ path=None, query='a=b&c=d', fragment=None)
+
+ """
+ return URIBuilder(
+ scheme=self.scheme,
+ userinfo=self.userinfo,
+ host=self.host,
+ port=self.port,
+ path=self.path,
+ query=normalizers.normalize_query(query),
+ fragment=self.fragment,
+ )
+
+ def add_fragment(self, fragment):
+ """Add a fragment to the URI.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_fragment('section-2.6.1')
+ URIBuilder(scheme=None, userinfo=None, host=None, port=None,
+ path=None, query=None, fragment='section-2.6.1')
+
+ """
+ return URIBuilder(
+ scheme=self.scheme,
+ userinfo=self.userinfo,
+ host=self.host,
+ port=self.port,
+ path=self.path,
+ query=self.query,
+ fragment=normalizers.normalize_fragment(fragment),
+ )
+
+ def finalize(self):
+ """Create a URIReference from our builder.
+
+ .. code-block:: python
+
+ >>> URIBuilder().add_scheme('https').add_host('github.com'
+ ... ).add_path('sigmavirus24/rfc3986').finalize().unsplit()
+ 'https://github.com/sigmavirus24/rfc3986'
+
+ >>> URIBuilder().add_scheme('https').add_host('github.com'
+ ... ).add_path('sigmavirus24/rfc3986').add_credentials(
+ ... 'sigmavirus24', 'not-re@l').finalize().unsplit()
+ 'https://sigmavirus24:not-re%40l@github.com/sigmavirus24/rfc3986'
+
+ """
+ return uri.URIReference(
+ self.scheme,
+ normalizers.normalize_authority(
+ (self.userinfo, self.host, self.port)
+ ),
+ self.path,
+ self.query,
+ self.fragment,
+ )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.py
new file mode 100644
index 00000000..8968c384
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/compat.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Compatibility module for Python 2 and 3 support."""
+import sys
+
+try:
+ from urllib.parse import quote as urlquote
+except ImportError: # Python 2.x
+ from urllib import quote as urlquote
+
+try:
+ from urllib.parse import urlencode
+except ImportError: # Python 2.x
+ from urllib import urlencode
+
+__all__ = (
+ 'to_bytes',
+ 'to_str',
+ 'urlquote',
+ 'urlencode',
+)
+
+PY3 = (3, 0) <= sys.version_info < (4, 0)
+PY2 = (2, 6) <= sys.version_info < (2, 8)
+
+
+if PY3:
+ unicode = str # Python 3.x
+
+
+def to_str(b, encoding='utf-8'):
+ """Ensure that b is text in the specified encoding."""
+ if hasattr(b, 'decode') and not isinstance(b, unicode):
+ b = b.decode(encoding)
+ return b
+
+
+def to_bytes(s, encoding='utf-8'):
+ """Ensure that s is converted to bytes from the encoding."""
+ if hasattr(s, 'encode') and not isinstance(s, bytes):
+ s = s.encode(encoding)
+ return s
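# Illustrative sketch (not part of the patch): both helpers are no-ops on
# values already of the target type.
from pip._vendor.urllib3.packages.rfc3986 import compat

assert compat.to_bytes(u'caf\u00e9') == b'caf\xc3\xa9'
assert compat.to_str(b'caf\xc3\xa9') == u'caf\u00e9'
assert compat.to_str(u'already text') == u'already text'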
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.py
new file mode 100644
index 00000000..da8ca7cb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/exceptions.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+"""Exceptions module for rfc3986."""
+
+from . import compat
+
+
+class RFC3986Exception(Exception):
+ """Base class for all rfc3986 exception classes."""
+
+ pass
+
+
+class InvalidAuthority(RFC3986Exception):
+ """Exception when the authority string is invalid."""
+
+ def __init__(self, authority):
+ """Initialize the exception with the invalid authority."""
+ super(InvalidAuthority, self).__init__(
+ u"The authority ({0}) is not valid.".format(
+ compat.to_str(authority)))
+
+
+class InvalidPort(RFC3986Exception):
+ """Exception when the port is invalid."""
+
+ def __init__(self, port):
+ """Initialize the exception with the invalid port."""
+ super(InvalidPort, self).__init__(
+ 'The port ("{0}") is not valid.'.format(port))
+
+
+class ResolutionError(RFC3986Exception):
+ """Exception to indicate a failure to resolve a URI."""
+
+ def __init__(self, uri):
+ """Initialize the error with the failed URI."""
+ super(ResolutionError, self).__init__(
+ "{0} is not an absolute URI.".format(uri.unsplit()))
+
+
+class ValidationError(RFC3986Exception):
+ """Exception raised during Validation of a URI."""
+
+ pass
+
+
+class MissingComponentError(ValidationError):
+ """Exception raised when a required component is missing."""
+
+ def __init__(self, uri, *component_names):
+ """Initialize the error with the missing component name."""
+ verb = 'was'
+ if len(component_names) > 1:
+ verb = 'were'
+
+ self.uri = uri
+ self.components = sorted(component_names)
+ components = ', '.join(self.components)
+ super(MissingComponentError, self).__init__(
+ "{} {} required but missing".format(components, verb),
+ uri,
+ self.components,
+ )
+
+
+class UnpermittedComponentError(ValidationError):
+ """Exception raised when a component has an unpermitted value."""
+
+ def __init__(self, component_name, component_value, allowed_values):
+ """Initialize the error with the unpermitted component."""
+ super(UnpermittedComponentError, self).__init__(
+ "{} was required to be one of {!r} but was {!r}".format(
+ component_name, list(sorted(allowed_values)), component_value,
+ ),
+ component_name,
+ component_value,
+ allowed_values,
+ )
+ self.component_name = component_name
+ self.component_value = component_value
+ self.allowed_values = allowed_values
+
+
+class PasswordForbidden(ValidationError):
+ """Exception raised when a URL has a password in the userinfo section."""
+
+ def __init__(self, uri):
+ """Initialize the error with the URI that failed validation."""
+ unsplit = getattr(uri, 'unsplit', lambda: uri)
+ super(PasswordForbidden, self).__init__(
+ '"{}" contained a password when validation forbade it'.format(
+ unsplit()
+ )
+ )
+ self.uri = uri
+
+
+class InvalidComponentsError(ValidationError):
+ """Exception raised when one or more components are invalid."""
+
+ def __init__(self, uri, *component_names):
+ """Initialize the error with the invalid component name(s)."""
+ verb = 'was'
+ if len(component_names) > 1:
+ verb = 'were'
+
+ self.uri = uri
+ self.components = sorted(component_names)
+ components = ', '.join(self.components)
+ super(InvalidComponentsError, self).__init__(
+ "{} {} found to be invalid".format(components, verb),
+ uri,
+ self.components,
+ )
+
+
+class MissingDependencyError(RFC3986Exception):
+ """Exception raised when an IRI is encoded without the 'idna' module."""
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.py
new file mode 100644
index 00000000..416cae4a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/iri.py
@@ -0,0 +1,147 @@
+"""Module containing the implementation of the IRIReference class."""
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Copyright (c) 2015 Ian Stapleton Cordasco
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from collections import namedtuple
+
+from . import compat
+from . import exceptions
+from . import misc
+from . import normalizers
+from . import uri
+
+
+try:
+ from pip._vendor import idna
+except ImportError: # pragma: no cover
+ idna = None
+
+
+class IRIReference(namedtuple('IRIReference', misc.URI_COMPONENTS),
+ uri.URIMixin):
+ """Immutable object representing a parsed IRI Reference.
+
+ Can be encoded into a URIReference object via the procedure
+ specified in RFC 3987 Section 3.1.
+
+ .. note::
+ The IRI submodule is a new interface and may possibly change in
+ the future. Check for changes to the interface when upgrading.
+ """
+
+ slots = ()
+
+ def __new__(cls, scheme, authority, path, query, fragment,
+ encoding='utf-8'):
+ """Create a new IRIReference."""
+ ref = super(IRIReference, cls).__new__(
+ cls,
+ scheme or None,
+ authority or None,
+ path or None,
+ query,
+ fragment)
+ ref.encoding = encoding
+ return ref
+
+ def __eq__(self, other):
+ """Compare this reference to another."""
+ other_ref = other
+ if isinstance(other, tuple):
+ other_ref = self.__class__(*other)
+ elif not isinstance(other, IRIReference):
+ try:
+ other_ref = self.__class__.from_string(other)
+ except TypeError:
+ raise TypeError(
+ 'Unable to compare {0}() to {1}()'.format(
+ type(self).__name__, type(other).__name__))
+
+ # See http://tools.ietf.org/html/rfc3986#section-6.2
+ return tuple(self) == tuple(other_ref)
+
+ def _match_subauthority(self):
+ return misc.ISUBAUTHORITY_MATCHER.match(self.authority)
+
+ @classmethod
+ def from_string(cls, iri_string, encoding='utf-8'):
+ """Parse a IRI reference from the given unicode IRI string.
+
+ :param str iri_string: Unicode IRI to be parsed into a reference.
+ :param str encoding: The encoding of the string provided
+ :returns: :class:`IRIReference` or subclass thereof
+ """
+ iri_string = compat.to_str(iri_string, encoding)
+
+ split_iri = misc.IRI_MATCHER.match(iri_string).groupdict()
+ return cls(
+ split_iri['scheme'], split_iri['authority'],
+ normalizers.encode_component(split_iri['path'], encoding),
+ normalizers.encode_component(split_iri['query'], encoding),
+ normalizers.encode_component(split_iri['fragment'], encoding),
+ encoding,
+ )
+
+ def encode(self, idna_encoder=None): # noqa: C901
+ """Encode an IRIReference into a URIReference instance.
+
+ If the ``idna`` module is installed or the ``rfc3986[idna]``
+ extra is used then unicode characters in the IRI host
+ component will be encoded with IDNA2008.
+
+ :param idna_encoder:
+ (optional) Function that encodes each part of the host
+ component. If not given and the vendored ``idna`` module is
+ unavailable, encoding raises an exception when the IRI has a
+ host component.
+ :rtype: uri.URIReference
+ :returns: A URI reference
+ """
+ authority = self.authority
+ if authority:
+ if idna_encoder is None:
+ if idna is None: # pragma: no cover
+ raise exceptions.MissingDependencyError(
+ "Could not import the 'idna' module "
+ "and the IRI hostname requires encoding"
+ )
+
+ def idna_encoder(name):
+ if any(ord(c) > 128 for c in name):
+ try:
+ return idna.encode(name.lower(),
+ strict=True,
+ std3_rules=True)
+ except idna.IDNAError:
+ raise exceptions.InvalidAuthority(self.authority)
+ return name
+
+ authority = ""
+ if self.host:
+ authority = ".".join([compat.to_str(idna_encoder(part))
+ for part in self.host.split(".")])
+
+ if self.userinfo is not None:
+ authority = (normalizers.encode_component(
+ self.userinfo, self.encoding) + '@' + authority)
+
+ if self.port is not None:
+ authority += ":" + str(self.port)
+
+ return uri.URIReference(self.scheme,
+ authority,
+ path=self.path,
+ query=self.query,
+ fragment=self.fragment,
+ encoding=self.encoding)
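# Illustrative sketch (not part of the patch): encoding an IRI down to a
# URI. The host labels go through the vendored 'idna' module (IDNA2008);
# the expected output below reuses idna's own documented example labels.
from pip._vendor.urllib3.packages.rfc3986.iri import IRIReference

iri = IRIReference.from_string(
    u'http://\u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8/caf\u00e9')
print(iri.encode().unsplit())
# -> http://xn--eckwd4c7c.xn--zckzah/caf%C3%A9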
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.py
new file mode 100644
index 00000000..b735e044
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/misc.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Module containing compiled regular expressions and constants.
+
+This module contains important constants, patterns, and compiled regular
+expressions for parsing and validating URIs and their components.
+"""
+
+import re
+
+from . import abnf_regexp
+
+# These are enumerated for the named tuple used as a superclass of
+# URIReference
+URI_COMPONENTS = ['scheme', 'authority', 'path', 'query', 'fragment']
+
+important_characters = {
+ 'generic_delimiters': abnf_regexp.GENERIC_DELIMITERS,
+ 'sub_delimiters': abnf_regexp.SUB_DELIMITERS,
+ # We need to escape the '*' in this case
+ 're_sub_delimiters': abnf_regexp.SUB_DELIMITERS_RE,
+ 'unreserved_chars': abnf_regexp.UNRESERVED_CHARS,
+ # We need to escape the '-' in this case:
+ 're_unreserved': abnf_regexp.UNRESERVED_RE,
+}
+
+# For details about delimiters and reserved characters, see:
+# http://tools.ietf.org/html/rfc3986#section-2.2
+GENERIC_DELIMITERS = abnf_regexp.GENERIC_DELIMITERS_SET
+SUB_DELIMITERS = abnf_regexp.SUB_DELIMITERS_SET
+RESERVED_CHARS = abnf_regexp.RESERVED_CHARS_SET
+# For details about unreserved characters, see:
+# http://tools.ietf.org/html/rfc3986#section-2.3
+UNRESERVED_CHARS = abnf_regexp.UNRESERVED_CHARS_SET
+NON_PCT_ENCODED = abnf_regexp.NON_PCT_ENCODED_SET
+
+URI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE)
+
+SUBAUTHORITY_MATCHER = re.compile((
+ '^(?:(?P<userinfo>{0})@)?' # userinfo
+ '(?P<host>{1})' # host
+ ':?(?P<port>{2})?$' # port
+ ).format(abnf_regexp.USERINFO_RE,
+ abnf_regexp.HOST_PATTERN,
+ abnf_regexp.PORT_RE))
+
+
+HOST_MATCHER = re.compile('^' + abnf_regexp.HOST_RE + '$')
+IPv4_MATCHER = re.compile('^' + abnf_regexp.IPv4_RE + '$')
+IPv6_MATCHER = re.compile(r'^\[' + abnf_regexp.IPv6_ADDRZ_RFC4007_RE + r'\]$')
+
+# Used by host validator
+IPv6_NO_RFC4007_MATCHER = re.compile(r'^\[%s\]$' % (
+ abnf_regexp.IPv6_ADDRZ_RE
+))
+
+# Matcher used to validate path components
+PATH_MATCHER = re.compile(abnf_regexp.PATH_RE)
+
+
+# ##################################
+# Query and Fragment Matcher Section
+# ##################################
+
+QUERY_MATCHER = re.compile(abnf_regexp.QUERY_RE)
+
+FRAGMENT_MATCHER = QUERY_MATCHER
+
+# Scheme validation, see: http://tools.ietf.org/html/rfc3986#section-3.1
+SCHEME_MATCHER = re.compile('^{0}$'.format(abnf_regexp.SCHEME_RE))
+
+RELATIVE_REF_MATCHER = re.compile(r'^%s(\?%s)?(#%s)?$' % (
+ abnf_regexp.RELATIVE_PART_RE,
+ abnf_regexp.QUERY_RE,
+ abnf_regexp.FRAGMENT_RE,
+))
+
+# See http://tools.ietf.org/html/rfc3986#section-4.3
+ABSOLUTE_URI_MATCHER = re.compile(r'^%s:%s(\?%s)?$' % (
+ abnf_regexp.COMPONENT_PATTERN_DICT['scheme'],
+ abnf_regexp.HIER_PART_RE,
+ abnf_regexp.QUERY_RE[1:-1],
+))
+
+# ###############
+# IRIs / RFC 3987
+# ###############
+
+IRI_MATCHER = re.compile(abnf_regexp.URL_PARSING_RE, re.UNICODE)
+
+ISUBAUTHORITY_MATCHER = re.compile((
+ u'^(?:(?P<userinfo>{0})@)?' # iuserinfo
+ u'(?P<host>{1})' # ihost
+ u':?(?P<port>{2})?$' # port
+ ).format(abnf_regexp.IUSERINFO_RE,
+ abnf_regexp.IHOST_RE,
+ abnf_regexp.PORT_RE), re.UNICODE)
+
+
+# Path merger as defined in http://tools.ietf.org/html/rfc3986#section-5.2.3
+def merge_paths(base_uri, relative_path):
+ """Merge a base URI's path with a relative URI's path."""
+ if base_uri.path is None and base_uri.authority is not None:
+ return '/' + relative_path
+ else:
+ path = base_uri.path or ''
+ index = path.rfind('/')
+ return path[:index] + '/' + relative_path
+
+
+UseExisting = object()
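# Illustrative sketch (not part of the patch): the two merge cases from
# RFC 3986 section 5.2.3.
from pip._vendor.urllib3.packages.rfc3986 import misc, uri

base = uri.URIReference.from_string('http://example.com/a/b')
print(misc.merge_paths(base, 'c'))  # '/a/c' (everything up to the last '/')
no_path = uri.URIReference.from_string('http://example.com')
print(misc.merge_paths(no_path, 'c'))  # '/c' (authority present, no path)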
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.py
new file mode 100644
index 00000000..2eb1bb36
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/normalizers.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module with functions to normalize components."""
+import re
+
+from . import compat
+from . import misc
+
+
+def normalize_scheme(scheme):
+ """Normalize the scheme component."""
+ return scheme.lower()
+
+
+def normalize_authority(authority):
+ """Normalize an authority tuple to a string."""
+ userinfo, host, port = authority
+ result = ''
+ if userinfo:
+ result += normalize_percent_characters(userinfo) + '@'
+ if host:
+ result += normalize_host(host)
+ if port:
+ result += ':' + port
+ return result
+
+
+def normalize_username(username):
+ """Normalize a username to make it safe to include in userinfo."""
+ return compat.urlquote(username)
+
+
+def normalize_password(password):
+ """Normalize a password to make safe for userinfo."""
+ return compat.urlquote(password)
+
+
+def normalize_host(host):
+ """Normalize a host string."""
+ if misc.IPv6_MATCHER.match(host):
+ percent = host.find('%')
+ if percent != -1:
+ percent_25 = host.find('%25')
+
+ # Replace RFC 4007 IPv6 Zone ID delimiter '%' with '%25'
+ # from RFC 6874. If the host is '[<IPv6 addr>%25]' then we
+ # assume RFC 4007 and normalize to '[<IPv6 addr>%2525]'
+ if percent_25 == -1 or percent < percent_25 or \
+ (percent == percent_25 and percent_25 == len(host) - 4):
+ host = host.replace('%', '%25', 1)
+
+ # Don't normalize the casing of the Zone ID
+ return host[:percent].lower() + host[percent:]
+
+ return host.lower()
+
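# Illustrative sketch (not part of the patch): case-folding stops at an
# IPv6 Zone ID, and a bare RFC 4007 '%' delimiter is rewritten to the
# RFC 6874 '%25' form.
from pip._vendor.urllib3.packages.rfc3986.normalizers import normalize_host

print(normalize_host('EXAMPLE.COM'))     # example.com
print(normalize_host('[FE80::1%eth0]'))  # [fe80::1%25eth0]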
+
+def normalize_path(path):
+ """Normalize the path string."""
+ if not path:
+ return path
+
+ path = normalize_percent_characters(path)
+ return remove_dot_segments(path)
+
+
+def normalize_query(query):
+ """Normalize the query string."""
+ if not query:
+ return query
+ return normalize_percent_characters(query)
+
+
+def normalize_fragment(fragment):
+ """Normalize the fragment string."""
+ if not fragment:
+ return fragment
+ return normalize_percent_characters(fragment)
+
+
+PERCENT_MATCHER = re.compile('%[A-Fa-f0-9]{2}')
+
+
+def normalize_percent_characters(s):
+ """All percent characters should be upper-cased.
+
+ For example, ``"%3afoo%DF%ab"`` should be turned into ``"%3Afoo%DF%AB"``.
+ """
+ matches = set(PERCENT_MATCHER.findall(s))
+ for m in matches:
+ if not m.isupper():
+ s = s.replace(m, m.upper())
+ return s
+
+
+def remove_dot_segments(s):
+ """Remove dot segments from the string.
+
+ See also Section 5.2.4 of :rfc:`3986`.
+ """
+ # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+ segments = s.split('/') # Turn the path into a list of segments
+ output = [] # Initialize the variable to use to store output
+
+ for segment in segments:
+ # '.' is the current directory, so ignore it, it is superfluous
+ if segment == '.':
+ continue
+ # Anything other than '..', should be appended to the output
+ elif segment != '..':
+ output.append(segment)
+ # In this case segment == '..', if we can, we should pop the last
+ # element
+ elif output:
+ output.pop()
+
+ # If the path starts with '/' and the output is empty or the first string
+ # is non-empty
+ if s.startswith('/') and (not output or output[0]):
+ output.insert(0, '')
+
+ # If the path starts with '/.' or '/..' ensure we add one more empty
+ # string to add a trailing '/'
+ if s.endswith(('/.', '/..')):
+ output.append('')
+
+ return '/'.join(output)
+
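# Illustrative sketch (not part of the patch): the two worked examples
# from RFC 3986 section 5.2.4.
from pip._vendor.urllib3.packages.rfc3986.normalizers import (
    remove_dot_segments,
)

assert remove_dot_segments('/a/b/c/./../../g') == '/a/g'
assert remove_dot_segments('mid/content=5/../6') == 'mid/6'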
+
+def encode_component(uri_component, encoding):
+ """Encode the specific component in the provided encoding."""
+ if uri_component is None:
+ return uri_component
+
+ # Try to see if the component we're encoding is already percent-encoded
+ # so we can skip all '%' characters but still encode all others.
+ percent_encodings = len(PERCENT_MATCHER.findall(
+ compat.to_str(uri_component, encoding)))
+
+ uri_bytes = compat.to_bytes(uri_component, encoding)
+ is_percent_encoded = percent_encodings == uri_bytes.count(b'%')
+
+ encoded_uri = bytearray()
+
+ for i in range(0, len(uri_bytes)):
+ # Will return a single character bytestring on both Python 2 & 3
+ byte = uri_bytes[i:i+1]
+ byte_ord = ord(byte)
+ if ((is_percent_encoded and byte == b'%')
+ or (byte_ord < 128 and byte.decode() in misc.NON_PCT_ENCODED)):
+ encoded_uri.extend(byte)
+ continue
+ encoded_uri.extend('%{0:02x}'.format(byte_ord).encode().upper())
+
+ return encoded_uri.decode(encoding)
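# Illustrative sketch (not part of the patch): input that already looks
# percent-encoded is left alone, while a stray '%' is itself escaped.
from pip._vendor.urllib3.packages.rfc3986.normalizers import encode_component

assert encode_component('caf%C3%A9', 'utf-8') == 'caf%C3%A9'
assert encode_component('100% legit', 'utf-8') == '100%25%20legit'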
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.py
new file mode 100644
index 00000000..0a734566
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/parseresult.py
@@ -0,0 +1,385 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2015 Ian Stapleton Cordasco
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module containing the urlparse compatibility logic."""
+from collections import namedtuple
+
+from . import compat
+from . import exceptions
+from . import misc
+from . import normalizers
+from . import uri
+
+__all__ = ('ParseResult', 'ParseResultBytes')
+
+PARSED_COMPONENTS = ('scheme', 'userinfo', 'host', 'port', 'path', 'query',
+ 'fragment')
+
+
+class ParseResultMixin(object):
+ def _generate_authority(self, attributes):
+ # I swear I did not align the comparisons below. That's just how they
+ # happened to align based on pep8 and attribute lengths.
+ userinfo, host, port = (attributes[p]
+ for p in ('userinfo', 'host', 'port'))
+ if (self.userinfo != userinfo or
+ self.host != host or
+ self.port != port):
+ if port:
+ port = '{0}'.format(port)
+ return normalizers.normalize_authority(
+ (compat.to_str(userinfo, self.encoding),
+ compat.to_str(host, self.encoding),
+ port)
+ )
+ return self.authority
+
+ def geturl(self):
+ """Shim to match the standard library method."""
+ return self.unsplit()
+
+ @property
+ def hostname(self):
+ """Shim to match the standard library."""
+ return self.host
+
+ @property
+ def netloc(self):
+ """Shim to match the standard library."""
+ return self.authority
+
+ @property
+ def params(self):
+ """Shim to match the standard library."""
+ return self.query
+
+
+class ParseResult(namedtuple('ParseResult', PARSED_COMPONENTS),
+ ParseResultMixin):
+ """Implementation of urlparse compatibility class.
+
+ This uses the URIReference logic to handle compatibility with the
+ urlparse.ParseResult class.
+ """
+
+ slots = ()
+
+ def __new__(cls, scheme, userinfo, host, port, path, query, fragment,
+ uri_ref, encoding='utf-8'):
+ """Create a new ParseResult."""
+ parse_result = super(ParseResult, cls).__new__(
+ cls,
+ scheme or None,
+ userinfo or None,
+ host,
+ port or None,
+ path or None,
+ query,
+ fragment)
+ parse_result.encoding = encoding
+ parse_result.reference = uri_ref
+ return parse_result
+
+ @classmethod
+ def from_parts(cls, scheme=None, userinfo=None, host=None, port=None,
+ path=None, query=None, fragment=None, encoding='utf-8'):
+ """Create a ParseResult instance from its parts."""
+ authority = ''
+ if userinfo is not None:
+ authority += userinfo + '@'
+ if host is not None:
+ authority += host
+ if port is not None:
+ authority += ':{0}'.format(port)
+ uri_ref = uri.URIReference(scheme=scheme,
+ authority=authority,
+ path=path,
+ query=query,
+ fragment=fragment,
+ encoding=encoding).normalize()
+ userinfo, host, port = authority_from(uri_ref, strict=True)
+ return cls(scheme=uri_ref.scheme,
+ userinfo=userinfo,
+ host=host,
+ port=port,
+ path=uri_ref.path,
+ query=uri_ref.query,
+ fragment=uri_ref.fragment,
+ uri_ref=uri_ref,
+ encoding=encoding)
+
+ @classmethod
+ def from_string(cls, uri_string, encoding='utf-8', strict=True,
+ lazy_normalize=True):
+ """Parse a URI from the given unicode URI string.
+
+ :param str uri_string: Unicode URI to be parsed into a reference.
+ :param str encoding: The encoding of the string provided
+ :param bool strict: Parse strictly according to :rfc:`3986` if True.
+ If False, parse similarly to the standard library's urlparse
+ function.
+ :returns: :class:`ParseResult` or subclass thereof
+ """
+ reference = uri.URIReference.from_string(uri_string, encoding)
+ if not lazy_normalize:
+ reference = reference.normalize()
+ userinfo, host, port = authority_from(reference, strict)
+
+ return cls(scheme=reference.scheme,
+ userinfo=userinfo,
+ host=host,
+ port=port,
+ path=reference.path,
+ query=reference.query,
+ fragment=reference.fragment,
+ uri_ref=reference,
+ encoding=encoding)
+
+ @property
+ def authority(self):
+ """Return the normalized authority."""
+ return self.reference.authority
+
+ def copy_with(self, scheme=misc.UseExisting, userinfo=misc.UseExisting,
+ host=misc.UseExisting, port=misc.UseExisting,
+ path=misc.UseExisting, query=misc.UseExisting,
+ fragment=misc.UseExisting):
+ """Create a copy of this instance replacing with specified parts."""
+ attributes = zip(PARSED_COMPONENTS,
+ (scheme, userinfo, host, port, path, query, fragment))
+ attrs_dict = {}
+ for name, value in attributes:
+ if value is misc.UseExisting:
+ value = getattr(self, name)
+ attrs_dict[name] = value
+ authority = self._generate_authority(attrs_dict)
+ ref = self.reference.copy_with(scheme=attrs_dict['scheme'],
+ authority=authority,
+ path=attrs_dict['path'],
+ query=attrs_dict['query'],
+ fragment=attrs_dict['fragment'])
+ return ParseResult(uri_ref=ref, encoding=self.encoding, **attrs_dict)
+
+ def encode(self, encoding=None):
+ """Convert to an instance of ParseResultBytes."""
+ encoding = encoding or self.encoding
+ attrs = dict(
+ zip(PARSED_COMPONENTS,
+ (attr.encode(encoding) if hasattr(attr, 'encode') else attr
+ for attr in self)))
+ return ParseResultBytes(
+ uri_ref=self.reference,
+ encoding=encoding,
+ **attrs
+ )
+
+ def unsplit(self, use_idna=False):
+ """Create a URI string from the components.
+
+ :returns: The parsed URI reconstituted as a string.
+ :rtype: str
+ """
+ parse_result = self
+ if use_idna and self.host:
+ hostbytes = self.host.encode('idna')
+ host = hostbytes.decode(self.encoding)
+ parse_result = self.copy_with(host=host)
+ return parse_result.reference.unsplit()
+
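# Illustrative sketch (not part of the patch): the urlparse-style result
# splits the authority into userinfo, host, and port.
from pip._vendor.urllib3.packages.rfc3986.parseresult import ParseResult

result = ParseResult.from_string('https://u@example.com:8443/p?q=1#f')
print(result.userinfo, result.host, result.port)  # u example.com 8443
print(result.geturl())  # https://u@example.com:8443/p?q=1#f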
+
+class ParseResultBytes(namedtuple('ParseResultBytes', PARSED_COMPONENTS),
+ ParseResultMixin):
+ """Compatibility shim for the urlparse.ParseResultBytes object."""
+
+ def __new__(cls, scheme, userinfo, host, port, path, query, fragment,
+ uri_ref, encoding='utf-8', lazy_normalize=True):
+ """Create a new ParseResultBytes instance."""
+ parse_result = super(ParseResultBytes, cls).__new__(
+ cls,
+ scheme or None,
+ userinfo or None,
+ host,
+ port or None,
+ path or None,
+ query or None,
+ fragment or None)
+ parse_result.encoding = encoding
+ parse_result.reference = uri_ref
+ parse_result.lazy_normalize = lazy_normalize
+ return parse_result
+
+ @classmethod
+ def from_parts(cls, scheme=None, userinfo=None, host=None, port=None,
+ path=None, query=None, fragment=None, encoding='utf-8',
+ lazy_normalize=True):
+ """Create a ParseResult instance from its parts."""
+ authority = ''
+ if userinfo is not None:
+ authority += userinfo + '@'
+ if host is not None:
+ authority += host
+ if port is not None:
+ authority += ':{0}'.format(int(port))
+ uri_ref = uri.URIReference(scheme=scheme,
+ authority=authority,
+ path=path,
+ query=query,
+ fragment=fragment,
+ encoding=encoding)
+ if not lazy_normalize:
+ uri_ref = uri_ref.normalize()
+ to_bytes = compat.to_bytes
+ userinfo, host, port = authority_from(uri_ref, strict=True)
+ return cls(scheme=to_bytes(scheme, encoding),
+ userinfo=to_bytes(userinfo, encoding),
+ host=to_bytes(host, encoding),
+ port=port,
+ path=to_bytes(path, encoding),
+ query=to_bytes(query, encoding),
+ fragment=to_bytes(fragment, encoding),
+ uri_ref=uri_ref,
+ encoding=encoding,
+ lazy_normalize=lazy_normalize)
+
+ @classmethod
+ def from_string(cls, uri_string, encoding='utf-8', strict=True,
+ lazy_normalize=True):
+ """Parse a URI from the given unicode URI string.
+
+ :param str uri_string: Unicode URI to be parsed into a reference.
+ :param str encoding: The encoding of the string provided.
+ :param bool strict: Parse strictly according to :rfc:`3986` if True.
+ If False, parse similarly to the standard library's urlparse
+ function.
+ :returns: :class:`ParseResultBytes` or subclass thereof
+ """
+ reference = uri.URIReference.from_string(uri_string, encoding)
+ if not lazy_normalize:
+ reference = reference.normalize()
+ userinfo, host, port = authority_from(reference, strict)
+
+ to_bytes = compat.to_bytes
+ return cls(scheme=to_bytes(reference.scheme, encoding),
+ userinfo=to_bytes(userinfo, encoding),
+ host=to_bytes(host, encoding),
+ port=port,
+ path=to_bytes(reference.path, encoding),
+ query=to_bytes(reference.query, encoding),
+ fragment=to_bytes(reference.fragment, encoding),
+ uri_ref=reference,
+ encoding=encoding,
+ lazy_normalize=lazy_normalize)
+
+ @property
+ def authority(self):
+ """Return the normalized authority."""
+ return self.reference.authority.encode(self.encoding)
+
+ def copy_with(self, scheme=misc.UseExisting, userinfo=misc.UseExisting,
+ host=misc.UseExisting, port=misc.UseExisting,
+ path=misc.UseExisting, query=misc.UseExisting,
+ fragment=misc.UseExisting, lazy_normalize=True):
+ """Create a copy of this instance replacing with specified parts."""
+ attributes = zip(PARSED_COMPONENTS,
+ (scheme, userinfo, host, port, path, query, fragment))
+ attrs_dict = {}
+ for name, value in attributes:
+ if value is misc.UseExisting:
+ value = getattr(self, name)
+ if not isinstance(value, bytes) and hasattr(value, 'encode'):
+ value = value.encode(self.encoding)
+ attrs_dict[name] = value
+ authority = self._generate_authority(attrs_dict)
+ to_str = compat.to_str
+ ref = self.reference.copy_with(
+ scheme=to_str(attrs_dict['scheme'], self.encoding),
+ authority=to_str(authority, self.encoding),
+ path=to_str(attrs_dict['path'], self.encoding),
+ query=to_str(attrs_dict['query'], self.encoding),
+ fragment=to_str(attrs_dict['fragment'], self.encoding)
+ )
+ if not lazy_normalize:
+ ref = ref.normalize()
+ return ParseResultBytes(
+ uri_ref=ref,
+ encoding=self.encoding,
+ lazy_normalize=lazy_normalize,
+ **attrs_dict
+ )
+
+ def unsplit(self, use_idna=False):
+ """Create a URI bytes object from the components.
+
+ :returns: The parsed URI reconstituted as bytes.
+ :rtype: bytes
+ """
+ parse_result = self
+ if use_idna and self.host:
+ # self.host is bytes; decode it first so it can be re-encoded
+ # with the 'idna' codec.
+ host = self.host.decode(self.encoding)
+ hostbytes = host.encode('idna')
+ parse_result = self.copy_with(host=hostbytes)
+ if self.lazy_normalize:
+ parse_result = parse_result.copy_with(lazy_normalize=False)
+ uri = parse_result.reference.unsplit()
+ return uri.encode(self.encoding)
+
+
+def split_authority(authority):
+ # Initialize our expected return values
+ userinfo = host = port = None
+ # Initialize an extra var we may need to use
+ extra_host = None
+ # Set up rest in case there is no userinfo portion
+ rest = authority
+
+ if '@' in authority:
+ userinfo, rest = authority.rsplit('@', 1)
+
+ # Handle IPv6 host addresses
+ if rest.startswith('['):
+ host, rest = rest.split(']', 1)
+ host += ']'
+
+ if ':' in rest:
+ extra_host, port = rest.split(':', 1)
+ elif not host and rest:
+ host = rest
+
+ if extra_host and not host:
+ host = extra_host
+
+ return userinfo, host, port
+
+
+def authority_from(reference, strict):
+ try:
+ subauthority = reference.authority_info()
+ except exceptions.InvalidAuthority:
+ if strict:
+ raise
+ userinfo, host, port = split_authority(reference.authority)
+ else:
+ # Thanks to Richard Barrell for this idea:
+ # https://twitter.com/0x2ba22e11/status/617338811975139328
+ userinfo, host, port = (subauthority.get(p)
+ for p in ('userinfo', 'host', 'port'))
+
+ if port:
+ try:
+ port = int(port)
+ except ValueError:
+ raise exceptions.InvalidPort(port)
+ return userinfo, host, port
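For orientation, a minimal usage sketch of the ParseResult API added above. The import path is an assumption based on the file paths in this diff (vendored modules are not a public API), and the module name parseresult is likewise assumed; printed values are illustrative.

# Sketch; module path assumed from the diff headers above.
from pip._vendor.urllib3.packages.rfc3986 import parseresult

result = parseresult.ParseResult.from_string(
    'https://user@example.com:443/path?q=1#frag')
print(result.scheme, result.host, result.port)  # https example.com 443

# copy_with() swaps individual components and rebuilds the authority.
insecure = result.copy_with(scheme='http')
print(insecure.unsplit())  # e.g. http://user@example.com:443/path?q=1#frag

# encode() produces the bytes-oriented twin class, ParseResultBytes.
raw = result.encode()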
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.py
new file mode 100644
index 00000000..d1d71505
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/uri.py
@@ -0,0 +1,153 @@
+"""Module containing the implementation of the URIReference class."""
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014 Rackspace
+# Copyright (c) 2015 Ian Stapleton Cordasco
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from collections import namedtuple
+
+from . import compat
+from . import misc
+from . import normalizers
+from ._mixin import URIMixin
+
+
+class URIReference(namedtuple('URIReference', misc.URI_COMPONENTS), URIMixin):
+ """Immutable object representing a parsed URI Reference.
+
+ .. note::
+
+ This class is not intended to be directly instantiated by the user.
+
+ This object exposes attributes for the following components of a
+ URI:
+
+ - scheme
+ - authority
+ - path
+ - query
+ - fragment
+
+ .. attribute:: scheme
+
+ The scheme that was parsed for the URI Reference. For example,
+ ``http``, ``https``, ``smtp``, ``imap``, etc.
+
+ .. attribute:: authority
+
+ Component of the URI that contains the user information, host,
+ and port sub-components. For example,
+ ``google.com``, ``127.0.0.1:5000``, ``username@[::1]``,
+ ``username:password@example.com:443``, etc.
+
+ .. attribute:: path
+
+ The path that was parsed for the given URI Reference. For example,
+ ``/``, ``/index.php``, etc.
+
+ .. attribute:: query
+
+ The query component for a given URI Reference. For example, ``a=b``,
+ ``a=b%20c``, ``a=b+c``, ``a=b,c=d,e=%20f``, etc.
+
+ .. attribute:: fragment
+
+ The fragment component of a URI. For example, ``section-3.1``.
+
+ This class also provides extra attributes for easier access to information
+ like the subcomponents of the authority component.
+
+ .. attribute:: userinfo
+
+ The user information parsed from the authority.
+
+ .. attribute:: host
+
+ The hostname, IPv4 address, or IPv6 address parsed from the authority.
+
+ .. attribute:: port
+
+ The port parsed from the authority.
+ """
+
+ slots = ()  # not __slots__: instances need a __dict__ so __new__ can set .encoding
+
+ def __new__(cls, scheme, authority, path, query, fragment,
+ encoding='utf-8'):
+ """Create a new URIReference."""
+ ref = super(URIReference, cls).__new__(
+ cls,
+ scheme or None,
+ authority or None,
+ path or None,
+ query,
+ fragment)
+ ref.encoding = encoding
+ return ref
+
+ __hash__ = tuple.__hash__
+
+ def __eq__(self, other):
+ """Compare this reference to another."""
+ other_ref = other
+ if isinstance(other, tuple):
+ other_ref = URIReference(*other)
+ elif not isinstance(other, URIReference):
+ try:
+ other_ref = URIReference.from_string(other)
+ except TypeError:
+ raise TypeError(
+ 'Unable to compare URIReference() to {0}()'.format(
+ type(other).__name__))
+
+ # See http://tools.ietf.org/html/rfc3986#section-6.2
+ naive_equality = tuple(self) == tuple(other_ref)
+ return naive_equality or self.normalized_equality(other_ref)
+
+ def normalize(self):
+ """Normalize this reference as described in Section 6.2.2.
+
+ This is not an in-place normalization. Instead this creates a new
+ URIReference.
+
+ :returns: A new reference object with normalized components.
+ :rtype: URIReference
+ """
+ # See http://tools.ietf.org/html/rfc3986#section-6.2.2 for logic in
+ # this method.
+ return URIReference(normalizers.normalize_scheme(self.scheme or ''),
+ normalizers.normalize_authority(
+ (self.userinfo, self.host, self.port)),
+ normalizers.normalize_path(self.path or ''),
+ normalizers.normalize_query(self.query),
+ normalizers.normalize_fragment(self.fragment),
+ self.encoding)
+
+ @classmethod
+ def from_string(cls, uri_string, encoding='utf-8'):
+ """Parse a URI reference from the given unicode URI string.
+
+ :param str uri_string: Unicode URI to be parsed into a reference.
+ :param str encoding: The encoding of the string provided.
+ :returns: :class:`URIReference` or subclass thereof
+ """
+ uri_string = compat.to_str(uri_string, encoding)
+
+ split_uri = misc.URI_MATCHER.match(uri_string).groupdict()
+ return cls(
+ split_uri['scheme'], split_uri['authority'],
+ normalizers.encode_component(split_uri['path'], encoding),
+ normalizers.encode_component(split_uri['query'], encoding),
+ normalizers.encode_component(split_uri['fragment'], encoding),
+ encoding,
+ )
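A short behavioural sketch of URIReference under the same assumed import path; the key point is that parsing preserves case, while normalize() and the equality fallback apply RFC 3986 section 6.2 normalization.

# Sketch; import path assumed from the diff headers.
from pip._vendor.urllib3.packages.rfc3986 import uri

ref = uri.URIReference.from_string('HTTP://Example.COM/%7Esmith')
print(ref.scheme, ref.authority)    # HTTP Example.COM (case preserved)

norm = ref.normalize()              # returns a new object, not in-place
print(norm.scheme, norm.authority)  # http example.com

# __eq__ tries naive tuple equality first, then normalized equality,
# so equivalent references with different casing still compare equal.
assert ref == 'http://example.com/%7Esmith'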
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.py
new file mode 100644
index 00000000..7fc97215
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/rfc3986/validators.py
@@ -0,0 +1,450 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 Ian Stapleton Cordasco
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Module containing the validation logic for rfc3986."""
+from . import exceptions
+from . import misc
+from . import normalizers
+
+
+class Validator(object):
+ """Object used to configure validation of all objects in rfc3986.
+
+ .. versionadded:: 1.0
+
+ Example usage::
+
+ >>> from rfc3986 import api, validators
+ >>> uri = api.uri_reference('https://github.com/')
+ >>> validator = validators.Validator().require_presence_of(
+ ... 'scheme', 'host', 'path',
+ ... ).allow_schemes(
+ ... 'http', 'https',
+ ... ).allow_hosts(
+ ... '127.0.0.1', 'github.com',
+ ... )
+ >>> validator.validate(uri)
+ >>> invalid_uri = api.uri_reference('imap://mail.google.com')
+ >>> validator.validate(invalid_uri)
+ Traceback (most recent call last):
+ ...
+ rfc3986.exceptions.MissingComponentError: ('path was required but
+ missing', URIReference(scheme=u'imap', authority=u'mail.google.com',
+ path=None, query=None, fragment=None), ['path'])
+
+ """
+
+ COMPONENT_NAMES = frozenset([
+ 'scheme',
+ 'userinfo',
+ 'host',
+ 'port',
+ 'path',
+ 'query',
+ 'fragment',
+ ])
+
+ def __init__(self):
+ """Initialize our default validations."""
+ self.allowed_schemes = set()
+ self.allowed_hosts = set()
+ self.allowed_ports = set()
+ self.allow_password = True
+ self.required_components = {
+ 'scheme': False,
+ 'userinfo': False,
+ 'host': False,
+ 'port': False,
+ 'path': False,
+ 'query': False,
+ 'fragment': False,
+ }
+ self.validated_components = self.required_components.copy()
+
+ def allow_schemes(self, *schemes):
+ """Require the scheme to be one of the provided schemes.
+
+ .. versionadded:: 1.0
+
+ :param schemes:
+ Schemes, without ``://`` that are allowed.
+ :returns:
+ The validator instance.
+ :rtype:
+ Validator
+ """
+ for scheme in schemes:
+ self.allowed_schemes.add(normalizers.normalize_scheme(scheme))
+ return self
+
+ def allow_hosts(self, *hosts):
+ """Require the host to be one of the provided hosts.
+
+ .. versionadded:: 1.0
+
+ :param hosts:
+ Hosts that are allowed.
+ :returns:
+ The validator instance.
+ :rtype:
+ Validator
+ """
+ for host in hosts:
+ self.allowed_hosts.add(normalizers.normalize_host(host))
+ return self
+
+ def allow_ports(self, *ports):
+ """Require the port to be one of the provided ports.
+
+ .. versionadded:: 1.0
+
+ :param ports:
+ Ports that are allowed.
+ :returns:
+ The validator instance.
+ :rtype:
+ Validator
+ """
+ for port in ports:
+ port_int = int(port, base=10)
+ if 0 <= port_int <= 65535:
+ self.allowed_ports.add(port)
+ return self
+
+ def allow_use_of_password(self):
+ """Allow passwords to be present in the URI.
+
+ .. versionadded:: 1.0
+
+ :returns:
+ The validator instance.
+ :rtype:
+ Validator
+ """
+ self.allow_password = True
+ return self
+
+ def forbid_use_of_password(self):
+ """Prevent passwords from being included in the URI.
+
+ .. versionadded:: 1.0
+
+ :returns:
+ The validator instance.
+ :rtype:
+ Validator
+ """
+ self.allow_password = False
+ return self
+
+ def check_validity_of(self, *components):
+ """Check the validity of the components provided.
+
+ This can be specified repeatedly.
+
+ .. versionadded:: 1.1
+
+ :param components:
+ Names of components from :attr:`Validator.COMPONENT_NAMES`.
+ :returns:
+ The validator instance.
+ :rtype:
+ Validator
+ """
+ components = [c.lower() for c in components]
+ for component in components:
+ if component not in self.COMPONENT_NAMES:
+ raise ValueError(
+ '"{}" is not a valid component'.format(component)
+ )
+ self.validated_components.update({
+ component: True for component in components
+ })
+ return self
+
+ def require_presence_of(self, *components):
+ """Require the components provided.
+
+ This can be specified repeatedly.
+
+ .. versionadded:: 1.0
+
+ :param components:
+ Names of components from :attr:`Validator.COMPONENT_NAMES`.
+ :returns:
+ The validator instance.
+ :rtype:
+ Validator
+ """
+ components = [c.lower() for c in components]
+ for component in components:
+ if component not in self.COMPONENT_NAMES:
+ raise ValueError(
+ '"{}" is not a valid component'.format(component)
+ )
+ self.required_components.update({
+ component: True for component in components
+ })
+ return self
+
+ def validate(self, uri):
+ """Check a URI for conditions specified on this validator.
+
+ .. versionadded:: 1.0
+
+ :param uri:
+ Parsed URI to validate.
+ :type uri:
+ rfc3986.uri.URIReference
+ :raises MissingComponentError:
+ When a required component is missing.
+ :raises UnpermittedComponentError:
+ When a component is not one of those allowed.
+ :raises PasswordForbidden:
+ When a password is present in the userinfo component but is
+ not permitted by configuration.
+ :raises InvalidComponentsError:
+ When a component was found to be invalid.
+ """
+ if not self.allow_password:
+ check_password(uri)
+
+ required_components = [
+ component
+ for component, required in self.required_components.items()
+ if required
+ ]
+ validated_components = [
+ component
+ for component, required in self.validated_components.items()
+ if required
+ ]
+ if required_components:
+ ensure_required_components_exist(uri, required_components)
+ if validated_components:
+ ensure_components_are_valid(uri, validated_components)
+
+ ensure_one_of(self.allowed_schemes, uri, 'scheme')
+ ensure_one_of(self.allowed_hosts, uri, 'host')
+ ensure_one_of(self.allowed_ports, uri, 'port')
+
+
+def check_password(uri):
+ """Assert that there is no password present in the uri."""
+ userinfo = uri.userinfo
+ if not userinfo:
+ return
+ credentials = userinfo.split(':', 1)
+ if len(credentials) <= 1:
+ return
+ raise exceptions.PasswordForbidden(uri)
+
+
+def ensure_one_of(allowed_values, uri, attribute):
+ """Assert that the uri's attribute is one of the allowed values."""
+ value = getattr(uri, attribute)
+ if value is not None and allowed_values and value not in allowed_values:
+ raise exceptions.UnpermittedComponentError(
+ attribute, value, allowed_values,
+ )
+
+
+def ensure_required_components_exist(uri, required_components):
+ """Assert that all required components are present in the URI."""
+ missing_components = sorted([
+ component
+ for component in required_components
+ if getattr(uri, component) is None
+ ])
+ if missing_components:
+ raise exceptions.MissingComponentError(uri, *missing_components)
+
+
+def is_valid(value, matcher, require):
+ """Determine if a value is valid based on the provided matcher.
+
+ :param str value:
+ Value to validate.
+ :param matcher:
+ Compiled regular expression to use to validate the value.
+ :param require:
+ Whether or not the value is required.
+ """
+ if require:
+ return (value is not None
+ and matcher.match(value))
+
+ # require is False here, so an absent value is acceptable
+ return value is None or matcher.match(value)
+
+
+def authority_is_valid(authority, host=None, require=False):
+ """Determine if the authority string is valid.
+
+ :param str authority:
+ The authority to validate.
+ :param str host:
+ (optional) The host portion of the authority to validate.
+ :param bool require:
+ (optional) Specify if authority must not be None.
+ :returns:
+ ``True`` if valid, ``False`` otherwise
+ :rtype:
+ bool
+ """
+ validated = is_valid(authority, misc.SUBAUTHORITY_MATCHER, require)
+ if validated and host is not None:
+ return host_is_valid(host, require)
+ return validated
+
+
+def host_is_valid(host, require=False):
+ """Determine if the host string is valid.
+
+ :param str host:
+ The host to validate.
+ :param bool require:
+ (optional) Specify if host must not be None.
+ :returns:
+ ``True`` if valid, ``False`` otherwise
+ :rtype:
+ bool
+ """
+ validated = is_valid(host, misc.HOST_MATCHER, require)
+ if validated and host is not None and misc.IPv4_MATCHER.match(host):
+ return valid_ipv4_host_address(host)
+ elif validated and host is not None and misc.IPv6_MATCHER.match(host):
+ return misc.IPv6_NO_RFC4007_MATCHER.match(host) is not None
+ return validated
+
+
+def scheme_is_valid(scheme, require=False):
+ """Determine if the scheme is valid.
+
+ :param str scheme:
+ The scheme string to validate.
+ :param bool require:
+ (optional) Set to ``True`` to require the presence of a scheme.
+ :returns:
+ ``True`` if the scheme is valid. ``False`` otherwise.
+ :rtype:
+ bool
+ """
+ return is_valid(scheme, misc.SCHEME_MATCHER, require)
+
+
+def path_is_valid(path, require=False):
+ """Determine if the path component is valid.
+
+ :param str path:
+ The path string to validate.
+ :param bool require:
+ (optional) Set to ``True`` to require the presence of a path.
+ :returns:
+ ``True`` if the path is valid. ``False`` otherwise.
+ :rtype:
+ bool
+ """
+ return is_valid(path, misc.PATH_MATCHER, require)
+
+
+def query_is_valid(query, require=False):
+ """Determine if the query component is valid.
+
+ :param str query:
+ The query string to validate.
+ :param bool require:
+ (optional) Set to ``True`` to require the presence of a query.
+ :returns:
+ ``True`` if the query is valid. ``False`` otherwise.
+ :rtype:
+ bool
+ """
+ return is_valid(query, misc.QUERY_MATCHER, require)
+
+
+def fragment_is_valid(fragment, require=False):
+ """Determine if the fragment component is valid.
+
+ :param str fragment:
+ The fragment string to validate.
+ :param bool require:
+ (optional) Set to ``True`` to require the presence of a fragment.
+ :returns:
+ ``True`` if the fragment is valid. ``False`` otherwise.
+ :rtype:
+ bool
+ """
+ return is_valid(fragment, misc.FRAGMENT_MATCHER, require)
+
+
+def valid_ipv4_host_address(host):
+ """Determine if the given host is a valid IPv4 address."""
+ # If the host exists, and it might be IPv4, check each byte in the
+ # address.
+ return all([0 <= int(byte, base=10) <= 255 for byte in host.split('.')])
+
+
+_COMPONENT_VALIDATORS = {
+ 'scheme': scheme_is_valid,
+ 'path': path_is_valid,
+ 'query': query_is_valid,
+ 'fragment': fragment_is_valid,
+}
+
+_SUBAUTHORITY_VALIDATORS = set(['userinfo', 'host', 'port'])
+
+
+def subauthority_component_is_valid(uri, component):
+ """Determine if the userinfo, host, and port are valid."""
+ try:
+ subauthority_dict = uri.authority_info()
+ except exceptions.InvalidAuthority:
+ return False
+
+ # The authority parsed into sub-components; userinfo needs no further
+ # checks, while host and port are validated individually below.
+ if component == 'host':
+ return host_is_valid(subauthority_dict['host'])
+ elif component != 'port':
+ return True
+
+ try:
+ port = int(subauthority_dict['port'])
+ except TypeError:
+ # If the port wasn't provided it'll be None and int(None) raises a
+ # TypeError
+ return True
+
+ return (0 <= port <= 65535)
+
+
+def ensure_components_are_valid(uri, validated_components):
+ """Assert that all components are valid in the URI."""
+ invalid_components = set([])
+ for component in validated_components:
+ if component in _SUBAUTHORITY_VALIDATORS:
+ if not subauthority_component_is_valid(uri, component):
+ invalid_components.add(component)
+ # Python's peephole optimizer means that while this continue *is*
+ # actually executed, coverage.py cannot detect that. See also,
+ # https://bitbucket.org/ned/coveragepy/issues/198/continue-marked-as-not-covered
+ continue # nocov: Python 2.7, 3.3, 3.4
+
+ validator = _COMPONENT_VALIDATORS[component]
+ if not validator(getattr(uri, component)):
+ invalid_components.add(component)
+
+ if invalid_components:
+ raise exceptions.InvalidComponentsError(uri, *invalid_components)
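The Validator class is the configurable front end, but the module-level predicates above are usable on their own. A small sketch, again under the assumed vendored import path:

from pip._vendor.urllib3.packages.rfc3986 import validators

# Absent components are acceptable unless require=True.
print(bool(validators.host_is_valid('example.com')))       # True
print(bool(validators.host_is_valid(None)))                # True
print(bool(validators.host_is_valid(None, require=True)))  # False

# IPv4 hosts get a per-octet range check on top of the regex match.
print(validators.valid_ipv4_host_address('10.0.0.300'))    # False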
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/six.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/six.py
new file mode 100644
index 00000000..190c0239
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/six.py
@@ -0,0 +1,868 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+# Copyright (c) 2010-2015 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.10.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP 302 finder and loader. It should be
+ compatible with Python 2.5 and all existing versions of Python 3.
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP 451).
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required if is_package is implemented."""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ if from_value is None:
+ raise value
+ raise value from from_value
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ raise value from from_value
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
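The six module above is the compatibility layer the rest of the vendored urllib3 leans on. A brief sketch of the helpers it provides; the import path mirrors the one used by poolmanager.py later in this diff:

from pip._vendor.urllib3.packages import six
from pip._vendor.urllib3.packages.six.moves.urllib.parse import urlparse

# Coarse version flags and unified type aliases.
if six.PY3:
    assert six.text_type is str and six.binary_type is bytes

data = six.b('payload')   # byte literal: bytes on 3.x, str on 2.x

# six.moves resolves renamed stdlib modules through the meta path
# importer registered at the bottom of the module.
print(urlparse('https://example.com/x').netloc)  # example.com

for key, value in six.iteritems({'a': 1}):  # items()/iteritems()
    print(key, value)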
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 00000000..d6594eb2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,19 @@
+import sys
+
+try:
+ # Our match_hostname function is the same as 3.5's, so we only want to
+ # import the match_hostname function if it's at least that good.
+ if sys.version_info < (3, 5):
+ raise ImportError("Fallback to vendored code")
+
+ from ssl import CertificateError, match_hostname
+except ImportError:
+ try:
+ # Backport of the function from a pypi module
+ from backports.ssl_match_hostname import CertificateError, match_hostname
+ except ImportError:
+ # Our vendored copy
+ from ._implementation import CertificateError, match_hostname
+
+# Not needed, but documenting what we provide.
+__all__ = ('CertificateError', 'match_hostname')
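The effect of the shim is that consumers never import from ssl directly; they go through this package and get the best implementation available at runtime. A sketch of the consumer side:

# Sketch: importing through the shim so the fallback chain above
# (stdlib >= 3.5, backports package, vendored copy) applies.
from pip._vendor.urllib3.packages.ssl_match_hostname import (
    CertificateError,
    match_hostname,
)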
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..106e7574
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc
new file mode 100644
index 00000000..b9b34b08
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py
new file mode 100644
index 00000000..970cf653
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py
@@ -0,0 +1,156 @@
+"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
+
+# Note: This file is under the PSF license as the code comes from the python
+# stdlib. http://docs.python.org/3/license.html
+
+import re
+import sys
+
+# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
+# system, use it to handle IPAddress ServerAltnames (this was added in
+# python-3.5) otherwise only do DNS matching. This allows
+# backports.ssl_match_hostname to continue to be used in Python 2.7.
+try:
+ from pip._vendor import ipaddress
+except ImportError:
+ ipaddress = None
+
+__version__ = '3.5.0.1'
+
+
+class CertificateError(ValueError):
+ pass
+
+
+def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r'.')
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+
+def _to_unicode(obj):
+ if isinstance(obj, str) and sys.version_info < (3,):
+ obj = unicode(obj, encoding='ascii', errors='strict')
+ return obj
+
+def _ipaddress_match(ipname, host_ip):
+ """Exact matching of IP addresses.
+
+ RFC 6125 explicitly doesn't define an algorithm for this
+ (section 1.7.2 - "Out of Scope").
+ """
+ # OpenSSL may add a trailing newline to a subjectAltName's IP address
+ # Divergence from upstream: ipaddress can't handle byte str
+ ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
+ return ip == host_ip
+
+
+def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate, match_hostname needs a "
+ "SSL socket or SSL context with either "
+ "CERT_OPTIONAL or CERT_REQUIRED")
+ try:
+ # Divergence from upstream: ipaddress can't handle byte str
+ host_ip = ipaddress.ip_address(_to_unicode(hostname))
+ except ValueError:
+ # Not an IP address (common case)
+ host_ip = None
+ except UnicodeError:
+ # Divergence from upstream: Have to deal with ipaddress not taking
+ # byte strings. addresses should be all ascii, so we consider it not
+ # an ipaddress in this case
+ host_ip = None
+ except AttributeError:
+ # Divergence from upstream: Make ipaddress library optional
+ if ipaddress is None:
+ host_ip = None
+ else:
+ raise
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if host_ip is None and _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ elif key == 'IP Address':
+ if host_ip is not None and _ipaddress_match(value, host_ip):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/poolmanager.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/poolmanager.py
new file mode 100644
index 00000000..a6ade6e9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/poolmanager.py
@@ -0,0 +1,455 @@
+from __future__ import absolute_import
+import collections
+import functools
+import logging
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connectionpool import port_by_scheme
+from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
+from .packages import six
+from .packages.six.moves.urllib.parse import urljoin
+from .request import RequestMethods
+from .util.url import parse_url
+from .util.retry import Retry
+
+
+__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
+
+
+log = logging.getLogger(__name__)
+
+SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
+ 'ssl_version', 'ca_cert_dir', 'ssl_context',
+ 'key_password')
+
+# All known keyword arguments that could be provided to the pool manager, its
+# pools, or the underlying connections. This is used to construct a pool key.
+_key_fields = (
+ 'key_scheme', # str
+ 'key_host', # str
+ 'key_port', # int
+ 'key_timeout', # int or float or Timeout
+ 'key_retries', # int or Retry
+ 'key_strict', # bool
+ 'key_block', # bool
+ 'key_source_address', # str
+ 'key_key_file', # str
+ 'key_key_password', # str
+ 'key_cert_file', # str
+ 'key_cert_reqs', # str
+ 'key_ca_certs', # str
+ 'key_ssl_version', # str
+ 'key_ca_cert_dir', # str
+ 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
+ 'key_maxsize', # int
+ 'key_headers', # dict
+ 'key__proxy', # parsed proxy url
+ 'key__proxy_headers', # dict
+ 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples
+ 'key__socks_options', # dict
+ 'key_assert_hostname', # bool or string
+ 'key_assert_fingerprint', # str
+ 'key_server_hostname', # str
+)
+
+#: The namedtuple class used to construct keys for the connection pool.
+#: All custom key schemes should include the fields in this key at a minimum.
+PoolKey = collections.namedtuple('PoolKey', _key_fields)
+
+
+def _default_key_normalizer(key_class, request_context):
+ """
+ Create a pool key out of a request context dictionary.
+
+ According to RFC 3986, both the scheme and host are case-insensitive.
+ Therefore, this function normalizes both before constructing the pool
+ key for an HTTPS request. If you wish to change this behaviour, provide
+ alternate callables to ``key_fn_by_scheme``.
+
+ :param key_class:
+ The class to use when constructing the key. This should be a namedtuple
+ with the ``scheme`` and ``host`` keys at a minimum.
+ :type key_class: namedtuple
+ :param request_context:
+ A dictionary-like object that contains the context for a request.
+ :type request_context: dict
+
+ :return: A namedtuple that can be used as a connection pool key.
+ :rtype: PoolKey
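+
+ A small illustration (the context values are made up)::
+
+ >>> ctx = {'scheme': 'HTTPS', 'host': 'Example.COM', 'port': 443}
+ >>> key = _default_key_normalizer(PoolKey, ctx)
+ >>> key.key_scheme, key.key_host, key.key_port
+ ('https', 'example.com', 443)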
+ """
+ # Since we mutate the dictionary, make a copy first
+ context = request_context.copy()
+ context['scheme'] = context['scheme'].lower()
+ context['host'] = context['host'].lower()
+
+ # These are both dictionaries and need to be transformed into frozensets
+ for key in ('headers', '_proxy_headers', '_socks_options'):
+ if key in context and context[key] is not None:
+ context[key] = frozenset(context[key].items())
+
+ # The socket_options key may be a list and needs to be transformed into a
+ # tuple.
+ socket_opts = context.get('socket_options')
+ if socket_opts is not None:
+ context['socket_options'] = tuple(socket_opts)
+
+ # Map the kwargs to the names in the namedtuple - this is necessary since
+ # namedtuples can't have fields starting with '_'.
+ for key in list(context.keys()):
+ context['key_' + key] = context.pop(key)
+
+ # Default to ``None`` for keys missing from the context
+ for field in key_class._fields:
+ if field not in context:
+ context[field] = None
+
+ return key_class(**context)
+
+
+#: A dictionary that maps a scheme to a callable that creates a pool key.
+#: This can be used to alter the way pool keys are constructed, if desired.
+#: Each PoolManager makes a copy of this dictionary so they can be configured
+#: globally here, or individually on the instance.
+key_fn_by_scheme = {
+ 'http': functools.partial(_default_key_normalizer, PoolKey),
+ 'https': functools.partial(_default_key_normalizer, PoolKey),
+}
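+
+# A hypothetical customization sketch (not part of the vendored code): a key
+# function that ignores per-request headers when pooling plain-HTTP hosts.
+#
+#     def _headerless_key(request_context):
+#         ctx = dict(request_context, headers=None)
+#         return _default_key_normalizer(PoolKey, ctx)
+#
+#     pm = PoolManager()
+#     pm.key_fn_by_scheme['http'] = _headerless_key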
+
+pool_classes_by_scheme = {
+ 'http': HTTPConnectionPool,
+ 'https': HTTPSConnectionPool,
+}
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least
+ recently used pool.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param \\**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example::
+
+ >>> manager = PoolManager(num_pools=2)
+ >>> r = manager.request('GET', 'http://google.com/')
+ >>> r = manager.request('GET', 'http://google.com/mail')
+ >>> r = manager.request('GET', 'http://yahoo.com/')
+ >>> len(manager.pools)
+ 2
+
+ """
+
+ proxy = None
+
+ def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+ RequestMethods.__init__(self, headers)
+ self.connection_pool_kw = connection_pool_kw
+ self.pools = RecentlyUsedContainer(num_pools,
+ dispose_func=lambda p: p.close())
+
+ # Locally set the pool classes and keys so other PoolManagers can
+ # override them.
+ self.pool_classes_by_scheme = pool_classes_by_scheme
+ self.key_fn_by_scheme = key_fn_by_scheme.copy()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.clear()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def _new_pool(self, scheme, host, port, request_context=None):
+ """
+ Create a new :class:`ConnectionPool` based on host, port, scheme, and
+ any additional pool keyword arguments.
+
+ If ``request_context`` is provided, it is provided as keyword arguments
+ to the pool class used. This method is used to actually create the
+ connection pools handed out by :meth:`connection_from_url` and
+ companion methods. It is intended to be overridden for customization.
+ """
+ pool_cls = self.pool_classes_by_scheme[scheme]
+ if request_context is None:
+ request_context = self.connection_pool_kw.copy()
+
+ # Although the context has everything necessary to create the pool,
+ # this function has historically only used the scheme, host, and port
+ # in the positional args. When an API change is acceptable these can
+ # be removed.
+ for key in ('scheme', 'host', 'port'):
+ request_context.pop(key, None)
+
+ if scheme == 'http':
+ for kw in SSL_KEYWORDS:
+ request_context.pop(kw, None)
+
+ return pool_cls(host, port, **request_context)
+
+ def clear(self):
+ """
+ Empty our store of pools and direct them all to close.
+
+ This will not affect in-flight connections, but they will not be
+ re-used after completion.
+ """
+ self.pools.clear()
+
+ def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None):
+ """
+ Get a :class:`ConnectionPool` based on the host, port, and scheme.
+
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
+ ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
+ provided, it is merged with the instance's ``connection_pool_kw``
+ variable and used to create the new connection pool, if one is
+ needed.
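+
+ A short usage sketch (the host is illustrative)::
+
+ >>> pm = PoolManager()
+ >>> pool = pm.connection_from_host('example.com', scheme='https')
+ >>> pool.port
+ 443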
+ """
+
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ request_context = self._merge_pool_kwargs(pool_kwargs)
+ request_context['scheme'] = scheme or 'http'
+ if not port:
+ port = port_by_scheme.get(request_context['scheme'].lower(), 80)
+ request_context['port'] = port
+ request_context['host'] = host
+
+ return self.connection_from_context(request_context)
+
+ def connection_from_context(self, request_context):
+ """
+ Get a :class:`ConnectionPool` based on the request context.
+
+ ``request_context`` must at least contain the ``scheme`` key and its
+ value must be a key in the ``key_fn_by_scheme`` instance variable.
+ """
+ scheme = request_context['scheme'].lower()
+ pool_key_constructor = self.key_fn_by_scheme[scheme]
+ pool_key = pool_key_constructor(request_context)
+
+ return self.connection_from_pool_key(pool_key, request_context=request_context)
+
+ def connection_from_pool_key(self, pool_key, request_context=None):
+ """
+ Get a :class:`ConnectionPool` based on the provided pool key.
+
+ ``pool_key`` should be a namedtuple that only contains immutable
+ objects. At a minimum it must have the ``scheme``, ``host``, and
+ ``port`` fields.
+ """
+ with self.pools.lock:
+ # If the scheme, host, or port doesn't match existing open
+ # connections, open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ scheme = request_context['scheme']
+ host = request_context['host']
+ port = request_context['port']
+ pool = self._new_pool(scheme, host, port, request_context=request_context)
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(self, url, pool_kwargs=None):
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url`.
+
+ If ``pool_kwargs`` is not provided and a new pool needs to be
+ constructed, ``self.connection_pool_kw`` is used to initialize
+ the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
+ is provided, it is used instead. Note that if a new pool does not
+ need to be created for the request, the provided ``pool_kwargs`` are
+ not used.
+ """
+ u = parse_url(url)
+ return self.connection_from_host(u.host, port=u.port, scheme=u.scheme,
+ pool_kwargs=pool_kwargs)
+
+ def _merge_pool_kwargs(self, override):
+ """
+ Merge a dictionary of override values for self.connection_pool_kw.
+
+ This does not modify self.connection_pool_kw and returns a new dict.
+ Any keys in the override dictionary with a value of ``None`` are
+ removed from the merged dictionary.
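+
+ An illustration (the keyword names are arbitrary)::
+
+ >>> pm = PoolManager(timeout=3, retries=2)
+ >>> pm._merge_pool_kwargs({'retries': None, 'block': True})
+ {'timeout': 3, 'block': True}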
+ """
+ base_pool_kwargs = self.connection_pool_kw.copy()
+ if override:
+ for key, value in override.items():
+ if value is None:
+ try:
+ del base_pool_kwargs[key]
+ except KeyError:
+ pass
+ else:
+ base_pool_kwargs[key] = value
+ return base_pool_kwargs
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ """
+ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
+ with custom cross-host redirect logic and only sends the request-uri
+ portion of the ``url``.
+
+ The given ``url`` parameter must be absolute, such that an appropriate
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
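+
+ A minimal sketch (the URL is illustrative; cross-host redirects are
+ followed by default)::
+
+ >>> pm = PoolManager()
+ >>> r = pm.urlopen('GET', 'http://example.com/')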
+ """
+ u = parse_url(url)
+ conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+ kw['assert_same_host'] = False
+ kw['redirect'] = False
+
+ if 'headers' not in kw:
+ kw['headers'] = self.headers.copy()
+
+ if self.proxy is not None and u.scheme == "http":
+ response = conn.urlopen(method, url, **kw)
+ else:
+ response = conn.urlopen(method, u.request_uri, **kw)
+
+ redirect_location = redirect and response.get_redirect_location()
+ if not redirect_location:
+ return response
+
+ # Support relative URLs for redirecting.
+ redirect_location = urljoin(url, redirect_location)
+
+ # RFC 7231, Section 6.4.4
+ if response.status == 303:
+ method = 'GET'
+
+ retries = kw.get('retries')
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect)
+
+ # Strip headers marked as unsafe to forward to the redirected location.
+ # Check remove_headers_on_redirect to avoid a potential network call within
+ # conn.is_same_host() which may use socket.gethostbyname() in the future.
+ if (retries.remove_headers_on_redirect
+ and not conn.is_same_host(redirect_location)):
+ headers = list(six.iterkeys(kw['headers']))
+ for header in headers:
+ if header.lower() in retries.remove_headers_on_redirect:
+ kw['headers'].pop(header, None)
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=conn)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise
+ return response
+
+ kw['retries'] = retries
+ kw['redirect'] = redirect
+
+ log.info("Redirecting %s -> %s", url, redirect_location)
+ return self.urlopen(method, redirect_location, **kw)
+
+
+class ProxyManager(PoolManager):
+ """
+ Behaves just like :class:`PoolManager`, but sends all requests through
+ the defined proxy, using the CONNECT method for HTTPS URLs.
+
+ :param proxy_url:
+ The URL of the proxy to be used.
+
+ :param proxy_headers:
+ A dictionary containing headers that will be sent to the proxy. For
+ plain HTTP they are sent with each request, while for HTTPS/CONNECT
+ they are sent only once. Could be used for proxy authentication.
+
+ Example:
+ >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+ >>> r1 = proxy.request('GET', 'http://google.com/')
+ >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+ >>> len(proxy.pools)
+ 1
+ >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+ >>> r4 = proxy.request('GET', 'https://twitter.com/')
+ >>> len(proxy.pools)
+ 3
+
+ """
+
+ def __init__(self, proxy_url, num_pools=10, headers=None,
+ proxy_headers=None, **connection_pool_kw):
+
+ if isinstance(proxy_url, HTTPConnectionPool):
+ proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
+ proxy_url.port)
+ proxy = parse_url(proxy_url)
+ if not proxy.port:
+ port = port_by_scheme.get(proxy.scheme, 80)
+ proxy = proxy._replace(port=port)
+
+ if proxy.scheme not in ("http", "https"):
+ raise ProxySchemeUnknown(proxy.scheme)
+
+ self.proxy = proxy
+ self.proxy_headers = proxy_headers or {}
+
+ connection_pool_kw['_proxy'] = self.proxy
+ connection_pool_kw['_proxy_headers'] = self.proxy_headers
+
+ super(ProxyManager, self).__init__(
+ num_pools, headers, **connection_pool_kw)
+
+ def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None):
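+        # HTTPS requests are tunneled through the proxy via CONNECT, so they
+        # need a pool keyed on the target host; plain HTTP requests go to the
+        # proxy's own pool and carry the absolute URL instead.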
+ if scheme == "https":
+ return super(ProxyManager, self).connection_from_host(
+ host, port, scheme, pool_kwargs=pool_kwargs)
+
+ return super(ProxyManager, self).connection_from_host(
+ self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs)
+
+ def _set_proxy_headers(self, url, headers=None):
+ """
+ Sets headers needed by proxies: specifically, the Accept and Host
+ headers. Only sets headers not provided by the user.
+ """
+ headers_ = {'Accept': '*/*'}
+
+ netloc = parse_url(url).netloc
+ if netloc:
+ headers_['Host'] = netloc
+
+ if headers:
+ headers_.update(headers)
+ return headers_
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+ u = parse_url(url)
+
+ if u.scheme == "http":
+ # For proxied HTTPS requests, httplib sets the necessary headers
+ # on the CONNECT to the proxy. For HTTP, we'll definitely
+ # need to set 'Host' at the very least.
+ headers = kw.get('headers', self.headers)
+ kw['headers'] = self._set_proxy_headers(url, headers)
+
+ return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+
+
+def proxy_from_url(url, **kw):
+ return ProxyManager(proxy_url=url, **kw)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/request.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/request.py
new file mode 100644
index 00000000..8f2f44bb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/request.py
@@ -0,0 +1,150 @@
+from __future__ import absolute_import
+
+from .filepost import encode_multipart_formdata
+from .packages.six.moves.urllib.parse import urlencode
+
+
+__all__ = ['RequestMethods']
+
+
+class RequestMethods(object):
+ """
+ Convenience mixin for classes that implement a :meth:`urlopen` method, such
+ as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
+ :class:`~urllib3.poolmanager.PoolManager`.
+
+ Provides behavior for making common types of HTTP request methods and
+ decides which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are
+ encoded in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+ encoded in the *body* of the request using multipart or www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+ :meth:`.request` is for making any kind of request; it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+
+ Initializer parameters:
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ _encode_url_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS'}
+
+ def __init__(self, headers=None):
+ self.headers = headers or {}
+
+ def urlopen(self, method, url, body=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **kw): # Abstract
+ raise NotImplementedError("Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method.")
+
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the
+ option to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
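+
+ A hedged sketch (any subclass providing :meth:`urlopen` works; the URL
+ is illustrative)::
+
+ >>> http = PoolManager()
+ >>> r = http.request('GET', 'http://example.com/', fields={'q': 'pip'})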
+ """
+ method = method.upper()
+
+ urlopen_kw['request_url'] = url
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+ else:
+ return self.request_encode_body(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+
+ def request_encode_url(self, method, url, fields=None, headers=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {'headers': headers}
+ extra_kw.update(urlopen_kw)
+
+ if fields:
+ url += '?' + urlencode(fields)
+
+ return self.urlopen(method, url, **extra_kw)
+
+ def request_encode_body(self, method, url, fields=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
+ the payload with the appropriate content type. Otherwise
+ :meth:`urllib.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+ Multipart encoding must be used when posting files, and it's reasonably
+ safe to use it at other times too. However, it may break request
+ signing, such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+ the MIME type is optional. For example::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
+ 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+ tuple) is optional but recommended to best mimic behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
+ be overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {'headers': {}}
+
+ if fields:
+ if 'body' in urlopen_kw:
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one.")
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
+ else:
+ body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
+
+ extra_kw['body'] = body
+ extra_kw['headers'] = {'Content-Type': content_type}
+
+ extra_kw['headers'].update(headers)
+ extra_kw.update(urlopen_kw)
+
+ return self.urlopen(method, url, **extra_kw)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/response.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/response.py
new file mode 100644
index 00000000..4f857932
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/response.py
@@ -0,0 +1,760 @@
+from __future__ import absolute_import
+from contextlib import contextmanager
+import zlib
+import io
+import logging
+from socket import timeout as SocketTimeout
+from socket import error as SocketError
+
+try:
+ import brotli
+except ImportError:
+ brotli = None
+
+from ._collections import HTTPHeaderDict
+from .exceptions import (
+ BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError,
+ ResponseNotChunked, IncompleteRead, InvalidHeader
+)
+from .packages.six import string_types as basestring, PY3
+from .packages.six.moves import http_client as httplib
+from .connection import HTTPException, BaseSSLError
+from .util.response import is_fp_closed, is_response_to_head
+
+log = logging.getLogger(__name__)
+
+
+class DeflateDecoder(object):
+
+ def __init__(self):
+ self._first_try = True
+ self._data = b''
+ self._obj = zlib.decompressobj()
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+
+ if not self._first_try:
+ return self._obj.decompress(data)
+
+ self._data += data
+ try:
+ decompressed = self._obj.decompress(data)
+ if decompressed:
+ self._first_try = False
+ self._data = None
+ return decompressed
+ except zlib.error:
+ self._first_try = False
+ self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
+ try:
+ return self.decompress(self._data)
+ finally:
+ self._data = None
+
+
+class GzipDecoderState(object):
+
+ FIRST_MEMBER = 0
+ OTHER_MEMBERS = 1
+ SWALLOW_DATA = 2
+
+
+class GzipDecoder(object):
+
+ def __init__(self):
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+ self._state = GzipDecoderState.FIRST_MEMBER
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ ret = bytearray()
+ if self._state == GzipDecoderState.SWALLOW_DATA or not data:
+ return bytes(ret)
+ while True:
+ try:
+ ret += self._obj.decompress(data)
+ except zlib.error:
+ previous_state = self._state
+ # Ignore data after the first error
+ self._state = GzipDecoderState.SWALLOW_DATA
+ if previous_state == GzipDecoderState.OTHER_MEMBERS:
+ # Allow trailing garbage acceptable in other gzip clients
+ return bytes(ret)
+ raise
+ data = self._obj.unused_data
+ if not data:
+ return bytes(ret)
+ self._state = GzipDecoderState.OTHER_MEMBERS
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+
+if brotli is not None:
+ class BrotliDecoder(object):
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self):
+ self._obj = brotli.Decompressor()
+
+ def decompress(self, data):
+ if hasattr(self._obj, 'decompress'):
+ return self._obj.decompress(data)
+ return self._obj.process(data)
+
+ def flush(self):
+ if hasattr(self._obj, 'flush'):
+ return self._obj.flush()
+ return b''
+
+
+class MultiDecoder(object):
+ """
+ From RFC 7231:
+ If one or more encodings have been applied to a representation, the
+ sender that applied the encodings MUST generate a Content-Encoding
+ header field that lists the content codings in the order in which
+ they were applied.
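+
+ For example, ``Content-Encoding: gzip, br`` means gzip was applied first
+ and brotli second, so decoding runs the decoders in reverse order:
+ brotli, then gzip.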
+ """
+
+ def __init__(self, modes):
+ self._decoders = [_get_decoder(m.strip()) for m in modes.split(',')]
+
+ def flush(self):
+ return self._decoders[0].flush()
+
+ def decompress(self, data):
+ for d in reversed(self._decoders):
+ data = d.decompress(data)
+ return data
+
+
+def _get_decoder(mode):
+ if ',' in mode:
+ return MultiDecoder(mode)
+
+ if mode == 'gzip':
+ return GzipDecoder()
+
+ if brotli is not None and mode == 'br':
+ return BrotliDecoder()
+
+ return DeflateDecoder()
+
+
+class HTTPResponse(io.IOBase):
+ """
+ HTTP Response container.
+
+ Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed. This
+ class is also compatible with the Python standard library's :mod:`io`
+ module, and can hence be treated as a readable object in the context of that
+ framework.
+
+ Extra parameters for behaviour not present in httplib.HTTPResponse:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+
+ :param retries:
+ The retries contains the last :class:`~urllib3.util.retry.Retry` that
+ was used during the request.
+
+ :param enforce_content_length:
+ Enforce content length checking. Body returned by server must match
+ value of Content-Length header, if present. Otherwise, raise error.
+ """
+
+ CONTENT_DECODERS = ['gzip', 'deflate']
+ if brotli is not None:
+ CONTENT_DECODERS += ['br']
+ REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+ def __init__(self, body='', headers=None, status=0, version=0, reason=None,
+ strict=0, preload_content=True, decode_content=True,
+ original_response=None, pool=None, connection=None, msg=None,
+ retries=None, enforce_content_length=False,
+ request_method=None, request_url=None):
+
+ if isinstance(headers, HTTPHeaderDict):
+ self.headers = headers
+ else:
+ self.headers = HTTPHeaderDict(headers)
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.strict = strict
+ self.decode_content = decode_content
+ self.retries = retries
+ self.enforce_content_length = enforce_content_length
+
+ self._decoder = None
+ self._body = None
+ self._fp = None
+ self._original_response = original_response
+ self._fp_bytes_read = 0
+ self.msg = msg
+ self._request_url = request_url
+
+ if body and isinstance(body, (basestring, bytes)):
+ self._body = body
+
+ self._pool = pool
+ self._connection = connection
+
+ if hasattr(body, 'read'):
+ self._fp = body
+
+ # Are we using the chunked-style of transfer encoding?
+ self.chunked = False
+ self.chunk_left = None
+ tr_enc = self.headers.get('transfer-encoding', '').lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
+ self.chunked = True
+
+ # Determine length of response
+ self.length_remaining = self._init_length(request_method)
+
+ # If requested, preload the body.
+ if preload_content and not self._body:
+ self._body = self.read(decode_content=decode_content)
+
+ def get_redirect_location(self):
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in self.REDIRECT_STATUSES:
+ return self.headers.get('location')
+
+ return False
+
+ def release_conn(self):
+ if not self._pool or not self._connection:
+ return
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ @property
+ def data(self):
+ # For backwards-compat with urllib3 0.4 and earlier.
+ if self._body:
+ return self._body
+
+ if self._fp:
+ return self.read(cache_content=True)
+
+ @property
+ def connection(self):
+ return self._connection
+
+ def isclosed(self):
+ return is_fp_closed(self._fp)
+
+ def tell(self):
+ """
+ Obtain the number of bytes pulled over the wire so far. May differ from
+ the amount of content returned by :meth:`HTTPResponse.read` if bytes
+ are encoded on the wire (e.g., compressed).
+ """
+ return self._fp_bytes_read
+
+ def _init_length(self, request_method):
+ """
+ Set initial length value for Response content if available.
+ """
+ length = self.headers.get('content-length')
+
+ if length is not None:
+ if self.chunked:
+ # This Response will fail with an IncompleteRead if it can't be
+ # received as chunked. This method falls back to attempt reading
+ # the response before raising an exception.
+ log.warning("Received response with both Content-Length and "
+ "Transfer-Encoding set. This is expressly forbidden "
+ "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
+ "attempting to process response as Transfer-Encoding: "
+ "chunked.")
+ return None
+
+ try:
+ # RFC 7230 section 3.3.2 specifies multiple content lengths can
+ # be sent in a single Content-Length header
+ # (e.g. Content-Length: 42, 42). This line ensures the values
+ # are all valid ints and that as long as the `set` length is 1,
+ # all values are the same. Otherwise, the header is invalid.
+ lengths = set([int(val) for val in length.split(',')])
+ if len(lengths) > 1:
+ raise InvalidHeader("Content-Length contained multiple "
+ "unmatching values (%s)" % length)
+ length = lengths.pop()
+ except ValueError:
+ length = None
+ else:
+ if length < 0:
+ length = None
+
+ # Convert status to int for comparison
+ # In some cases, httplib returns a status of "_UNKNOWN"
+ try:
+ status = int(self.status)
+ except ValueError:
+ status = 0
+
+ # Check for responses that shouldn't include a body
+ if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD':
+ length = 0
+
+ return length
+
+ def _init_decoder(self):
+ """
+ Set-up the _decoder attribute if necessary.
+ """
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ if self._decoder is None:
+ if content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+ elif ',' in content_encoding:
+ encodings = [
+ e.strip() for e in content_encoding.split(',')
+ if e.strip() in self.CONTENT_DECODERS]
+ if len(encodings):
+ self._decoder = _get_decoder(content_encoding)
+
+ DECODER_ERROR_CLASSES = (IOError, zlib.error)
+ if brotli is not None:
+ DECODER_ERROR_CLASSES += (brotli.error,)
+
+ def _decode(self, data, decode_content, flush_decoder):
+ """
+ Decode the data passed in and potentially flush the decoder.
+ """
+ if not decode_content:
+ return data
+
+ try:
+ if self._decoder:
+ data = self._decoder.decompress(data)
+ except self.DECODER_ERROR_CLASSES as e:
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ raise DecodeError(
+ "Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding, e)
+ if flush_decoder:
+ data += self._flush_decoder()
+
+ return data
+
+ def _flush_decoder(self):
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ buf = self._decoder.decompress(b'')
+ return buf + self._decoder.flush()
+
+ return b''
+
+ @contextmanager
+ def _error_catcher(self):
+ """
+ Catch low-level Python exceptions, instead re-raising urllib3
+ variants, so that low-level exceptions are not leaked in the
+ high-level API.
+
+ On exit, release the connection back to the pool.
+ """
+ clean_exit = False
+
+ try:
+ try:
+ yield
+
+ except SocketTimeout:
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+ # there is yet no clean way to get at it from this context.
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except BaseSSLError as e:
+ # FIXME: Is there a better way to differentiate between SSLErrors?
+ if 'read operation timed out' not in str(e): # Defensive:
+ # This shouldn't happen but just in case we're missing an edge
+ # case, let's avoid swallowing SSL errors.
+ raise
+
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except (HTTPException, SocketError) as e:
+ # This includes IncompleteRead.
+ raise ProtocolError('Connection broken: %r' % e, e)
+
+ # If no exception is thrown, we should avoid cleaning up
+ # unnecessarily.
+ clean_exit = True
+ finally:
+ # If we didn't terminate cleanly, we need to throw away our
+ # connection.
+ if not clean_exit:
+ # The response may not be closed but we're not going to use it
+ # anymore so close it now to ensure that the connection is
+ # released back to the pool.
+ if self._original_response:
+ self._original_response.close()
+
+ # Closing the response may not actually be sufficient to close
+ # everything, so if we have a hold of the connection close that
+ # too.
+ if self._connection:
+ self._connection.close()
+
+ # If we hold the original response but it's closed now, we should
+ # return the connection back to the pool.
+ if self._original_response and self._original_response.isclosed():
+ self.release_conn()
+
+ def read(self, amt=None, decode_content=None, cache_content=False):
+ """
+ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+ parameters: ``decode_content`` and ``cache_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param cache_content:
+ If True, will save the returned data such that the same result is
+ returned regardless of the state of the underlying file object. This
+ is useful if you want the ``.data`` property to continue working
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
+ set.)
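+
+ A short sketch (assumes an existing response object ``r``)::
+
+     head = r.read(1024)  # decode up to 1024 bytes read off the wire
+     rest = r.read()      # read and decode the remainder of the body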
+ """
+ self._init_decoder()
+ if decode_content is None:
+ decode_content = self.decode_content
+
+ if self._fp is None:
+ return
+
+ flush_decoder = False
+ data = None
+
+ with self._error_catcher():
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read()
+ flush_decoder = True
+ else:
+ cache_content = False
+ data = self._fp.read(amt)
+ if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
+ # not properly close the connection in all cases. There is
+ # no harm in redundantly calling close.
+ self._fp.close()
+ flush_decoder = True
+ if self.enforce_content_length and self.length_remaining not in (0, None):
+ # This is an edge case that httplib failed to cover due
+ # to concerns of backward compatibility. We're
+ # addressing it here to make sure IncompleteRead is
+ # raised during streaming, so all calls with incorrect
+ # Content-Length are caught.
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
+
+ if data:
+ self._fp_bytes_read += len(data)
+ if self.length_remaining is not None:
+ self.length_remaining -= len(data)
+
+ data = self._decode(data, decode_content, flush_decoder)
+
+ if cache_content:
+ self._body = data
+
+ return data
+
+ def stream(self, amt=2**16, decode_content=None):
+ """
+ A generator wrapper for the read() method. A call will block until
+ ``amt`` bytes have been read from the connection or until the
+ connection is closed.
+
+ :param amt:
+ How much of the content to read. The generator will return up to this
+ much data per iteration, but may return less. This is particularly
+ likely when using compressed data. However, the empty string will
+ never be returned.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
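+
+ For example, to spool a large body to disk without holding it all in
+ memory (``r`` is assumed to be a response created with
+ ``preload_content=False``)::
+
+     with open('body.bin', 'wb') as fh:
+         for chunk in r.stream(2**14):
+             fh.write(chunk)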
+ """
+ if self.chunked and self.supports_chunked_reads():
+ for line in self.read_chunked(amt, decode_content=decode_content):
+ yield line
+ else:
+ while not is_fp_closed(self._fp):
+ data = self.read(amt=amt, decode_content=decode_content)
+
+ if data:
+ yield data
+
+ @classmethod
+ def from_httplib(ResponseCls, r, **response_kw):
+ """
+ Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+ corresponding :class:`urllib3.response.HTTPResponse` object.
+
+ Remaining parameters are passed to the HTTPResponse constructor, along
+ with ``original_response=r``.
+ """
+ headers = r.msg
+
+ if not isinstance(headers, HTTPHeaderDict):
+ if PY3:
+ headers = HTTPHeaderDict(headers.items())
+ else:
+ # Python 2.7
+ headers = HTTPHeaderDict.from_httplib(headers)
+
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
+ strict = getattr(r, 'strict', 0)
+ resp = ResponseCls(body=r,
+ headers=headers,
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw)
+ return resp
+
+ # Backwards-compatibility methods for httplib.HTTPResponse
+ def getheaders(self):
+ return self.headers
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
+
+ # Backwards compatibility for http.cookiejar
+ def info(self):
+ return self.headers
+
+ # Overrides from io.IOBase
+ def close(self):
+ if not self.closed:
+ self._fp.close()
+
+ if self._connection:
+ self._connection.close()
+
+ @property
+ def closed(self):
+ if self._fp is None:
+ return True
+ elif hasattr(self._fp, 'isclosed'):
+ return self._fp.isclosed()
+ elif hasattr(self._fp, 'closed'):
+ return self._fp.closed
+ else:
+ return True
+
+ def fileno(self):
+ if self._fp is None:
+ raise IOError("HTTPResponse has no file to get a fileno from")
+ elif hasattr(self._fp, "fileno"):
+ return self._fp.fileno()
+ else:
+ raise IOError("The file-like object this HTTPResponse is wrapped "
+ "around has no file descriptor")
+
+ def flush(self):
+ if self._fp is not None and hasattr(self._fp, 'flush'):
+ return self._fp.flush()
+
+ def readable(self):
+ # This method is required for `io` module compatibility.
+ return True
+
+ def readinto(self, b):
+ # This method is required for `io` module compatibility.
+ temp = self.read(len(b))
+ if len(temp) == 0:
+ return 0
+ else:
+ b[:len(temp)] = temp
+ return len(temp)
+
+ def supports_chunked_reads(self):
+ """
+ Checks if the underlying file-like object looks like an
+ httplib.HTTPResponse object. We do this by testing for the fp
+ attribute. If it is present we assume it returns raw chunks as
+ processed by read_chunked().
+ """
+ return hasattr(self._fp, 'fp')
+
+ def _update_chunk_length(self):
+ # First, we'll figure out length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return
+ line = self._fp.fp.readline()
+ line = line.split(b';', 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ # Invalid chunked protocol response, abort.
+ self.close()
+ raise httplib.IncompleteRead(line)
+
+ def _handle_chunk(self, amt):
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left)
+ returned_chunk = chunk
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif amt < self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk
+
+ def read_chunked(self, amt=None, decode_content=None):
+ """
+ Similar to :meth:`HTTPResponse.read`, but with an additional
+ parameter: ``decode_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ self._init_decoder()
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
+ if not self.chunked:
+ raise ResponseNotChunked(
+ "Response is not chunked. "
+ "Header 'transfer-encoding: chunked' is missing.")
+ if not self.supports_chunked_reads():
+ raise BodyNotHttplibCompatible(
+ "Body should be httplib.HTTPResponse like. "
+ "It should have have an fp attribute which returns raw chunks.")
+
+ with self._error_catcher():
+ # Don't bother reading the body of a HEAD request.
+ if self._original_response and is_response_to_head(self._original_response):
+ self._original_response.close()
+ return
+
+ # If a response is already read and closed
+ # then return immediately.
+ if self._fp.fp is None:
+ return
+
+ while True:
+ self._update_chunk_length()
+ if self.chunk_left == 0:
+ break
+ chunk = self._handle_chunk(amt)
+ decoded = self._decode(chunk, decode_content=decode_content,
+ flush_decoder=False)
+ if decoded:
+ yield decoded
+
+ if decode_content:
+ # On CPython and PyPy, we should never need to flush the
+ # decoder. However, on Jython we *might* need to, so
+ # lets defensively do it anyway.
+ decoded = self._flush_decoder()
+ if decoded: # Platform-specific: Jython.
+ yield decoded
+
+ # Chunk content ends with \r\n: discard it.
+ while True:
+ line = self._fp.fp.readline()
+ if not line:
+ # Some sites may not end with '\r\n'.
+ break
+ if line == b'\r\n':
+ break
+
+ # We read everything; close the "file".
+ if self._original_response:
+ self._original_response.close()
+
+ def geturl(self):
+ """
+ Returns the URL that was the source of this response.
+ If the request that generated this response redirected, this method
+ will return the final redirect location.
+ """
+ if self.retries is not None and len(self.retries.history):
+ return self.retries.history[-1].redirect_location
+ else:
+ return self._request_url
+
+ def __iter__(self):
+ buffer = [b""]
+ for chunk in self.stream(decode_content=True):
+ if b"\n" in chunk:
+ chunk = chunk.split(b"\n")
+ yield b"".join(buffer) + chunk[0] + b"\n"
+ for x in chunk[1:-1]:
+ yield x + b"\n"
+ if chunk[-1]:
+ buffer = [chunk[-1]]
+ else:
+ buffer = []
+ else:
+ buffer.append(chunk)
+ if buffer:
+ yield b"".join(buffer)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__init__.py
new file mode 100644
index 00000000..2914bb46
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__init__.py
@@ -0,0 +1,56 @@
+from __future__ import absolute_import
+# For backwards compatibility, provide imports that used to be here.
+from .connection import is_connection_dropped
+from .request import make_headers
+from .response import is_fp_closed
+from .ssl_ import (
+ SSLContext,
+ HAS_SNI,
+ IS_PYOPENSSL,
+ IS_SECURETRANSPORT,
+ assert_fingerprint,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+ PROTOCOL_TLS,
+)
+from .timeout import (
+ current_time,
+ Timeout,
+)
+
+from .retry import Retry
+from .url import (
+ get_host,
+ parse_url,
+ split_first,
+ Url,
+)
+from .wait import (
+ wait_for_read,
+ wait_for_write
+)
+
+__all__ = (
+ 'HAS_SNI',
+ 'IS_PYOPENSSL',
+ 'IS_SECURETRANSPORT',
+ 'SSLContext',
+ 'PROTOCOL_TLS',
+ 'Retry',
+ 'Timeout',
+ 'Url',
+ 'assert_fingerprint',
+ 'current_time',
+ 'is_connection_dropped',
+ 'is_fp_closed',
+ 'get_host',
+ 'parse_url',
+ 'make_headers',
+ 'resolve_cert_reqs',
+ 'resolve_ssl_version',
+ 'split_first',
+ 'ssl_wrap_socket',
+ 'wait_for_read',
+ 'wait_for_write'
+)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..514b361e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc
new file mode 100644
index 00000000..8244ec96
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc
new file mode 100644
index 00000000..548ec88d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc
new file mode 100644
index 00000000..d062891a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc
new file mode 100644
index 00000000..cd8cf970
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc
new file mode 100644
index 00000000..64bd141e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc
new file mode 100644
index 00000000..1f7e7f5e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc
new file mode 100644
index 00000000..3af342ba
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc
new file mode 100644
index 00000000..2bad415b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc
new file mode 100644
index 00000000..c9f47e30
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/connection.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/connection.py
new file mode 100644
index 00000000..5ad70b2f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/connection.py
@@ -0,0 +1,134 @@
+from __future__ import absolute_import
+import socket
+from .wait import NoWayToWaitForSocketError, wait_for_read
+from ..contrib import _appengine_environ
+
+
+def is_connection_dropped(conn): # Platform-specific
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ :class:`httplib.HTTPConnection` object.
+
+ Note: For platforms like AppEngine, this will always return ``False`` to
+ let the platform handle connection recycling transparently for us.
+ """
+ sock = getattr(conn, 'sock', False)
+ if sock is False: # Platform-specific: AppEngine
+ return False
+ if sock is None: # Connection already closed (such as by httplib).
+ return True
+ try:
+ # Returns True if readable, which here means it's been dropped
+ return wait_for_read(sock, timeout=0.0)
+ except NoWayToWaitForSocketError: # Platform-specific: AppEngine
+ return False
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. Added to its signature is only `socket_options`.
+# One additional modification is that we avoid binding to IPv6 servers
+# discovered in DNS if the system doesn't have IPv6 functionality.
+def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None, socket_options=None):
+ """Connect to *address* and return the socket object.
+
+ Convenience function. Connect to *address* (a 2-tuple ``(host,
+ port)``) and return the socket object. Passing the optional
+ *timeout* parameter will set the timeout on the socket instance
+ before attempting to connect. If no *timeout* is supplied, the
+ global default timeout setting returned by :func:`getdefaulttimeout`
+ is used. If *source_address* is set it must be a tuple of (host, port)
+ for the socket to bind as a source address before making the connection.
+ A host of '' or port 0 tells the OS to use the default.
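+
+ A hedged sketch (the endpoint and options are illustrative; this opens a
+ real TCP connection)::
+
+     sock = create_connection(('example.com', 80), timeout=5,
+                              socket_options=[(socket.IPPROTO_TCP,
+                                               socket.TCP_NODELAY, 1)])
+     sock.close()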
+ """
+
+ host, port = address
+ if host.startswith('['):
+ host = host.strip('[]')
+ err = None
+
+ # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+ # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+ # The original create_connection function always returns all records.
+ family = allowed_gai_family()
+
+ for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ sock = None
+ try:
+ sock = socket.socket(af, socktype, proto)
+
+ # If provided, set socket level options before connecting.
+ _set_socket_options(sock, socket_options)
+
+ if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+ sock.settimeout(timeout)
+ if source_address:
+ sock.bind(source_address)
+ sock.connect(sa)
+ return sock
+
+ except socket.error as e:
+ err = e
+ if sock is not None:
+ sock.close()
+ sock = None
+
+ if err is not None:
+ raise err
+
+ raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+ if options is None:
+ return
+
+ for opt in options:
+ sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+ """This function is designed to work in the context of
+ getaddrinfo, where family=socket.AF_UNSPEC is the default and
+ will perform a DNS search for both IPv6 and IPv4 records."""
+
+ family = socket.AF_INET
+ if HAS_IPV6:
+ family = socket.AF_UNSPEC
+ return family
+
+
+def _has_ipv6(host):
+ """ Returns True if the system can bind an IPv6 address. """
+ sock = None
+ has_ipv6 = False
+
+ # App Engine doesn't support IPV6 sockets and actually has a quota on the
+ # number of sockets that can be used, so just early out here instead of
+ # creating a socket needlessly.
+ # See https://github.com/urllib3/urllib3/issues/1446
+ if _appengine_environ.is_appengine_sandbox():
+ return False
+
+ if socket.has_ipv6:
+ # has_ipv6 returns true if cPython was compiled with IPv6 support.
+ # It does not tell us if the system has IPv6 support enabled. To
+ # determine that we must bind to an IPv6 address.
+ # https://github.com/shazow/urllib3/pull/611
+ # https://bugs.python.org/issue658327
+ try:
+ sock = socket.socket(socket.AF_INET6)
+ sock.bind((host, 0))
+ has_ipv6 = True
+ except Exception:
+ pass
+
+ if sock:
+ sock.close()
+ return has_ipv6
+
+
+HAS_IPV6 = _has_ipv6('::1')
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/queue.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/queue.py
new file mode 100644
index 00000000..d3d379a1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/queue.py
@@ -0,0 +1,21 @@
+import collections
+from ..packages import six
+from ..packages.six.moves import queue
+
+if six.PY2:
+ # Queue is imported for side effects on MS Windows. See issue #229.
+ import Queue as _unused_module_Queue # noqa: F401
+
+
+class LifoQueue(queue.Queue):
+ def _init(self, _):
+ self.queue = collections.deque()
+
+ def _qsize(self, len=len):
+ return len(self.queue)
+
+ def _put(self, item):
+ self.queue.append(item)
+
+ def _get(self):
+ return self.queue.pop()
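+
+
+# LIFO order means the most recently returned connection is handed out
+# first, keeping a small set of warm sockets in use. A tiny sketch:
+#
+#     q = LifoQueue()
+#     q.put('conn-a')
+#     q.put('conn-b')
+#     q.get()   # -> 'conn-b' (last in, first out)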
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/request.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/request.py
new file mode 100644
index 00000000..280b8530
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/request.py
@@ -0,0 +1,125 @@
+from __future__ import absolute_import
+from base64 import b64encode
+
+from ..packages.six import b, integer_types
+from ..exceptions import UnrewindableBodyError
+
+ACCEPT_ENCODING = 'gzip,deflate'
+try:
+ import brotli as _unused_module_brotli # noqa: F401
+except ImportError:
+ pass
+else:
+ ACCEPT_ENCODING += ',br'
+
+_FAILEDTELL = object()
+
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+ basic_auth=None, proxy_basic_auth=None, disable_cache=None):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ :param proxy_basic_auth:
+ Colon-separated username:password string for 'proxy-authorization: basic ...'
+ auth header.
+
+ :param disable_cache:
+ If ``True``, adds 'cache-control: no-cache' header.
+
+ Example::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ','.join(accept_encoding)
+ else:
+ accept_encoding = ACCEPT_ENCODING
+ headers['accept-encoding'] = accept_encoding
+
+ if user_agent:
+ headers['user-agent'] = user_agent
+
+ if keep_alive:
+ headers['connection'] = 'keep-alive'
+
+ if basic_auth:
+ headers['authorization'] = 'Basic ' + \
+ b64encode(b(basic_auth)).decode('utf-8')
+
+ if proxy_basic_auth:
+ headers['proxy-authorization'] = 'Basic ' + \
+ b64encode(b(proxy_basic_auth)).decode('utf-8')
+
+ if disable_cache:
+ headers['cache-control'] = 'no-cache'
+
+ return headers
+
+
+def set_file_position(body, pos):
+ """
+ If a position is provided, move file to that point.
+ Otherwise, we'll attempt to record a position for future use.
+ """
+ if pos is not None:
+ rewind_body(body, pos)
+ elif getattr(body, 'tell', None) is not None:
+ try:
+ pos = body.tell()
+ except (IOError, OSError):
+ # This differentiates from None, allowing us to catch
+ # a failed `tell()` later when trying to rewind the body.
+ pos = _FAILEDTELL
+
+ return pos
+
+
+def rewind_body(body, body_pos):
+ """
+ Attempt to rewind body to a certain position.
+ Primarily used for request redirects and retries.
+
+ :param body:
+ File-like object that supports seek.
+
+ :param int pos:
+ Position to seek to in file.
+ """
+ body_seek = getattr(body, 'seek', None)
+ if body_seek is not None and isinstance(body_pos, integer_types):
+ try:
+ body_seek(body_pos)
+ except (IOError, OSError):
+ raise UnrewindableBodyError("An error occurred when rewinding request "
+ "body for redirect/retry.")
+ elif body_pos is _FAILEDTELL:
+ raise UnrewindableBodyError("Unable to record file position for rewinding "
+ "request body during a redirect/retry.")
+ else:
+ raise ValueError("body_pos must be of type integer, "
+ "instead it was %s." % type(body_pos))
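set_file_position() and rewind_body() cooperate across redirects and retries: the first records where the body started (or the _FAILEDTELL sentinel when tell() fails), and the second seeks back to that offset before the body is re-sent. A small sketch with an in-memory body, assuming the vendored import path:

    import io
    from pip._vendor.urllib3.util.request import (
        rewind_body, set_file_position)

    body = io.BytesIO(b"payload")
    pos = set_file_position(body, None)  # records 0 via body.tell()
    body.read()                          # first attempt consumes the body
    rewind_body(body, pos)               # seek back before the retry
    assert body.read() == b"payload"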
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/response.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/response.py
new file mode 100644
index 00000000..3d548648
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/response.py
@@ -0,0 +1,87 @@
+from __future__ import absolute_import
+from ..packages.six.moves import http_client as httplib
+
+from ..exceptions import HeaderParsingError
+
+
+def is_fp_closed(obj):
+ """
+ Checks whether a given file-like object is closed.
+
+ :param obj:
+ The file-like object to check.
+ """
+
+ try:
+ # Check `isclosed()` first, in case Python3 doesn't set `closed`.
+ # GH Issue #928
+ return obj.isclosed()
+ except AttributeError:
+ pass
+
+ try:
+ # Check via the official file-like-object way.
+ return obj.closed
+ except AttributeError:
+ pass
+
+ try:
+ # Check if the object is a container for another file-like object that
+ # gets released on exhaustion (e.g. HTTPResponse).
+ return obj.fp is None
+ except AttributeError:
+ pass
+
+ raise ValueError("Unable to determine whether fp is closed.")
+
+
+def assert_header_parsing(headers):
+ """
+ Asserts whether all headers have been successfully parsed.
+ Extracts encountered errors from the result of parsing headers.
+
+ Only works on Python 3.
+
+ :param headers: Headers to verify.
+ :type headers: `httplib.HTTPMessage`.
+
+ :raises urllib3.exceptions.HeaderParsingError:
+ If parsing errors are found.
+ """
+
+ # This will fail silently if we pass in the wrong kind of parameter.
+ # To make debugging easier add an explicit check.
+ if not isinstance(headers, httplib.HTTPMessage):
+ raise TypeError('expected httplib.HTTPMessage, got {0}.'.format(
+ type(headers)))
+
+ defects = getattr(headers, 'defects', None)
+ get_payload = getattr(headers, 'get_payload', None)
+
+ unparsed_data = None
+ if get_payload:
+ # get_payload is actually email.message.Message.get_payload;
+ # we're only interested in the result if it's not a multipart message
+ if not headers.is_multipart():
+ payload = get_payload()
+
+ if isinstance(payload, (bytes, str)):
+ unparsed_data = payload
+
+ if defects or unparsed_data:
+ raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+
+
+def is_response_to_head(response):
+ """
+ Checks whether the request that produced this response was a HEAD request.
+ Handles the quirks of AppEngine.
+
+ :param response: The response to inspect.
+ :type response: :class:`httplib.HTTPResponse`
+ """
+ # FIXME: Can we do this somehow without accessing private httplib _method?
+ method = response._method
+ if isinstance(method, int): # Platform-specific: Appengine
+ return method == 3
+ return method.upper() == 'HEAD'
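is_fp_closed() probes three conventions in order (isclosed() for httplib responses, the standard .closed attribute, then a wrapped .fp), so it accepts plain file objects as well as HTTPResponse containers. For example:

    import io
    from pip._vendor.urllib3.util.response import is_fp_closed

    fp = io.BytesIO(b"data")
    assert not is_fp_closed(fp)  # falls through to the .closed check
    fp.close()
    assert is_fp_closed(fp)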
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/retry.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/retry.py
new file mode 100644
index 00000000..02429ee8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/retry.py
@@ -0,0 +1,412 @@
+from __future__ import absolute_import
+import time
+import logging
+from collections import namedtuple
+from itertools import takewhile
+import email
+import re
+
+from ..exceptions import (
+ ConnectTimeoutError,
+ MaxRetryError,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseError,
+ InvalidHeader,
+)
+from ..packages import six
+
+
+log = logging.getLogger(__name__)
+
+
+# Data structure for representing the metadata of requests that result in a retry.
+RequestHistory = namedtuple('RequestHistory', ["method", "url", "error",
+ "status", "redirect_location"])
+
+
+class Retry(object):
+ """ Retry configuration.
+
+ Each retry attempt will create a new Retry object with updated values, so
+ they can be safely reused.
+
+ Retries can be defined as a default for a pool::
+
+ retries = Retry(connect=5, read=2, redirect=5)
+ http = PoolManager(retries=retries)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+ Retries can be disabled by passing ``False``::
+
+ response = http.request('GET', 'http://example.com/', retries=False)
+
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+ retries are disabled, in which case the causing exception will be raised.
+
+ :param int total:
+ Total number of retries to allow. Takes precedence over other counts.
+
+ Set to ``None`` to remove this constraint and fall back on other
+ counts. It's a good idea to set this to some sensibly-high value to
+ account for unexpected edge cases and avoid infinite retry loops.
+
+ Set to ``0`` to fail on the first retry.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int connect:
+ How many connection-related errors to retry on.
+
+ These are errors raised before the request is sent to the remote
+ server, which we assume has not begun processing the request.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int read:
+ How many times to retry on read errors.
+
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int redirect:
+ How many redirects to perform. Limit this to avoid infinite redirect
+ loops.
+
+ A redirect is an HTTP response with a status code 301, 302, 303, 307 or
+ 308.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int status:
+ How many times to retry on bad status codes.
+
+ These are retries made on responses, where the status code matches
+ ``status_forcelist``.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param iterable method_whitelist:
+ Set of uppercased HTTP method verbs that we should retry on.
+
+ By default, we only retry on methods which are considered to be
+ idempotent (multiple requests with the same parameters end with the
+ same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+
+ Set to a ``False`` value to retry on any verb.
+
+ :param iterable status_forcelist:
+ A set of integer HTTP status codes that we should force a retry on.
+ A retry is initiated if the request method is in ``method_whitelist``
+ and the response status code is in ``status_forcelist``.
+
+ By default, this is disabled with ``None``.
+
+ :param float backoff_factor:
+ A backoff factor to apply between attempts after the second try
+ (most errors are resolved immediately by a second try without a
+ delay). urllib3 will sleep for::
+
+ {backoff factor} * (2 ** ({number of total retries} - 1))
+
+ seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+ for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
+ than :attr:`Retry.BACKOFF_MAX`.
+
+ By default, backoff is disabled (set to 0).
+
+ :param bool raise_on_redirect: Whether, if the number of redirects is
+ exhausted, to raise a MaxRetryError, or to return a response with a
+ response code in the 3xx range.
+
+ :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
+ whether we should raise an exception, or return a response,
+ if status falls in ``status_forcelist`` range and retries have
+ been exhausted.
+
+ :param tuple history: The history of the request encountered during
+ each call to :meth:`~Retry.increment`. The list is in the order
+ the requests occurred. Each list item is of class :class:`RequestHistory`.
+
+ :param bool respect_retry_after_header:
+ Whether to respect Retry-After header on status codes defined as
+ :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
+
+ :param iterable remove_headers_on_redirect:
+ Sequence of headers to remove from the request when a response
+ indicating a redirect is returned before firing off the redirected
+ request.
+ """
+
+ DEFAULT_METHOD_WHITELIST = frozenset([
+ 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
+
+ RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+
+ DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization'])
+
+ #: Maximum backoff time.
+ BACKOFF_MAX = 120
+
+ def __init__(self, total=10, connect=None, read=None, redirect=None, status=None,
+ method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
+ backoff_factor=0, raise_on_redirect=True, raise_on_status=True,
+ history=None, respect_retry_after_header=True,
+ remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST):
+
+ self.total = total
+ self.connect = connect
+ self.read = read
+ self.status = status
+
+ if redirect is False or total is False:
+ redirect = 0
+ raise_on_redirect = False
+
+ self.redirect = redirect
+ self.status_forcelist = status_forcelist or set()
+ self.method_whitelist = method_whitelist
+ self.backoff_factor = backoff_factor
+ self.raise_on_redirect = raise_on_redirect
+ self.raise_on_status = raise_on_status
+ self.history = history or tuple()
+ self.respect_retry_after_header = respect_retry_after_header
+ self.remove_headers_on_redirect = frozenset([
+ h.lower() for h in remove_headers_on_redirect])
+
+ def new(self, **kw):
+ params = dict(
+ total=self.total,
+ connect=self.connect, read=self.read, redirect=self.redirect, status=self.status,
+ method_whitelist=self.method_whitelist,
+ status_forcelist=self.status_forcelist,
+ backoff_factor=self.backoff_factor,
+ raise_on_redirect=self.raise_on_redirect,
+ raise_on_status=self.raise_on_status,
+ history=self.history,
+ remove_headers_on_redirect=self.remove_headers_on_redirect
+ )
+ params.update(kw)
+ return type(self)(**params)
+
+ @classmethod
+ def from_int(cls, retries, redirect=True, default=None):
+ """ Backwards-compatibility for the old retries format."""
+ if retries is None:
+ retries = default if default is not None else cls.DEFAULT
+
+ if isinstance(retries, Retry):
+ return retries
+
+ redirect = bool(redirect) and None
+ new_retries = cls(retries, redirect=redirect)
+ log.debug("Converted retries value: %r -> %r", retries, new_retries)
+ return new_retries
+
+ def get_backoff_time(self):
+ """ Formula for computing the current backoff
+
+ :rtype: float
+ """
+ # We want to consider only the last consecutive errors sequence (ignoring redirects).
+ consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None,
+ reversed(self.history))))
+ if consecutive_errors_len <= 1:
+ return 0
+
+ backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+ return min(self.BACKOFF_MAX, backoff_value)
+
+ def parse_retry_after(self, retry_after):
+ # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+ if re.match(r"^\s*[0-9]+\s*$", retry_after):
+ seconds = int(retry_after)
+ else:
+ retry_date_tuple = email.utils.parsedate(retry_after)
+ if retry_date_tuple is None:
+ raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
+ retry_date = time.mktime(retry_date_tuple)
+ seconds = retry_date - time.time()
+
+ if seconds < 0:
+ seconds = 0
+
+ return seconds
+
+ def get_retry_after(self, response):
+ """ Get the value of Retry-After in seconds. """
+
+ retry_after = response.getheader("Retry-After")
+
+ if retry_after is None:
+ return None
+
+ return self.parse_retry_after(retry_after)
+
+ def sleep_for_retry(self, response=None):
+ retry_after = self.get_retry_after(response)
+ if retry_after:
+ time.sleep(retry_after)
+ return True
+
+ return False
+
+ def _sleep_backoff(self):
+ backoff = self.get_backoff_time()
+ if backoff <= 0:
+ return
+ time.sleep(backoff)
+
+ def sleep(self, response=None):
+ """ Sleep between retry attempts.
+
+ This method will respect a server's ``Retry-After`` response header
+ and sleep the duration of the time requested. If that is not present, it
+ will use an exponential backoff. By default, the backoff factor is 0 and
+ this method will return immediately.
+ """
+
+ if response:
+ slept = self.sleep_for_retry(response)
+ if slept:
+ return
+
+ self._sleep_backoff()
+
+ def _is_connection_error(self, err):
+ """ Errors when we're fairly sure that the server did not receive the
+ request, so it should be safe to retry.
+ """
+ return isinstance(err, ConnectTimeoutError)
+
+ def _is_read_error(self, err):
+ """ Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
+ """
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+ def _is_method_retryable(self, method):
+ """ Checks if a given HTTP method should be retried upon, depending if
+ it is included on the method whitelist.
+ """
+ if self.method_whitelist and method.upper() not in self.method_whitelist:
+ return False
+
+ return True
+
+ def is_retry(self, method, status_code, has_retry_after=False):
+ """ Is this method/status code retryable? (Based on whitelists and control
+ variables such as the number of total retries to allow, whether to
+ respect the Retry-After header, whether this header is present, and
+ whether the returned status code is on the list of status codes to
+ be retried upon when the aforementioned header is present)
+ """
+ if not self._is_method_retryable(method):
+ return False
+
+ if self.status_forcelist and status_code in self.status_forcelist:
+ return True
+
+ return (self.total and self.respect_retry_after_header and
+ has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES))
+
+ def is_exhausted(self):
+ """ Are we out of retries? """
+ retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
+ retry_counts = list(filter(None, retry_counts))
+ if not retry_counts:
+ return False
+
+ return min(retry_counts) < 0
+
+ def increment(self, method=None, url=None, response=None, error=None,
+ _pool=None, _stacktrace=None):
+ """ Return a new Retry object with incremented retry counters.
+
+ :param response: A response object, or None, if the server did not
+ return a response.
+ :type response: :class:`~urllib3.response.HTTPResponse`
+ :param Exception error: An error encountered during the request, or
+ None if the response was received successfully.
+
+ :return: A new ``Retry`` object.
+ """
+ if self.total is False and error:
+ # Disabled, indicate to re-raise the error.
+ raise six.reraise(type(error), error, _stacktrace)
+
+ total = self.total
+ if total is not None:
+ total -= 1
+
+ connect = self.connect
+ read = self.read
+ redirect = self.redirect
+ status_count = self.status
+ cause = 'unknown'
+ status = None
+ redirect_location = None
+
+ if error and self._is_connection_error(error):
+ # Connect retry?
+ if connect is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif connect is not None:
+ connect -= 1
+
+ elif error and self._is_read_error(error):
+ # Read retry?
+ if read is False or not self._is_method_retryable(method):
+ raise six.reraise(type(error), error, _stacktrace)
+ elif read is not None:
+ read -= 1
+
+ elif response and response.get_redirect_location():
+ # Redirect retry?
+ if redirect is not None:
+ redirect -= 1
+ cause = 'too many redirects'
+ redirect_location = response.get_redirect_location()
+ status = response.status
+
+ else:
+ # Incrementing because of a server error like a 500 in
+ # status_forcelist and the given method is in the whitelist
+ cause = ResponseError.GENERIC_ERROR
+ if response and response.status:
+ if status_count is not None:
+ status_count -= 1
+ cause = ResponseError.SPECIFIC_ERROR.format(
+ status_code=response.status)
+ status = response.status
+
+ history = self.history + (RequestHistory(method, url, error, status, redirect_location),)
+
+ new_retry = self.new(
+ total=total,
+ connect=connect, read=read, redirect=redirect, status=status_count,
+ history=history)
+
+ if new_retry.is_exhausted():
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
+
+ log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
+
+ return new_retry
+
+ def __repr__(self):
+ return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
+ 'read={self.read}, redirect={self.redirect}, status={self.status})').format(
+ cls=type(self), self=self)
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
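Because get_backoff_time() counts the consecutive non-redirect entries in history, the sleep schedule produced by backoff_factor is 0 for the first retry and then doubles, capped at BACKOFF_MAX. A sketch of that progression, assuming the vendored import path (increment() is driven by hand here; in practice the connection pool calls it):

    from pip._vendor.urllib3.util.retry import Retry

    retry = Retry(total=5, backoff_factor=0.1)
    delays = []
    for _ in range(4):
        retry = retry.increment(method='GET', url='/', response=None, error=None)
        delays.append(retry.get_backoff_time())

    # {backoff factor} * (2 ** (retries - 1)), with the first retry at 0.
    assert delays == [0, 0.2, 0.4, 0.8]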
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/ssl_.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/ssl_.py
new file mode 100644
index 00000000..fbdef65d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/ssl_.py
@@ -0,0 +1,392 @@
+from __future__ import absolute_import
+import errno
+import warnings
+import hmac
+import re
+
+from binascii import hexlify, unhexlify
+from hashlib import md5, sha1, sha256
+
+from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
+from ..packages import six
+from ..packages.rfc3986 import abnf_regexp
+
+
+SSLContext = None
+HAS_SNI = False
+IS_PYOPENSSL = False
+IS_SECURETRANSPORT = False
+
+# Maps the length of a digest to a possible hash function producing this digest
+HASHFUNC_MAP = {
+ 32: md5,
+ 40: sha1,
+ 64: sha256,
+}
+
+
+def _const_compare_digest_backport(a, b):
+ """
+ Compare two digests of equal length in constant time.
+
+ The digests must be of type str/bytes.
+ Returns True if the digests match, and False otherwise.
+ """
+ result = abs(len(a) - len(b))
+ for l, r in zip(bytearray(a), bytearray(b)):
+ result |= l ^ r
+ return result == 0
+
+
+_const_compare_digest = getattr(hmac, 'compare_digest',
+ _const_compare_digest_backport)
+
+# Borrow rfc3986's regular expressions for IPv4
+# and IPv6 addresses for use in is_ipaddress()
+_IP_ADDRESS_REGEX = re.compile(
+ r'^(?:%s|%s|%s)$' % (
+ abnf_regexp.IPv4_RE,
+ abnf_regexp.IPv6_RE,
+ abnf_regexp.IPv6_ADDRZ_RFC4007_RE
+ )
+)
+
+try: # Test for SSL features
+ import ssl
+ from ssl import wrap_socket, CERT_REQUIRED
+ from ssl import HAS_SNI # Has SNI?
+except ImportError:
+ pass
+
+try: # Platform-specific: Python 3.6
+ from ssl import PROTOCOL_TLS
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+except ImportError:
+ try:
+ from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+ except ImportError:
+ PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+
+
+try:
+ from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
+except ImportError:
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+ OP_NO_COMPRESSION = 0x20000
+
+
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
+# security,
+# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
+# - disable NULL authentication, MD5 MACs, DSS, and other
+# insecure ciphers for security reasons.
+# - NOTE: TLS 1.3 cipher suites are managed through a different interface
+# not exposed by CPython (yet!) and are enabled by default if they're available.
+DEFAULT_CIPHERS = ':'.join([
+ 'ECDHE+AESGCM',
+ 'ECDHE+CHACHA20',
+ 'DHE+AESGCM',
+ 'DHE+CHACHA20',
+ 'ECDH+AESGCM',
+ 'DH+AESGCM',
+ 'ECDH+AES',
+ 'DH+AES',
+ 'RSA+AESGCM',
+ 'RSA+AES',
+ '!aNULL',
+ '!eNULL',
+ '!MD5',
+ '!DSS',
+])
+
+try:
+ from ssl import SSLContext # Modern SSL?
+except ImportError:
+ class SSLContext(object): # Platform-specific: Python 2
+ def __init__(self, protocol_version):
+ self.protocol = protocol_version
+ # Use default values from a real SSLContext
+ self.check_hostname = False
+ self.verify_mode = ssl.CERT_NONE
+ self.ca_certs = None
+ self.options = 0
+ self.certfile = None
+ self.keyfile = None
+ self.ciphers = None
+
+ def load_cert_chain(self, certfile, keyfile):
+ self.certfile = certfile
+ self.keyfile = keyfile
+
+ def load_verify_locations(self, cafile=None, capath=None):
+ self.ca_certs = cafile
+
+ if capath is not None:
+ raise SSLError("CA directories not supported in older Pythons")
+
+ def set_ciphers(self, cipher_suite):
+ self.ciphers = cipher_suite
+
+ def wrap_socket(self, socket, server_hostname=None, server_side=False):
+ warnings.warn(
+ 'A true SSLContext object is not available. This prevents '
+ 'urllib3 from configuring SSL appropriately and may cause '
+ 'certain SSL connections to fail. You can upgrade to a newer '
+ 'version of Python to solve this. For more information, see '
+ 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
+ '#ssl-warnings',
+ InsecurePlatformWarning
+ )
+ kwargs = {
+ 'keyfile': self.keyfile,
+ 'certfile': self.certfile,
+ 'ca_certs': self.ca_certs,
+ 'cert_reqs': self.verify_mode,
+ 'ssl_version': self.protocol,
+ 'server_side': server_side,
+ }
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+
+
+def assert_fingerprint(cert, fingerprint):
+ """
+ Checks if given fingerprint matches the supplied certificate.
+
+ :param cert:
+ Certificate as bytes object.
+ :param fingerprint:
+ Fingerprint as string of hexdigits, can be interspersed by colons.
+ """
+
+ fingerprint = fingerprint.replace(':', '').lower()
+ digest_length = len(fingerprint)
+ hashfunc = HASHFUNC_MAP.get(digest_length)
+ if not hashfunc:
+ raise SSLError(
+ 'Fingerprint of invalid length: {0}'.format(fingerprint))
+
+ # We need encode() here for py32; works on py2 and py33.
+ fingerprint_bytes = unhexlify(fingerprint.encode())
+
+ cert_digest = hashfunc(cert).digest()
+
+ if not _const_compare_digest(cert_digest, fingerprint_bytes):
+ raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
+ .format(fingerprint, hexlify(cert_digest)))
+
+
+def resolve_cert_reqs(candidate):
+ """
+ Resolves the argument to a numeric constant, which can be passed to
+ the wrap_socket function/method from the ssl module.
+ Defaults to :data:`ssl.CERT_NONE`.
+ If given a string it is assumed to be the name of the constant in the
+ :mod:`ssl` module or its abbreviation.
+ (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+ If it's neither `None` nor a string we assume it is already the numeric
+ constant which can directly be passed to wrap_socket.
+ """
+ if candidate is None:
+ return CERT_REQUIRED
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'CERT_' + candidate)
+ return res
+
+ return candidate
+
+
+def resolve_ssl_version(candidate):
+ """
+ Like resolve_cert_reqs(), but for SSL/TLS protocol versions.
+ """
+ if candidate is None:
+ return PROTOCOL_TLS
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'PROTOCOL_' + candidate)
+ return res
+
+ return candidate
+
+
+def create_urllib3_context(ssl_version=None, cert_reqs=None,
+ options=None, ciphers=None):
+ """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+ By default, this function does a lot of the same work that
+ ``ssl.create_default_context`` does on Python 3.4+. It:
+
+ - Disables SSLv2, SSLv3, and compression
+ - Sets a restricted set of server ciphers
+
+ If you wish to enable SSLv3, you can do::
+
+ from pip._vendor.urllib3.util import ssl_
+ context = ssl_.create_urllib3_context()
+ context.options &= ~ssl_.OP_NO_SSLv3
+
+ You can do the same to enable compression (substituting ``COMPRESSION``
+ for ``SSLv3`` in the last line above).
+
+ :param ssl_version:
+ The desired protocol version to use. This will default to
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+ the server and your installation of OpenSSL support.
+ :param cert_reqs:
+ Whether to require the certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
+ :param ciphers:
+ Which cipher suites to allow the server to select.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ context = SSLContext(ssl_version or PROTOCOL_TLS)
+
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+
+ # Setting the default here, as we may have no ssl module on import
+ cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+
+ context.options |= options
+
+ context.verify_mode = cert_reqs
+ if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
+ return context
+
+
+def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
+ ca_certs=None, server_hostname=None,
+ ssl_version=None, ciphers=None, ssl_context=None,
+ ca_cert_dir=None, key_password=None):
+ """
+ All arguments except for server_hostname, ssl_context, and ca_cert_dir have
+ the same meaning as they do when using :func:`ssl.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers:
+ A string of ciphers we wish the client to support.
+ :param ca_cert_dir:
+ A directory containing CA certificates in multiple separate files, as
+ supported by OpenSSL's -CApath flag or the capath argument to
+ SSLContext.load_verify_locations().
+ :param key_password:
+ Optional password if the keyfile is encrypted.
+ """
+ context = ssl_context
+ if context is None:
+ # Note: This branch of code and all the variables in it are no longer
+ # used by urllib3 itself. We should consider deprecating and removing
+ # this code.
+ context = create_urllib3_context(ssl_version, cert_reqs,
+ ciphers=ciphers)
+
+ if ca_certs or ca_cert_dir:
+ try:
+ context.load_verify_locations(ca_certs, ca_cert_dir)
+ except IOError as e: # Platform-specific: Python 2.7
+ raise SSLError(e)
+ # Py33 raises FileNotFoundError which subclasses OSError
+ # These are not equivalent unless we check the errno attribute
+ except OSError as e: # Platform-specific: Python 3.3 and beyond
+ if e.errno == errno.ENOENT:
+ raise SSLError(e)
+ raise
+
+ elif ssl_context is None and hasattr(context, 'load_default_certs'):
+ # try to load OS default certs; works well on Windows (requires Python 3.4+)
+ context.load_default_certs()
+
+ # Attempt to detect if we get the goofy behavior of the
+ # keyfile being encrypted and OpenSSL asking for the
+ # passphrase via the terminal and instead error out.
+ if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
+ raise SSLError("Client private key is encrypted, password is required")
+
+ if certfile:
+ if key_password is None:
+ context.load_cert_chain(certfile, keyfile)
+ else:
+ context.load_cert_chain(certfile, keyfile, key_password)
+
+ # If we detect server_hostname is an IP address then the SNI
+ # extension should not be used according to RFC3546 Section 3.1
+ # We shouldn't warn the user if SNI isn't available, since we
+ # wouldn't be using SNI anyway if server_hostname is an IP address.
+ if ((server_hostname is not None and not is_ipaddress(server_hostname))
+ or IS_SECURETRANSPORT):
+ if HAS_SNI and server_hostname is not None:
+ return context.wrap_socket(sock, server_hostname=server_hostname)
+
+ warnings.warn(
+ 'An HTTPS request has been made, but the SNI (Server Name '
+ 'Indication) extension to TLS is not available on this platform. '
+ 'This may cause the server to present an incorrect TLS '
+ 'certificate, which can cause validation failures. You can upgrade to '
+ 'a newer version of Python to solve this. For more information, see '
+ 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
+ '#ssl-warnings',
+ SNIMissingWarning
+ )
+
+ return context.wrap_socket(sock)
+
+
+def is_ipaddress(hostname):
+ """Detects whether the hostname given is an IPv4 or IPv6 address.
+ Also detects IPv6 addresses with Zone IDs.
+
+ :param str hostname: Hostname to examine.
+ :return: True if the hostname is an IP address, False otherwise.
+ """
+ if six.PY3 and isinstance(hostname, bytes):
+ # IDN A-label bytes are ASCII compatible.
+ hostname = hostname.decode('ascii')
+ return _IP_ADDRESS_REGEX.match(hostname) is not None
+
+
+def _is_key_file_encrypted(key_file):
+ """Detects if a key file is encrypted or not."""
+ with open(key_file, 'r') as f:
+ for line in f:
+ # Look for Proc-Type: 4,ENCRYPTED
+ if 'ENCRYPTED' in line:
+ return True
+
+ return False
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/timeout.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/timeout.py
new file mode 100644
index 00000000..a4d004a8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/timeout.py
@@ -0,0 +1,243 @@
+from __future__ import absolute_import
+# The default socket timeout, used by httplib to indicate that no timeout was
+# specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT
+import time
+
+from ..exceptions import TimeoutStateError
+
+# A sentinel value to indicate that no timeout was specified by the user in
+# urllib3
+_Default = object()
+
+
+# Use time.monotonic if available.
+current_time = getattr(time, "monotonic", time.time)
+
+
+class Timeout(object):
+ """ Timeout configuration.
+
+ Timeouts can be defined as a default for a pool::
+
+ timeout = Timeout(connect=2.0, read=7.0)
+ http = PoolManager(timeout=timeout)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+
+ Timeouts can be disabled by setting all the parameters to ``None``::
+
+ no_timeout = Timeout(connect=None, read=None)
+ response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
+ :param total:
+ This combines the connect and read timeouts into one; the read timeout
+ will be set to the time leftover from the connect attempt. In the
+ event that both a connect timeout and a total are specified, or a read
+ timeout and a total are specified, the shorter timeout will be applied.
+
+ Defaults to None.
+
+ :type total: integer, float, or None
+
+ :param connect:
+ The maximum amount of time to wait for a connection attempt to a server
+ to succeed. Omitting the parameter will default the connect timeout to
+ the system default, probably `the global default timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout for connection attempts.
+
+ :type connect: integer, float, or None
+
+ :param read:
+ The maximum amount of time to wait between consecutive
+ read operations for a response from the server. Omitting
+ the parameter will default the read timeout to the system
+ default, probably `the global default timeout in socket.py
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+ None will set an infinite timeout.
+
+ :type read: integer, float, or None
+
+ .. note::
+
+ Many factors can affect the total amount of time for urllib3 to return
+ an HTTP response.
+
+ For example, Python's DNS resolver does not obey the timeout specified
+ on the socket. Other factors that can affect total request time include
+ high CPU load, high swap, the program running at a low priority level,
+ or other behaviors.
+
+ In addition, the read and total timeouts only measure the time between
+ read operations on the socket connecting the client and the server,
+ not the total amount of time for the request to return a complete
+ response. For most requests, the timeout is raised because the server
+ has not sent the first byte in the specified time. This is not always
+ the case; if a server streams one byte every fifteen seconds, a timeout
+ of 20 seconds will not trigger, even though the request will take
+ several minutes to complete.
+
+ If your goal is to cut off any request after a set amount of wall clock
+ time, consider having a second "watcher" thread to cut off a slow
+ request.
+ """
+
+ #: A sentinel object representing the default timeout value
+ DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
+
+ def __init__(self, total=None, connect=_Default, read=_Default):
+ self._connect = self._validate_timeout(connect, 'connect')
+ self._read = self._validate_timeout(read, 'read')
+ self.total = self._validate_timeout(total, 'total')
+ self._start_connect = None
+
+ def __str__(self):
+ return '%s(connect=%r, read=%r, total=%r)' % (
+ type(self).__name__, self._connect, self._read, self.total)
+
+ @classmethod
+ def _validate_timeout(cls, value, name):
+ """ Check that a timeout attribute is valid.
+
+ :param value: The timeout value to validate
+ :param name: The name of the timeout attribute to validate. This is
+ used to specify in error messages.
+ :return: The validated and cast version of the given value.
+ :raises ValueError: If it is a numeric value less than or equal to
+ zero, or the type is not an integer, float, or None.
+ """
+ if value is _Default:
+ return cls.DEFAULT_TIMEOUT
+
+ if value is None or value is cls.DEFAULT_TIMEOUT:
+ return value
+
+ if isinstance(value, bool):
+ raise ValueError("Timeout cannot be a boolean value. It must "
+ "be an int, float or None.")
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value))
+
+ try:
+ if value <= 0:
+ raise ValueError("Attempted to set %s timeout to %s, but the "
+ "timeout cannot be set to a value less "
+ "than or equal to 0." % (name, value))
+ except TypeError:
+ # Python 3
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value))
+
+ return value
+
+ @classmethod
+ def from_float(cls, timeout):
+ """ Create a new Timeout from a legacy timeout value.
+
+ The timeout value used by httplib.py sets the same timeout on the
+ connect() and recv() socket requests. This creates a :class:`Timeout`
+ object that sets the individual timeouts to the ``timeout`` value
+ passed to this function.
+
+ :param timeout: The legacy timeout value.
+ :type timeout: integer, float, sentinel default object, or None
+ :return: Timeout object
+ :rtype: :class:`Timeout`
+ """
+ return Timeout(read=timeout, connect=timeout)
+
+ def clone(self):
+ """ Create a copy of the timeout object
+
+ Timeout properties are stored per-pool but each request needs a fresh
+ Timeout object to ensure each one has its own start/stop configured.
+
+ :return: a copy of the timeout object
+ :rtype: :class:`Timeout`
+ """
+ # We can't use copy.deepcopy because that will also create a new object
+ # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+ # detect the user default.
+ return Timeout(connect=self._connect, read=self._read,
+ total=self.total)
+
+ def start_connect(self):
+ """ Start the timeout clock, used during a connect() attempt
+
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to start a timer that has been started already.
+ """
+ if self._start_connect is not None:
+ raise TimeoutStateError("Timeout timer has already been started.")
+ self._start_connect = current_time()
+ return self._start_connect
+
+ def get_connect_duration(self):
+ """ Gets the time elapsed since the call to :meth:`start_connect`.
+
+ :return: Elapsed time.
+ :rtype: float
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to get duration for a timer that hasn't been started.
+ """
+ if self._start_connect is None:
+ raise TimeoutStateError("Can't get connect duration for timer "
+ "that has not started.")
+ return current_time() - self._start_connect
+
+ @property
+ def connect_timeout(self):
+ """ Get the value to use when setting a connection timeout.
+
+ This will be a positive float or integer, the value None
+ (never timeout), or the default system timeout.
+
+ :return: Connect timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ """
+ if self.total is None:
+ return self._connect
+
+ if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+ return self.total
+
+ return min(self._connect, self.total)
+
+ @property
+ def read_timeout(self):
+ """ Get the value for the read timeout.
+
+ This assumes some time has elapsed in the connection timeout and
+ computes the read timeout appropriately.
+
+ If self.total is set, the read timeout is dependent on the amount of
+ time taken by the connect timeout. If the connection time has not been
+ established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+ raised.
+
+ :return: Value to use for the read timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
+ has not yet been called on this object.
+ """
+ if (self.total is not None and
+ self.total is not self.DEFAULT_TIMEOUT and
+ self._read is not None and
+ self._read is not self.DEFAULT_TIMEOUT):
+ # In case the connect timeout has not yet been established.
+ if self._start_connect is None:
+ return self._read
+ return max(0, min(self.total - self.get_connect_duration(),
+ self._read))
+ elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
+ return max(0, self.total - self.get_connect_duration())
+ else:
+ return self._read
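The two properties resolve the interplay between the per-phase values and total: connect_timeout is simply the smaller of connect and total, while read_timeout also subtracts however long the connect phase actually took. A small illustration, assuming the vendored import path (the elapsed connect time is faked by rewinding the private _start_connect clock, purely for demonstration):

    from pip._vendor.urllib3.util.timeout import Timeout

    t = Timeout(connect=5, read=10, total=12)
    assert t.connect_timeout == 5  # min(connect, total)

    t.start_connect()
    t._start_connect -= 3          # pretend connecting took ~3 seconds
    # Read budget: min(total - elapsed, read) = min(12 - 3, 10), about 9.
    assert 8.9 < t.read_timeout <= 9.0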
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/url.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/url.py
new file mode 100644
index 00000000..aefa119b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/url.py
@@ -0,0 +1,289 @@
+from __future__ import absolute_import
+import re
+from collections import namedtuple
+
+from ..exceptions import LocationParseError
+from ..packages import six, rfc3986
+from ..packages.rfc3986.exceptions import RFC3986Exception, ValidationError
+from ..packages.rfc3986.validators import Validator
+from ..packages.rfc3986 import abnf_regexp, normalizers, compat, misc
+
+
+url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
+
+# We only want to normalize urls with an HTTP(S) scheme.
+# urllib3 infers URLs without a scheme (None) to be http.
+NORMALIZABLE_SCHEMES = ('http', 'https', None)
+
+# Regex for detecting URLs with schemes. RFC 3986 Section 3.1
+SCHEME_REGEX = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+\-]*:|/)")
+
+PATH_CHARS = abnf_regexp.UNRESERVED_CHARS_SET | abnf_regexp.SUB_DELIMITERS_SET | {':', '@', '/'}
+QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {'?'}
+
+
+class Url(namedtuple('Url', url_attrs)):
+ """
+ Data structure for representing an HTTP URL. Used as a return value for
+ :func:`parse_url`. Both the scheme and host are normalized as they are
+ both case-insensitive according to RFC 3986.
+ """
+ __slots__ = ()
+
+ def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
+ query=None, fragment=None):
+ if path and not path.startswith('/'):
+ path = '/' + path
+ if scheme is not None:
+ scheme = scheme.lower()
+ return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
+ query, fragment)
+
+ @property
+ def hostname(self):
+ """For backwards-compatibility with urlparse. We're nice like that."""
+ return self.host
+
+ @property
+ def request_uri(self):
+ """Absolute path including the query string."""
+ uri = self.path or '/'
+
+ if self.query is not None:
+ uri += '?' + self.query
+
+ return uri
+
+ @property
+ def netloc(self):
+ """Network location including host and port"""
+ if self.port:
+ return '%s:%d' % (self.host, self.port)
+ return self.host
+
+ @property
+ def url(self):
+ """
+ Convert self into a url
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+ returned url may not be exactly the same as the url inputted to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port will have : removed).
+
+ Example: ::
+
+ >>> U = parse_url('http://google.com/mail/')
+ >>> U.url
+ 'http://google.com/mail/'
+ >>> Url('http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment').url
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = self
+ url = u''
+
+ # We use "is not None" because we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url += scheme + u'://'
+ if auth is not None:
+ url += auth + u'@'
+ if host is not None:
+ url += host
+ if port is not None:
+ url += u':' + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += u'?' + query
+ if fragment is not None:
+ url += u'#' + fragment
+
+ return url
+
+ def __str__(self):
+ return self.url
+
+
+def split_first(s, delims):
+ """
+ .. deprecated:: 1.25
+
+ Given a string and an iterable of delimiters, split on the first found
+ delimiter. Return two split parts and the matched delimiter.
+
+ If not found, then the first part is the full input string.
+
+ Example::
+
+ >>> split_first('foo/bar?baz', '?/=')
+ ('foo', 'bar?baz', '/')
+ >>> split_first('foo/bar?baz', '123')
+ ('foo/bar?baz', '', None)
+
+ Scales linearly with the number of delims. Not ideal for a large number of delims.
+ """
+ min_idx = None
+ min_delim = None
+ for d in delims:
+ idx = s.find(d)
+ if idx < 0:
+ continue
+
+ if min_idx is None or idx < min_idx:
+ min_idx = idx
+ min_delim = d
+
+ if min_idx is None or min_idx < 0:
+ return s, '', None
+
+ return s[:min_idx], s[min_idx + 1:], min_delim
+
+
+def _encode_invalid_chars(component, allowed_chars, encoding='utf-8'):
+ """Percent-encodes a URI component without reapplying
+ onto an already percent-encoded component. Based on
+ rfc3986.normalizers.encode_component()
+ """
+ if component is None:
+ return component
+
+ # Try to see if the component we're encoding is already percent-encoded
+ # so we can skip all '%' characters but still encode all others.
+ percent_encodings = len(normalizers.PERCENT_MATCHER.findall(
+ compat.to_str(component, encoding)))
+
+ uri_bytes = component.encode('utf-8', 'surrogatepass')
+ is_percent_encoded = percent_encodings == uri_bytes.count(b'%')
+
+ encoded_component = bytearray()
+
+ for i in range(0, len(uri_bytes)):
+ # Will return a single character bytestring on both Python 2 & 3
+ byte = uri_bytes[i:i+1]
+ byte_ord = ord(byte)
+ if ((is_percent_encoded and byte == b'%')
+ or (byte_ord < 128 and byte.decode() in allowed_chars)):
+ encoded_component.extend(byte)
+ continue
+ encoded_component.extend('%{0:02x}'.format(byte_ord).encode().upper())
+
+ return encoded_component.decode(encoding)
+
+
+def parse_url(url):
+ """
+ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+ performed to parse incomplete urls. Fields not provided will be None.
+ This parser is RFC 3986 compliant.
+
+ :param str url: URL to parse into a :class:`.Url` namedtuple.
+
+ Partly backwards-compatible with :mod:`urlparse`.
+
+ Example::
+
+ >>> parse_url('http://google.com/mail/')
+ Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+ >>> parse_url('google.com:80')
+ Url(scheme=None, host='google.com', port=80, path=None, ...)
+ >>> parse_url('/foo?bar')
+ Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+ """
+ if not url:
+ # Empty
+ return Url()
+
+ is_string = not isinstance(url, six.binary_type)
+
+ # RFC 3986 doesn't like URLs that have a host but don't start
+ # with a scheme and we support URLs like that so we need to
+ # detect that problem and add an empty scheme indication.
+ # We don't get hurt on path-only URLs here as it's stripped
+ # off and given an empty scheme anyways.
+ if not SCHEME_REGEX.search(url):
+ url = "//" + url
+
+ def idna_encode(name):
+ if name and any([ord(x) > 128 for x in name]):
+ try:
+ from pip._vendor import idna
+ except ImportError:
+ raise LocationParseError("Unable to parse URL without the 'idna' module")
+ try:
+ return idna.encode(name.lower(), strict=True, std3_rules=True)
+ except idna.IDNAError:
+ raise LocationParseError(u"Name '%s' is not a valid IDNA label" % name)
+ return name
+
+ try:
+ split_iri = misc.IRI_MATCHER.match(compat.to_str(url)).groupdict()
+ iri_ref = rfc3986.IRIReference(
+ split_iri['scheme'], split_iri['authority'],
+ _encode_invalid_chars(split_iri['path'], PATH_CHARS),
+ _encode_invalid_chars(split_iri['query'], QUERY_CHARS),
+ _encode_invalid_chars(split_iri['fragment'], FRAGMENT_CHARS)
+ )
+ has_authority = iri_ref.authority is not None
+ uri_ref = iri_ref.encode(idna_encoder=idna_encode)
+ except (ValueError, RFC3986Exception):
+ return six.raise_from(LocationParseError(url), None)
+
+ # rfc3986 strips the authority if it's invalid
+ if has_authority and uri_ref.authority is None:
+ raise LocationParseError(url)
+
+ # Only normalize schemes we understand to not break http+unix
+ # or other schemes that don't follow RFC 3986.
+ if uri_ref.scheme is None or uri_ref.scheme.lower() in NORMALIZABLE_SCHEMES:
+ uri_ref = uri_ref.normalize()
+
+ # Validate all URIReference components and ensure that all
+ # components that were set before are still set after
+ # normalization has completed.
+ validator = Validator()
+ try:
+ validator.check_validity_of(
+ *validator.COMPONENT_NAMES
+ ).validate(uri_ref)
+ except ValidationError:
+ return six.raise_from(LocationParseError(url), None)
+
+ # For the sake of backwards compatibility we put empty
+ # string values for path if there are any defined values
+ # beyond the path in the URL.
+ # TODO: Remove this when we break backwards compatibility.
+ path = uri_ref.path
+ if not path:
+ if (uri_ref.query is not None
+ or uri_ref.fragment is not None):
+ path = ""
+ else:
+ path = None
+
+ # Ensure that each part of the URL is a `str` for
+ # backwards compatibility.
+ def to_input_type(x):
+ if x is None:
+ return None
+ elif not is_string and not isinstance(x, six.binary_type):
+ return x.encode('utf-8')
+ return x
+
+ return Url(
+ scheme=to_input_type(uri_ref.scheme),
+ auth=to_input_type(uri_ref.userinfo),
+ host=to_input_type(uri_ref.host),
+ port=int(uri_ref.port) if uri_ref.port is not None else None,
+ path=to_input_type(path),
+ query=to_input_type(uri_ref.query),
+ fragment=to_input_type(uri_ref.fragment)
+ )
+
+
+def get_host(url):
+ """
+ Deprecated. Use :func:`parse_url` instead.
+ """
+ p = parse_url(url)
+ return p.scheme or 'http', p.hostname, p.port
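parse_url() normalizes the scheme and host, percent-encodes stray characters, and returns a Url namedtuple whose .url property round-trips the result; get_host() survives only as a thin compatibility wrapper. For example, assuming the vendored import path:

    from pip._vendor.urllib3.util.url import get_host, parse_url

    u = parse_url('https://user:pass@Example.COM:8443/path?q=1#frag')
    assert u.scheme == 'https' and u.host == 'example.com'
    assert u.port == 8443 and u.request_uri == '/path?q=1'

    assert get_host('http://google.com:80/mail') == ('http', 'google.com', 80)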
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/wait.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/wait.py
new file mode 100644
index 00000000..4db71baf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/urllib3/util/wait.py
@@ -0,0 +1,150 @@
+import errno
+from functools import partial
+import select
+import sys
+try:
+ from time import monotonic
+except ImportError:
+ from time import time as monotonic
+
+__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
+
+
+class NoWayToWaitForSocketError(Exception):
+ pass
+
+
+# How should we wait on sockets?
+#
+# There are two types of APIs you can use for waiting on sockets: the fancy
+# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
+# select/poll. The stateful APIs are more efficient when you have a lots of
+# sockets to keep track of, because you can set them up once and then use them
+# lots of times. But we only ever want to wait on a single socket at a time
+# and don't want to keep track of state, so the stateless APIs are actually
+# more efficient. So we want to use select() or poll().
+#
+# Now, how do we choose between select() and poll()? On traditional Unixes,
+# select() has a strange calling convention that makes it slow, or fail
+# altogether, for high-numbered file descriptors. The point of poll() is to fix
+# that, so on Unixes, we prefer poll().
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+if sys.version_info >= (3, 5):
+ # Modern Python, that retries syscalls by default
+ def _retry_on_intr(fn, timeout):
+ return fn(timeout)
+else:
+ # Old and broken Pythons.
+ def _retry_on_intr(fn, timeout):
+ if timeout is None:
+ deadline = float("inf")
+ else:
+ deadline = monotonic() + timeout
+
+ while True:
+ try:
+ return fn(timeout)
+ # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
+ except (OSError, select.error) as e:
+ # 'e.args[0]' incantation works for both OSError and select.error
+ if e.args[0] != errno.EINTR:
+ raise
+ else:
+ timeout = deadline - monotonic()
+ if timeout < 0:
+ timeout = 0
+ if timeout == float("inf"):
+ timeout = None
+ continue
+
+
+def select_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ rcheck = []
+ wcheck = []
+ if read:
+ rcheck.append(sock)
+ if write:
+ wcheck.append(sock)
+ # When doing a non-blocking connect, most systems signal success by
+ # marking the socket writable. Windows, though, signals success by marking
+ # it as "exceptional". We paper over the difference by checking the write
+ # sockets for both conditions. (The stdlib selectors module does the same
+ # thing.)
+ fn = partial(select.select, rcheck, wcheck, wcheck)
+ rready, wready, xready = _retry_on_intr(fn, timeout)
+ return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ mask = 0
+ if read:
+ mask |= select.POLLIN
+ if write:
+ mask |= select.POLLOUT
+ poll_obj = select.poll()
+ poll_obj.register(sock, mask)
+
+ # For some reason, poll() takes timeout in milliseconds
+ def do_poll(t):
+ if t is not None:
+ t *= 1000
+ return poll_obj.poll(t)
+
+ return bool(_retry_on_intr(do_poll, timeout))
+
+
+def null_wait_for_socket(*args, **kwargs):
+ raise NoWayToWaitForSocketError("no select-equivalent available")
+
+
+def _have_working_poll():
+ # Apparently some systems have a select.poll that fails as soon as you try
+ # to use it, either due to strange configuration or broken monkeypatching
+ # from libraries like eventlet/greenlet.
+ try:
+ poll_obj = select.poll()
+ _retry_on_intr(poll_obj.poll, 0)
+ except (AttributeError, OSError):
+ return False
+ else:
+ return True
+
+
+def wait_for_socket(*args, **kwargs):
+ # We delay choosing which implementation to use until the first time we're
+ # called. We could do it at import time, but then we might make the wrong
+ # decision if someone goes wild with monkeypatching select.poll after
+ # we're imported.
+ global wait_for_socket
+ if _have_working_poll():
+ wait_for_socket = poll_wait_for_socket
+ elif hasattr(select, "select"):
+ wait_for_socket = select_wait_for_socket
+ else: # Platform-specific: Appengine.
+ wait_for_socket = null_wait_for_socket
+ return wait_for_socket(*args, **kwargs)
+
+
+def wait_for_read(sock, timeout=None):
+ """ Waits for reading to be available on a given socket.
+ Returns True if the socket is readable, or False if the timeout expired.
+ """
+ return wait_for_socket(sock, read=True, timeout=timeout)
+
+
+def wait_for_write(sock, timeout=None):
+ """ Waits for writing to be available on a given socket.
+ Returns True if the socket is writable, or False if the timeout expired.
+ """
+ return wait_for_socket(sock, write=True, timeout=timeout)
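wait_for_socket() rebinds itself to the poll()- or select()-based implementation on first call, so the capability probe runs once per process. A quick demonstration with a connected pair, assuming a platform that provides socket.socketpair():

    import socket
    from pip._vendor.urllib3.util.wait import wait_for_read, wait_for_write

    a, b = socket.socketpair()
    assert not wait_for_read(a, timeout=0.1)  # no data yet
    assert wait_for_write(a, timeout=0.1)     # send buffer has room

    b.sendall(b"ping")
    assert wait_for_read(a, timeout=0.1)      # now readable
    a.close()
    b.close()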
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__init__.py
new file mode 100644
index 00000000..d21d697c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__init__.py
@@ -0,0 +1,342 @@
+# coding: utf-8
+"""
+
+ webencodings
+ ~~~~~~~~~~~~
+
+ This is a Python implementation of the `WHATWG Encoding standard
+ <http://encoding.spec.whatwg.org/>`_. See README for details.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+from __future__ import unicode_literals
+
+import codecs
+
+from .labels import LABELS
+
+
+VERSION = '0.5.1'
+
+
+# Some names in Encoding are not valid Python aliases. Remap these.
+PYTHON_NAMES = {
+ 'iso-8859-8-i': 'iso-8859-8',
+ 'x-mac-cyrillic': 'mac-cyrillic',
+ 'macintosh': 'mac-roman',
+ 'windows-874': 'cp874'}
+
+CACHE = {}
+
+
+def ascii_lower(string):
+ r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.
+
+ :param string: A Unicode string.
+ :returns: A new Unicode string.
+
+ This is used for `ASCII case-insensitive
+ <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
+ matching of encoding labels.
+ The same matching is also used, among other things,
+ for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.
+
+ This is different from the :meth:`~py:str.lower` method of Unicode strings,
+ which also affects non-ASCII characters,
+ sometimes mapping them into the ASCII range:
+
+ >>> keyword = u'Bac\N{KELVIN SIGN}ground'
+ >>> assert keyword.lower() == u'background'
+ >>> assert ascii_lower(keyword) != keyword.lower()
+ >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'
+
+ """
+ # This turns out to be faster than unicode.translate()
+ return string.encode('utf8').lower().decode('utf8')
+
+
+def lookup(label):
+ """
+ Look for an encoding by its label.
+ This is the spec’s `get an encoding
+ <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
+ Supported labels are listed there.
+
+ :param label: A string.
+ :returns:
+ An :class:`Encoding` object, or :obj:`None` for an unknown label.
+
+ """
+ # Only strip ASCII whitespace: U+0009, U+000A, U+000C, U+000D, and U+0020.
+ label = ascii_lower(label.strip('\t\n\f\r '))
+ name = LABELS.get(label)
+ if name is None:
+ return None
+ encoding = CACHE.get(name)
+ if encoding is None:
+ if name == 'x-user-defined':
+ from .x_user_defined import codec_info
+ else:
+ python_name = PYTHON_NAMES.get(name, name)
+ # Any python_name value that gets here should be valid.
+ codec_info = codecs.lookup(python_name)
+ encoding = Encoding(name, codec_info)
+ CACHE[name] = encoding
+ return encoding
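+
+# Illustrative examples (values mirror tests.py below): labels are stripped of
+# ASCII whitespace and matched ASCII case-insensitively.
+#
+#     >>> lookup('UTF-8').name
+#     'utf-8'
+#     >>> lookup(' \r\nutf8\t').name
+#     'utf-8'
+#     >>> lookup('latin-1') is None   # not a registered label
+#     True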
+
+
+def _get_encoding(encoding_or_label):
+ """
+ Accept either an encoding object or label.
+
+ :param encoding_or_label: An :class:`Encoding` object or a label string.
+ :returns: An :class:`Encoding` object.
+ :raises: :exc:`~exceptions.LookupError` for an unknown label.
+
+ """
+ if hasattr(encoding_or_label, 'codec_info'):
+ return encoding_or_label
+
+ encoding = lookup(encoding_or_label)
+ if encoding is None:
+ raise LookupError('Unknown encoding label: %r' % encoding_or_label)
+ return encoding
+
+
+class Encoding(object):
+ """Reresents a character encoding such as UTF-8,
+ that can be used for decoding or encoding.
+
+ .. attribute:: name
+
+ Canonical name of the encoding
+
+ .. attribute:: codec_info
+
+ The actual implementation of the encoding,
+ a stdlib :class:`~codecs.CodecInfo` object.
+ See :func:`codecs.register`.
+
+ """
+ def __init__(self, name, codec_info):
+ self.name = name
+ self.codec_info = codec_info
+
+ def __repr__(self):
+ return '<Encoding %s>' % self.name
+
+
+#: The UTF-8 encoding. Should be used for new content and formats.
+UTF8 = lookup('utf-8')
+
+_UTF16LE = lookup('utf-16le')
+_UTF16BE = lookup('utf-16be')
+
+
+def decode(input, fallback_encoding, errors='replace'):
+ """
+ Decode a single string.
+
+ :param input: A byte string.
+ :param fallback_encoding:
+ An :class:`Encoding` object or a label string.
+ The encoding to use if :obj:`input` does not have a BOM.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :return:
+ A ``(output, encoding)`` tuple of a Unicode string
+ and an :obj:`Encoding`.
+
+ """
+ # Fail early if `fallback_encoding` is an invalid label.
+ fallback_encoding = _get_encoding(fallback_encoding)
+ bom_encoding, input = _detect_bom(input)
+ encoding = bom_encoding or fallback_encoding
+ return encoding.codec_info.decode(input, errors)[0], encoding
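+
+# Illustrative examples (values mirror tests.py below): a BOM, when present,
+# overrides the fallback encoding.
+#
+#     >>> decode(b'\xc3\xa9', 'utf-8')
+#     ('é', <Encoding utf-8>)
+#     >>> decode(b'\xef\xbb\xbf\xc3\xa9', 'ascii')   # BOM wins over fallback
+#     ('é', <Encoding utf-8>)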
+
+
+def _detect_bom(input):
+ """Return (bom_encoding, input), with any BOM removed from the input."""
+ if input.startswith(b'\xFF\xFE'):
+ return _UTF16LE, input[2:]
+ if input.startswith(b'\xFE\xFF'):
+ return _UTF16BE, input[2:]
+ if input.startswith(b'\xEF\xBB\xBF'):
+ return UTF8, input[3:]
+ return None, input
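+
+# Illustrative behaviour (a sketch consistent with the code above):
+#
+#     >>> _detect_bom(b'\xef\xbb\xbfabc')
+#     (<Encoding utf-8>, b'abc')
+#     >>> _detect_bom(b'abc')
+#     (None, b'abc')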
+
+
+def encode(input, encoding=UTF8, errors='strict'):
+ """
+ Encode a single string.
+
+ :param input: A Unicode string.
+ :param encoding: An :class:`Encoding` object or a label string.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :return: A byte string.
+
+ """
+ return _get_encoding(encoding).codec_info.encode(input, errors)[0]
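+
+# Illustrative examples (values mirror tests.py below). Note that encoding
+# never writes a BOM; the 'utf-16' label encodes as UTF-16LE per LABELS.
+#
+#     >>> encode('é')                # defaults to UTF-8
+#     b'\xc3\xa9'
+#     >>> encode('é', 'utf-16')
+#     b'\xe9\x00'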
+
+
+def iter_decode(input, fallback_encoding, errors='replace'):
+ """
+ "Pull"-based decoder.
+
+ :param input:
+ An iterable of byte strings.
+
+ The input is first consumed just enough to determine the encoding
+ based on the presence of a BOM,
+ then consumed on demand as the return value is iterated.
+ :param fallback_encoding:
+ An :class:`Encoding` object or a label string.
+ The encoding to use if :obj:`input` does not have a BOM.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :returns:
+ An ``(output, encoding)`` tuple.
+ :obj:`output` is an iterable of Unicode strings,
+ :obj:`encoding` is the :obj:`Encoding` that is being used.
+
+ """
+
+ decoder = IncrementalDecoder(fallback_encoding, errors)
+ generator = _iter_decode_generator(input, decoder)
+ encoding = next(generator)
+ return generator, encoding
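+
+# Illustrative usage (values mirror tests.py below): the generator yields
+# decoded chunks once the encoding has been determined.
+#
+#     >>> output, encoding = iter_decode([b'he', b'llo'], 'latin1')
+#     >>> ''.join(output), encoding.name
+#     ('hello', 'windows-1252')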
+
+
+def _iter_decode_generator(input, decoder):
+ """Return a generator that first yields the :obj:`Encoding`,
+ then yields output chunks as Unicode strings.
+
+ """
+ decode = decoder.decode
+ input = iter(input)
+ for chunk in input:
+ output = decode(chunk)
+ if output:
+ assert decoder.encoding is not None
+ yield decoder.encoding
+ yield output
+ break
+ else:
+ # Input exhausted without determining the encoding
+ output = decode(b'', final=True)
+ assert decoder.encoding is not None
+ yield decoder.encoding
+ if output:
+ yield output
+ return
+
+ for chunk in input:
+ output = decode(chunk)
+ if output:
+ yield output
+ output = decode(b'', final=True)
+ if output:
+ yield output
+
+
+def iter_encode(input, encoding=UTF8, errors='strict'):
+ """
+ “Pull”-based encoder.
+
+ :param input: An iterable of Unicode strings.
+ :param encoding: An :class:`Encoding` object or a label string.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+ :returns: An iterable of byte strings.
+
+ """
+ # Fail early if `encoding` is an invalid label.
+ encode = IncrementalEncoder(encoding, errors).encode
+ return _iter_encode_generator(input, encode)
+
+
+def _iter_encode_generator(input, encode):
+ for chunk in input:
+ output = encode(chunk)
+ if output:
+ yield output
+ output = encode('', final=True)
+ if output:
+ yield output
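+
+# Illustrative usage of iter_encode above (values mirror tests.py below);
+# empty chunks produce no output.
+#
+#     >>> b''.join(iter_encode(['', 'é', '', ''], 'utf-16be'))
+#     b'\x00\xe9'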
+
+
+class IncrementalDecoder(object):
+ """
+ “Push”-based decoder.
+
+ :param fallback_encoding:
+ An :class:`Encoding` object or a label string.
+ The encoding to use if :obj:`input` does not have a BOM.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+
+ """
+ def __init__(self, fallback_encoding, errors='replace'):
+ # Fail early if `fallback_encoding` is an invalid label.
+ self._fallback_encoding = _get_encoding(fallback_encoding)
+ self._errors = errors
+ self._buffer = b''
+ self._decoder = None
+ #: The actual :class:`Encoding` that is being used,
+ #: or :obj:`None` if that is not determined yet.
+ #: (I.e. if there is not enough input yet to determine
+ #: whether there is a BOM.)
+ self.encoding = None # Not known yet.
+
+ def decode(self, input, final=False):
+ """Decode one chunk of the input.
+
+ :param input: A byte string.
+ :param final:
+ Indicate that no more input is available.
+ Must be :obj:`True` if this is the last call.
+ :returns: A Unicode string.
+
+ """
+ decoder = self._decoder
+ if decoder is not None:
+ return decoder(input, final)
+
+ input = self._buffer + input
+ encoding, input = _detect_bom(input)
+ if encoding is None:
+ if len(input) < 3 and not final: # Not enough data yet.
+ self._buffer = input
+ return ''
+ else: # No BOM
+ encoding = self._fallback_encoding
+ decoder = encoding.codec_info.incrementaldecoder(self._errors).decode
+ self._decoder = decoder
+ self.encoding = encoding
+ return decoder(input, final)
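+
+# Illustrative push-based decoding (a sketch consistent with the tests):
+# input is buffered until a BOM can be ruled in or out.
+#
+#     >>> d = IncrementalDecoder('latin1')
+#     >>> d.decode(b'\xef\xbb')          # might still be a UTF-8 BOM: buffered
+#     ''
+#     >>> d.decode(b'\xbf\xc3')          # BOM confirmed; 'é' still incomplete
+#     ''
+#     >>> d.decode(b'\xa9', final=True)
+#     'é'
+#     >>> d.encoding.name
+#     'utf-8'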
+
+
+class IncrementalEncoder(object):
+ """
+ “Push”-based encoder.
+
+ :param encoding: An :class:`Encoding` object or a label string.
+ :param errors: Type of error handling. See :func:`codecs.register`.
+ :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
+
+ .. method:: encode(input, final=False)
+
+ :param input: A Unicode string.
+ :param final:
+ Indicate that no more input is available.
+ Must be :obj:`True` if this is the last call.
+ :returns: A byte string.
+
+ """
+ def __init__(self, encoding=UTF8, errors='strict'):
+ encoding = _get_encoding(encoding)
+ self.encode = encoding.codec_info.incrementalencoder(errors).encode
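+
+
+# Illustrative push-based encoding (values mirror tests.py below):
+#
+#     >>> e = IncrementalEncoder('utf-16le')
+#     >>> e.encode('é')
+#     b'\xe9\x00'
+#     >>> e.encode('', final=True)
+#     b''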
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..2bfe6909
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc
new file mode 100644
index 00000000..55863c8b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc
new file mode 100644
index 00000000..5f5e37c4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc
new file mode 100644
index 00000000..b67d7809
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc
new file mode 100644
index 00000000..f49e7ebf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/labels.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/labels.py
new file mode 100644
index 00000000..29cbf91e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/labels.py
@@ -0,0 +1,231 @@
+"""
+
+ webencodings.labels
+ ~~~~~~~~~~~~~~~~~~~
+
+ Map encoding labels to their name.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+# XXX Do not edit!
+# This file is automatically generated by mklabels.py
+
+LABELS = {
+ 'unicode-1-1-utf-8': 'utf-8',
+ 'utf-8': 'utf-8',
+ 'utf8': 'utf-8',
+ '866': 'ibm866',
+ 'cp866': 'ibm866',
+ 'csibm866': 'ibm866',
+ 'ibm866': 'ibm866',
+ 'csisolatin2': 'iso-8859-2',
+ 'iso-8859-2': 'iso-8859-2',
+ 'iso-ir-101': 'iso-8859-2',
+ 'iso8859-2': 'iso-8859-2',
+ 'iso88592': 'iso-8859-2',
+ 'iso_8859-2': 'iso-8859-2',
+ 'iso_8859-2:1987': 'iso-8859-2',
+ 'l2': 'iso-8859-2',
+ 'latin2': 'iso-8859-2',
+ 'csisolatin3': 'iso-8859-3',
+ 'iso-8859-3': 'iso-8859-3',
+ 'iso-ir-109': 'iso-8859-3',
+ 'iso8859-3': 'iso-8859-3',
+ 'iso88593': 'iso-8859-3',
+ 'iso_8859-3': 'iso-8859-3',
+ 'iso_8859-3:1988': 'iso-8859-3',
+ 'l3': 'iso-8859-3',
+ 'latin3': 'iso-8859-3',
+ 'csisolatin4': 'iso-8859-4',
+ 'iso-8859-4': 'iso-8859-4',
+ 'iso-ir-110': 'iso-8859-4',
+ 'iso8859-4': 'iso-8859-4',
+ 'iso88594': 'iso-8859-4',
+ 'iso_8859-4': 'iso-8859-4',
+ 'iso_8859-4:1988': 'iso-8859-4',
+ 'l4': 'iso-8859-4',
+ 'latin4': 'iso-8859-4',
+ 'csisolatincyrillic': 'iso-8859-5',
+ 'cyrillic': 'iso-8859-5',
+ 'iso-8859-5': 'iso-8859-5',
+ 'iso-ir-144': 'iso-8859-5',
+ 'iso8859-5': 'iso-8859-5',
+ 'iso88595': 'iso-8859-5',
+ 'iso_8859-5': 'iso-8859-5',
+ 'iso_8859-5:1988': 'iso-8859-5',
+ 'arabic': 'iso-8859-6',
+ 'asmo-708': 'iso-8859-6',
+ 'csiso88596e': 'iso-8859-6',
+ 'csiso88596i': 'iso-8859-6',
+ 'csisolatinarabic': 'iso-8859-6',
+ 'ecma-114': 'iso-8859-6',
+ 'iso-8859-6': 'iso-8859-6',
+ 'iso-8859-6-e': 'iso-8859-6',
+ 'iso-8859-6-i': 'iso-8859-6',
+ 'iso-ir-127': 'iso-8859-6',
+ 'iso8859-6': 'iso-8859-6',
+ 'iso88596': 'iso-8859-6',
+ 'iso_8859-6': 'iso-8859-6',
+ 'iso_8859-6:1987': 'iso-8859-6',
+ 'csisolatingreek': 'iso-8859-7',
+ 'ecma-118': 'iso-8859-7',
+ 'elot_928': 'iso-8859-7',
+ 'greek': 'iso-8859-7',
+ 'greek8': 'iso-8859-7',
+ 'iso-8859-7': 'iso-8859-7',
+ 'iso-ir-126': 'iso-8859-7',
+ 'iso8859-7': 'iso-8859-7',
+ 'iso88597': 'iso-8859-7',
+ 'iso_8859-7': 'iso-8859-7',
+ 'iso_8859-7:1987': 'iso-8859-7',
+ 'sun_eu_greek': 'iso-8859-7',
+ 'csiso88598e': 'iso-8859-8',
+ 'csisolatinhebrew': 'iso-8859-8',
+ 'hebrew': 'iso-8859-8',
+ 'iso-8859-8': 'iso-8859-8',
+ 'iso-8859-8-e': 'iso-8859-8',
+ 'iso-ir-138': 'iso-8859-8',
+ 'iso8859-8': 'iso-8859-8',
+ 'iso88598': 'iso-8859-8',
+ 'iso_8859-8': 'iso-8859-8',
+ 'iso_8859-8:1988': 'iso-8859-8',
+ 'visual': 'iso-8859-8',
+ 'csiso88598i': 'iso-8859-8-i',
+ 'iso-8859-8-i': 'iso-8859-8-i',
+ 'logical': 'iso-8859-8-i',
+ 'csisolatin6': 'iso-8859-10',
+ 'iso-8859-10': 'iso-8859-10',
+ 'iso-ir-157': 'iso-8859-10',
+ 'iso8859-10': 'iso-8859-10',
+ 'iso885910': 'iso-8859-10',
+ 'l6': 'iso-8859-10',
+ 'latin6': 'iso-8859-10',
+ 'iso-8859-13': 'iso-8859-13',
+ 'iso8859-13': 'iso-8859-13',
+ 'iso885913': 'iso-8859-13',
+ 'iso-8859-14': 'iso-8859-14',
+ 'iso8859-14': 'iso-8859-14',
+ 'iso885914': 'iso-8859-14',
+ 'csisolatin9': 'iso-8859-15',
+ 'iso-8859-15': 'iso-8859-15',
+ 'iso8859-15': 'iso-8859-15',
+ 'iso885915': 'iso-8859-15',
+ 'iso_8859-15': 'iso-8859-15',
+ 'l9': 'iso-8859-15',
+ 'iso-8859-16': 'iso-8859-16',
+ 'cskoi8r': 'koi8-r',
+ 'koi': 'koi8-r',
+ 'koi8': 'koi8-r',
+ 'koi8-r': 'koi8-r',
+ 'koi8_r': 'koi8-r',
+ 'koi8-u': 'koi8-u',
+ 'csmacintosh': 'macintosh',
+ 'mac': 'macintosh',
+ 'macintosh': 'macintosh',
+ 'x-mac-roman': 'macintosh',
+ 'dos-874': 'windows-874',
+ 'iso-8859-11': 'windows-874',
+ 'iso8859-11': 'windows-874',
+ 'iso885911': 'windows-874',
+ 'tis-620': 'windows-874',
+ 'windows-874': 'windows-874',
+ 'cp1250': 'windows-1250',
+ 'windows-1250': 'windows-1250',
+ 'x-cp1250': 'windows-1250',
+ 'cp1251': 'windows-1251',
+ 'windows-1251': 'windows-1251',
+ 'x-cp1251': 'windows-1251',
+ 'ansi_x3.4-1968': 'windows-1252',
+ 'ascii': 'windows-1252',
+ 'cp1252': 'windows-1252',
+ 'cp819': 'windows-1252',
+ 'csisolatin1': 'windows-1252',
+ 'ibm819': 'windows-1252',
+ 'iso-8859-1': 'windows-1252',
+ 'iso-ir-100': 'windows-1252',
+ 'iso8859-1': 'windows-1252',
+ 'iso88591': 'windows-1252',
+ 'iso_8859-1': 'windows-1252',
+ 'iso_8859-1:1987': 'windows-1252',
+ 'l1': 'windows-1252',
+ 'latin1': 'windows-1252',
+ 'us-ascii': 'windows-1252',
+ 'windows-1252': 'windows-1252',
+ 'x-cp1252': 'windows-1252',
+ 'cp1253': 'windows-1253',
+ 'windows-1253': 'windows-1253',
+ 'x-cp1253': 'windows-1253',
+ 'cp1254': 'windows-1254',
+ 'csisolatin5': 'windows-1254',
+ 'iso-8859-9': 'windows-1254',
+ 'iso-ir-148': 'windows-1254',
+ 'iso8859-9': 'windows-1254',
+ 'iso88599': 'windows-1254',
+ 'iso_8859-9': 'windows-1254',
+ 'iso_8859-9:1989': 'windows-1254',
+ 'l5': 'windows-1254',
+ 'latin5': 'windows-1254',
+ 'windows-1254': 'windows-1254',
+ 'x-cp1254': 'windows-1254',
+ 'cp1255': 'windows-1255',
+ 'windows-1255': 'windows-1255',
+ 'x-cp1255': 'windows-1255',
+ 'cp1256': 'windows-1256',
+ 'windows-1256': 'windows-1256',
+ 'x-cp1256': 'windows-1256',
+ 'cp1257': 'windows-1257',
+ 'windows-1257': 'windows-1257',
+ 'x-cp1257': 'windows-1257',
+ 'cp1258': 'windows-1258',
+ 'windows-1258': 'windows-1258',
+ 'x-cp1258': 'windows-1258',
+ 'x-mac-cyrillic': 'x-mac-cyrillic',
+ 'x-mac-ukrainian': 'x-mac-cyrillic',
+ 'chinese': 'gbk',
+ 'csgb2312': 'gbk',
+ 'csiso58gb231280': 'gbk',
+ 'gb2312': 'gbk',
+ 'gb_2312': 'gbk',
+ 'gb_2312-80': 'gbk',
+ 'gbk': 'gbk',
+ 'iso-ir-58': 'gbk',
+ 'x-gbk': 'gbk',
+ 'gb18030': 'gb18030',
+ 'hz-gb-2312': 'hz-gb-2312',
+ 'big5': 'big5',
+ 'big5-hkscs': 'big5',
+ 'cn-big5': 'big5',
+ 'csbig5': 'big5',
+ 'x-x-big5': 'big5',
+ 'cseucpkdfmtjapanese': 'euc-jp',
+ 'euc-jp': 'euc-jp',
+ 'x-euc-jp': 'euc-jp',
+ 'csiso2022jp': 'iso-2022-jp',
+ 'iso-2022-jp': 'iso-2022-jp',
+ 'csshiftjis': 'shift_jis',
+ 'ms_kanji': 'shift_jis',
+ 'shift-jis': 'shift_jis',
+ 'shift_jis': 'shift_jis',
+ 'sjis': 'shift_jis',
+ 'windows-31j': 'shift_jis',
+ 'x-sjis': 'shift_jis',
+ 'cseuckr': 'euc-kr',
+ 'csksc56011987': 'euc-kr',
+ 'euc-kr': 'euc-kr',
+ 'iso-ir-149': 'euc-kr',
+ 'korean': 'euc-kr',
+ 'ks_c_5601-1987': 'euc-kr',
+ 'ks_c_5601-1989': 'euc-kr',
+ 'ksc5601': 'euc-kr',
+ 'ksc_5601': 'euc-kr',
+ 'windows-949': 'euc-kr',
+ 'csiso2022kr': 'iso-2022-kr',
+ 'iso-2022-kr': 'iso-2022-kr',
+ 'utf-16be': 'utf-16be',
+ 'utf-16': 'utf-16le',
+ 'utf-16le': 'utf-16le',
+ 'x-user-defined': 'x-user-defined',
+}
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/mklabels.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/mklabels.py
new file mode 100644
index 00000000..295dc928
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/mklabels.py
@@ -0,0 +1,59 @@
+"""
+
+ webencodings.mklabels
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Regenerate the webencodings.labels module.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+import json
+try:
+ from urllib import urlopen
+except ImportError:
+ from urllib.request import urlopen
+
+
+def assert_lower(string):
+ assert string == string.lower()
+ return string
+
+
+def generate(url):
+ parts = ['''\
+"""
+
+ webencodings.labels
+ ~~~~~~~~~~~~~~~~~~~
+
+ Map encoding labels to their name.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+# XXX Do not edit!
+# This file is automatically generated by mklabels.py
+
+LABELS = {
+''']
+ labels = [
+ (repr(assert_lower(label)).lstrip('u'),
+ repr(encoding['name']).lstrip('u'))
+ for category in json.loads(urlopen(url).read().decode('ascii'))
+ for encoding in category['encodings']
+ for label in encoding['labels']]
+ max_len = max(len(label) for label, name in labels)
+ parts.extend(
+ ' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name)
+ for label, name in labels)
+ parts.append('}')
+ return ''.join(parts)
+
+
+if __name__ == '__main__':
+ print(generate('http://encoding.spec.whatwg.org/encodings.json'))
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/tests.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/tests.py
new file mode 100644
index 00000000..e12c10d0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/tests.py
@@ -0,0 +1,153 @@
+# coding: utf-8
+"""
+
+ webencodings.tests
+ ~~~~~~~~~~~~~~~~~~
+
+ A basic test suite for Encoding.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+from __future__ import unicode_literals
+
+from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode,
+ IncrementalDecoder, IncrementalEncoder, UTF8)
+
+
+def assert_raises(exception, function, *args, **kwargs):
+ try:
+ function(*args, **kwargs)
+ except exception:
+ return
+ else: # pragma: no cover
+ raise AssertionError('Did not raise %s.' % exception)
+
+
+def test_labels():
+ assert lookup('utf-8').name == 'utf-8'
+ assert lookup('Utf-8').name == 'utf-8'
+ assert lookup('UTF-8').name == 'utf-8'
+ assert lookup('utf8').name == 'utf-8'
+ assert lookup('utf8 ').name == 'utf-8'
+ assert lookup(' \r\nutf8\t').name == 'utf-8'
+ assert lookup('u8') is None # Python label.
+ assert lookup('utf-8 ') is None # Non-ASCII white space.
+
+ assert lookup('US-ASCII').name == 'windows-1252'
+ assert lookup('iso-8859-1').name == 'windows-1252'
+ assert lookup('latin1').name == 'windows-1252'
+ assert lookup('LATIN1').name == 'windows-1252'
+ assert lookup('latin-1') is None
+ assert lookup('LATİN1') is None # ASCII-only case insensitivity.
+
+
+def test_all_labels():
+ for label in LABELS:
+ assert decode(b'', label) == ('', lookup(label))
+ assert encode('', label) == b''
+ for repeat in [0, 1, 12]:
+ output, _ = iter_decode([b''] * repeat, label)
+ assert list(output) == []
+ assert list(iter_encode([''] * repeat, label)) == []
+ decoder = IncrementalDecoder(label)
+ assert decoder.decode(b'') == ''
+ assert decoder.decode(b'', final=True) == ''
+ encoder = IncrementalEncoder(label)
+ assert encoder.encode('') == b''
+ assert encoder.encode('', final=True) == b''
+ # All encoding names are valid labels too:
+ for name in set(LABELS.values()):
+ assert lookup(name).name == name
+
+
+def test_invalid_label():
+ assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid')
+ assert_raises(LookupError, encode, 'é', 'invalid')
+ assert_raises(LookupError, iter_decode, [], 'invalid')
+ assert_raises(LookupError, iter_encode, [], 'invalid')
+ assert_raises(LookupError, IncrementalDecoder, 'invalid')
+ assert_raises(LookupError, IncrementalEncoder, 'invalid')
+
+
+def test_decode():
+ assert decode(b'\x80', 'latin1') == ('€', lookup('latin1'))
+ assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1'))
+ assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8'))
+ assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8'))
+ assert decode(b'\xc3\xa9', 'ascii') == ('Ã©', lookup('ascii'))
+ assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM
+
+ assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM
+ assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM
+ assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be'))
+ assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le'))
+
+ assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be'))
+ assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le'))
+ assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le'))
+
+ assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be'))
+ assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le'))
+ assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le'))
+
+
+def test_encode():
+ assert encode('é', 'latin1') == b'\xe9'
+ assert encode('é', 'utf8') == b'\xc3\xa9'
+ assert encode('é', 'utf-16') == b'\xe9\x00'
+ assert encode('é', 'utf-16le') == b'\xe9\x00'
+ assert encode('é', 'utf-16be') == b'\x00\xe9'
+
+
+def test_iter_decode():
+ def iter_decode_to_string(input, fallback_encoding):
+ output, _encoding = iter_decode(input, fallback_encoding)
+ return ''.join(output)
+ assert iter_decode_to_string([], 'latin1') == ''
+ assert iter_decode_to_string([b''], 'latin1') == ''
+ assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é'
+ assert iter_decode_to_string([b'hello'], 'latin1') == 'hello'
+ assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello'
+ assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello'
+ assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'Ã©'
+ assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD'
+ assert iter_decode_to_string([
+ b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é'
+ assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == ''
+ assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»'
+ assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é'
+ assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é'
+ assert iter_decode_to_string([
+ b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo'
+
+
+def test_iter_encode():
+ assert b''.join(iter_encode([], 'latin1')) == b''
+ assert b''.join(iter_encode([''], 'latin1')) == b''
+ assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00'
+ assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9'
+ assert b''.join(iter_encode([
+ '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo'
+
+
+def test_x_user_defined():
+ encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca'
+ decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca'
+ assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
+ assert encode(decoded, 'x-user-defined') == encoded
+ encoded = b'aa'
+ decoded = 'aa'
+ assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
+ assert encode(decoded, 'x-user-defined') == encoded
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/x_user_defined.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/x_user_defined.py
new file mode 100644
index 00000000..d16e3260
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_vendor/webencodings/x_user_defined.py
@@ -0,0 +1,325 @@
+# coding: utf-8
+"""
+
+ webencodings.x_user_defined
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ An implementation of the x-user-defined encoding.
+
+ :copyright: Copyright 2012 by Simon Sapin
+ :license: BSD, see LICENSE for details.
+
+"""
+
+from __future__ import unicode_literals
+
+import codecs
+
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+ def encode(self, input, errors='strict'):
+ return codecs.charmap_encode(input, errors, encoding_table)
+
+ def decode(self, input, errors='strict'):
+ return codecs.charmap_decode(input, errors, decoding_table)
+
+
+class IncrementalEncoder(codecs.IncrementalEncoder):
+ def encode(self, input, final=False):
+ return codecs.charmap_encode(input, self.errors, encoding_table)[0]
+
+
+class IncrementalDecoder(codecs.IncrementalDecoder):
+ def decode(self, input, final=False):
+ return codecs.charmap_decode(input, self.errors, decoding_table)[0]
+
+
+class StreamWriter(Codec, codecs.StreamWriter):
+ pass
+
+
+class StreamReader(Codec, codecs.StreamReader):
+ pass
+
+
+### encodings module API
+
+codec_info = codecs.CodecInfo(
+ name='x-user-defined',
+ encode=Codec().encode,
+ decode=Codec().decode,
+ incrementalencoder=IncrementalEncoder,
+ incrementaldecoder=IncrementalDecoder,
+ streamreader=StreamReader,
+ streamwriter=StreamWriter,
+)
+
+
+### Decoding Table
+
+# Python 3:
+# for c in range(256): print(' %r' % chr(c if c < 128 else c + 0xF700))
+decoding_table = (
+ '\x00'
+ '\x01'
+ '\x02'
+ '\x03'
+ '\x04'
+ '\x05'
+ '\x06'
+ '\x07'
+ '\x08'
+ '\t'
+ '\n'
+ '\x0b'
+ '\x0c'
+ '\r'
+ '\x0e'
+ '\x0f'
+ '\x10'
+ '\x11'
+ '\x12'
+ '\x13'
+ '\x14'
+ '\x15'
+ '\x16'
+ '\x17'
+ '\x18'
+ '\x19'
+ '\x1a'
+ '\x1b'
+ '\x1c'
+ '\x1d'
+ '\x1e'
+ '\x1f'
+ ' '
+ '!'
+ '"'
+ '#'
+ '$'
+ '%'
+ '&'
+ "'"
+ '('
+ ')'
+ '*'
+ '+'
+ ','
+ '-'
+ '.'
+ '/'
+ '0'
+ '1'
+ '2'
+ '3'
+ '4'
+ '5'
+ '6'
+ '7'
+ '8'
+ '9'
+ ':'
+ ';'
+ '<'
+ '='
+ '>'
+ '?'
+ '@'
+ 'A'
+ 'B'
+ 'C'
+ 'D'
+ 'E'
+ 'F'
+ 'G'
+ 'H'
+ 'I'
+ 'J'
+ 'K'
+ 'L'
+ 'M'
+ 'N'
+ 'O'
+ 'P'
+ 'Q'
+ 'R'
+ 'S'
+ 'T'
+ 'U'
+ 'V'
+ 'W'
+ 'X'
+ 'Y'
+ 'Z'
+ '['
+ '\\'
+ ']'
+ '^'
+ '_'
+ '`'
+ 'a'
+ 'b'
+ 'c'
+ 'd'
+ 'e'
+ 'f'
+ 'g'
+ 'h'
+ 'i'
+ 'j'
+ 'k'
+ 'l'
+ 'm'
+ 'n'
+ 'o'
+ 'p'
+ 'q'
+ 'r'
+ 's'
+ 't'
+ 'u'
+ 'v'
+ 'w'
+ 'x'
+ 'y'
+ 'z'
+ '{'
+ '|'
+ '}'
+ '~'
+ '\x7f'
+ '\uf780'
+ '\uf781'
+ '\uf782'
+ '\uf783'
+ '\uf784'
+ '\uf785'
+ '\uf786'
+ '\uf787'
+ '\uf788'
+ '\uf789'
+ '\uf78a'
+ '\uf78b'
+ '\uf78c'
+ '\uf78d'
+ '\uf78e'
+ '\uf78f'
+ '\uf790'
+ '\uf791'
+ '\uf792'
+ '\uf793'
+ '\uf794'
+ '\uf795'
+ '\uf796'
+ '\uf797'
+ '\uf798'
+ '\uf799'
+ '\uf79a'
+ '\uf79b'
+ '\uf79c'
+ '\uf79d'
+ '\uf79e'
+ '\uf79f'
+ '\uf7a0'
+ '\uf7a1'
+ '\uf7a2'
+ '\uf7a3'
+ '\uf7a4'
+ '\uf7a5'
+ '\uf7a6'
+ '\uf7a7'
+ '\uf7a8'
+ '\uf7a9'
+ '\uf7aa'
+ '\uf7ab'
+ '\uf7ac'
+ '\uf7ad'
+ '\uf7ae'
+ '\uf7af'
+ '\uf7b0'
+ '\uf7b1'
+ '\uf7b2'
+ '\uf7b3'
+ '\uf7b4'
+ '\uf7b5'
+ '\uf7b6'
+ '\uf7b7'
+ '\uf7b8'
+ '\uf7b9'
+ '\uf7ba'
+ '\uf7bb'
+ '\uf7bc'
+ '\uf7bd'
+ '\uf7be'
+ '\uf7bf'
+ '\uf7c0'
+ '\uf7c1'
+ '\uf7c2'
+ '\uf7c3'
+ '\uf7c4'
+ '\uf7c5'
+ '\uf7c6'
+ '\uf7c7'
+ '\uf7c8'
+ '\uf7c9'
+ '\uf7ca'
+ '\uf7cb'
+ '\uf7cc'
+ '\uf7cd'
+ '\uf7ce'
+ '\uf7cf'
+ '\uf7d0'
+ '\uf7d1'
+ '\uf7d2'
+ '\uf7d3'
+ '\uf7d4'
+ '\uf7d5'
+ '\uf7d6'
+ '\uf7d7'
+ '\uf7d8'
+ '\uf7d9'
+ '\uf7da'
+ '\uf7db'
+ '\uf7dc'
+ '\uf7dd'
+ '\uf7de'
+ '\uf7df'
+ '\uf7e0'
+ '\uf7e1'
+ '\uf7e2'
+ '\uf7e3'
+ '\uf7e4'
+ '\uf7e5'
+ '\uf7e6'
+ '\uf7e7'
+ '\uf7e8'
+ '\uf7e9'
+ '\uf7ea'
+ '\uf7eb'
+ '\uf7ec'
+ '\uf7ed'
+ '\uf7ee'
+ '\uf7ef'
+ '\uf7f0'
+ '\uf7f1'
+ '\uf7f2'
+ '\uf7f3'
+ '\uf7f4'
+ '\uf7f5'
+ '\uf7f6'
+ '\uf7f7'
+ '\uf7f8'
+ '\uf7f9'
+ '\uf7fa'
+ '\uf7fb'
+ '\uf7fc'
+ '\uf7fd'
+ '\uf7fe'
+ '\uf7ff'
+)
+
+### Encoding table
+encoding_table = codecs.charmap_build(decoding_table)
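+
+
+# Illustrative round-trip (values consistent with tests.py): bytes below 0x80
+# map to themselves, bytes 0x80-0xFF map to U+F780-U+F7FF.
+#
+#     >>> codecs.charmap_decode(b'a\xe9', 'strict', decoding_table)[0]
+#     'a\uf7e9'
+#     >>> codecs.charmap_encode('a\uf7e9', 'strict', encoding_table)[0]
+#     b'a\xe9'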