summaryrefslogtreecommitdiff
path: root/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal
diff options
context:
space:
mode:
Diffstat (limited to '.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal')
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py77
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pycbin0 -> 1695 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pycbin0 -> 7390 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pycbin0 -> 7008 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pycbin0 -> 10510 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pycbin0 -> 25556 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pycbin0 -> 12700 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pycbin0 -> 37345 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pycbin0 -> 10498 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pycbin0 -> 2914 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pycbin0 -> 8273 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pycbin0 -> 3134 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pycbin0 -> 27114 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py218
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py224
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py4
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pycbin0 -> 229 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pycbin0 -> 5042 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pycbin0 -> 8106 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pycbin0 -> 19805 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pycbin0 -> 2151 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pycbin0 -> 8893 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pycbin0 -> 358 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py152
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py346
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py929
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py98
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py261
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py8
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py81
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pycbin0 -> 2526 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pycbin0 -> 1282 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pycbin0 -> 3031 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pycbin0 -> 7072 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pycbin0 -> 3313 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pycbin0 -> 4649 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pycbin0 -> 2973 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pycbin0 -> 2021 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pycbin0 -> 1197 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pycbin0 -> 12835 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pycbin0 -> 8882 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pycbin0 -> 4372 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pycbin0 -> 5844 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pycbin0 -> 2653 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pycbin0 -> 4819 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py41
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py94
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py258
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py114
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py168
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py101
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py57
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py37
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py580
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py311
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py139
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py168
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py78
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py181
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py417
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py23
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pycbin0 -> 814 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pycbin0 -> 1593 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pycbin0 -> 939 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pycbin0 -> 3007 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pycbin0 -> 999 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py33
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py15
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py80
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py17
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py1163
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py305
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py1508
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py457
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py142
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py2
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pycbin0 -> 217 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pycbin0 -> 1442 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pycbin0 -> 2220 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pycbin0 -> 1121 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pycbin0 -> 6207 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pycbin0 -> 3215 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pycbin0 -> 1579 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pycbin0 -> 3155 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py36
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py73
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py31
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py213
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py113
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py47
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py106
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py0
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pycbin0 -> 153 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pycbin0 -> 3649 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pycbin0 -> 5643 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pycbin0 -> 5671 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py159
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py253
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py287
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py387
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py171
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py78
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pycbin0 -> 1664 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pycbin0 -> 7860 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pycbin0 -> 9310 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pycbin0 -> 25400 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pycbin0 -> 5625 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pycbin0 -> 3176 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pycbin0 -> 17239 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py349
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py399
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py1035
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py193
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py96
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py633
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py0
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pycbin0 -> 148 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pycbin0 -> 7979 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pycbin0 -> 6955 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pycbin0 -> 2765 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pycbin0 -> 1232 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pycbin0 -> 617 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pycbin0 -> 2212 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pycbin0 -> 4088 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pycbin0 -> 9080 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pycbin0 -> 755 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pycbin0 -> 29434 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pycbin0 -> 1896 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pycbin0 -> 4154 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pycbin0 -> 2583 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pycbin0 -> 1004 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pycbin0 -> 4870 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pycbin0 -> 1278 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pycbin0 -> 11697 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pycbin0 -> 863 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py268
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py293
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py100
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py39
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py30
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py120
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py128
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py394
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py20
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py1204
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py40
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py178
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py94
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py36
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py155
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py29
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py424
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py34
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py12
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pycbin0 -> 424 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pycbin0 -> 3340 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pycbin0 -> 8877 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pycbin0 -> 3641 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pycbin0 -> 8244 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pycbin0 -> 16902 bytes
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py101
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py358
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py103
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py314
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py600
-rw-r--r--.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py1125
166 files changed, 19745 insertions, 0 deletions
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py
new file mode 100644
index 00000000..fbadc28a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__init__.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+from __future__ import absolute_import
+
+import locale
+import logging
+import os
+import warnings
+
+import sys
+
+# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
+# but if invoked (i.e. imported), it will issue a warning to stderr if socks
+# isn't available. requests unconditionally imports urllib3's socks contrib
+# module, triggering this warning. The warning breaks DEP-8 tests (because of
+# the stderr output) and is just plain annoying in normal usage. I don't want
+# to add socks as yet another dependency for pip, nor do I want to allow-stderr
+# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
+# be done before the import of pip.vcs.
+from pip._vendor.urllib3.exceptions import DependencyWarning
+warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
+
+# We want to inject the use of SecureTransport as early as possible so that any
+# references or sessions or what have you are ensured to have it, however we
+# only want to do this in the case that we're running on macOS and the linked
+# OpenSSL is too old to handle TLSv1.2
+try:
+ import ssl
+except ImportError:
+ pass
+else:
+ # Checks for OpenSSL 1.0.1 on MacOS
+ if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
+ try:
+ from pip._vendor.urllib3.contrib import securetransport
+ except (ImportError, OSError):
+ pass
+ else:
+ securetransport.inject_into_urllib3()
+
+from pip._internal.cli.autocompletion import autocomplete
+from pip._internal.cli.main_parser import parse_command
+from pip._internal.commands import commands_dict
+from pip._internal.exceptions import PipError
+from pip._internal.utils import deprecation
+from pip._vendor.urllib3.exceptions import InsecureRequestWarning
+
+logger = logging.getLogger(__name__)
+
+# Hide the InsecureRequestWarning from urllib3
+warnings.filterwarnings("ignore", category=InsecureRequestWarning)
+
+
+def main(args=None):
+ if args is None:
+ args = sys.argv[1:]
+
+ # Configure our deprecation warnings to be sent through loggers
+ deprecation.install_warning_logger()
+
+ autocomplete()
+
+ try:
+ cmd_name, cmd_args = parse_command(args)
+ except PipError as exc:
+ sys.stderr.write("ERROR: %s" % exc)
+ sys.stderr.write(os.linesep)
+ sys.exit(1)
+
+ # Needed for locale.getpreferredencoding(False) to work
+ # in pip._internal.utils.encoding.auto_decode
+ try:
+ locale.setlocale(locale.LC_ALL, '')
+ except locale.Error as e:
+ # setlocale can apparently crash if locale are uninitialized
+ logger.debug("Ignoring error %s when setting locale", e)
+ command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
+ return command.main(cmd_args)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..fdb85bc3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pyc
new file mode 100644
index 00000000..7f3cdbb1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/build_env.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pyc
new file mode 100644
index 00000000..846c660d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/cache.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pyc
new file mode 100644
index 00000000..20c8743e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/configuration.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pyc
new file mode 100644
index 00000000..22fbb600
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/download.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 00000000..6d7ddb78
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pyc
new file mode 100644
index 00000000..0ee5a4fc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/index.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc
new file mode 100644
index 00000000..a671abd4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pyc
new file mode 100644
index 00000000..1b6dcdbe
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/locations.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pyc
new file mode 100644
index 00000000..307c519a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pep425tags.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pyc
new file mode 100644
index 00000000..baf9a7bb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/pyproject.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pyc
new file mode 100644
index 00000000..5cfb8ef6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/__pycache__/wheel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py
new file mode 100644
index 00000000..a060ceea
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/build_env.py
@@ -0,0 +1,218 @@
+"""Build Environment used for isolation during sdist building
+"""
+
+import logging
+import os
+import sys
+import textwrap
+from collections import OrderedDict
+from distutils.sysconfig import get_python_lib
+from sysconfig import get_paths
+
+from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
+
+from pip import __file__ as pip_location
+from pip._internal.utils.misc import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import Tuple, Set, Iterable, Optional, List
+ from pip._internal.index import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+class _Prefix:
+
+ def __init__(self, path):
+ # type: (str) -> None
+ self.path = path
+ self.setup = False
+ self.bin_dir = get_paths(
+ 'nt' if os.name == 'nt' else 'posix_prefix',
+ vars={'base': path, 'platbase': path}
+ )['scripts']
+ # Note: prefer distutils' sysconfig to get the
+ # library paths so PyPy is correctly supported.
+ purelib = get_python_lib(plat_specific=False, prefix=path)
+ platlib = get_python_lib(plat_specific=True, prefix=path)
+ if purelib == platlib:
+ self.lib_dirs = [purelib]
+ else:
+ self.lib_dirs = [purelib, platlib]
+
+
+class BuildEnvironment(object):
+ """Creates and manages an isolated environment to install build deps
+ """
+
+ def __init__(self):
+ # type: () -> None
+ self._temp_dir = TempDirectory(kind="build-env")
+ self._temp_dir.create()
+
+ self._prefixes = OrderedDict((
+ (name, _Prefix(os.path.join(self._temp_dir.path, name)))
+ for name in ('normal', 'overlay')
+ ))
+
+ self._bin_dirs = [] # type: List[str]
+ self._lib_dirs = [] # type: List[str]
+ for prefix in reversed(list(self._prefixes.values())):
+ self._bin_dirs.append(prefix.bin_dir)
+ self._lib_dirs.extend(prefix.lib_dirs)
+
+ # Customize site to:
+ # - ensure .pth files are honored
+ # - prevent access to system site packages
+ system_sites = {
+ os.path.normcase(site) for site in (
+ get_python_lib(plat_specific=False),
+ get_python_lib(plat_specific=True),
+ )
+ }
+ self._site_dir = os.path.join(self._temp_dir.path, 'site')
+ if not os.path.exists(self._site_dir):
+ os.mkdir(self._site_dir)
+ with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
+ fp.write(textwrap.dedent(
+ '''
+ import os, site, sys
+
+ # First, drop system-sites related paths.
+ original_sys_path = sys.path[:]
+ known_paths = set()
+ for path in {system_sites!r}:
+ site.addsitedir(path, known_paths=known_paths)
+ system_paths = set(
+ os.path.normcase(path)
+ for path in sys.path[len(original_sys_path):]
+ )
+ original_sys_path = [
+ path for path in original_sys_path
+ if os.path.normcase(path) not in system_paths
+ ]
+ sys.path = original_sys_path
+
+ # Second, add lib directories.
+ # ensuring .pth file are processed.
+ for path in {lib_dirs!r}:
+ assert not path in sys.path
+ site.addsitedir(path)
+ '''
+ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
+
+ def __enter__(self):
+ self._save_env = {
+ name: os.environ.get(name, None)
+ for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
+ }
+
+ path = self._bin_dirs[:]
+ old_path = self._save_env['PATH']
+ if old_path:
+ path.extend(old_path.split(os.pathsep))
+
+ pythonpath = [self._site_dir]
+
+ os.environ.update({
+ 'PATH': os.pathsep.join(path),
+ 'PYTHONNOUSERSITE': '1',
+ 'PYTHONPATH': os.pathsep.join(pythonpath),
+ })
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ for varname, old_value in self._save_env.items():
+ if old_value is None:
+ os.environ.pop(varname, None)
+ else:
+ os.environ[varname] = old_value
+
+ def cleanup(self):
+ # type: () -> None
+ self._temp_dir.cleanup()
+
+ def check_requirements(self, reqs):
+ # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
+ """Return 2 sets:
+ - conflicting requirements: set of (installed, wanted) reqs tuples
+ - missing requirements: set of reqs
+ """
+ missing = set()
+ conflicting = set()
+ if reqs:
+ ws = WorkingSet(self._lib_dirs)
+ for req in reqs:
+ try:
+ if ws.find(Requirement.parse(req)) is None:
+ missing.add(req)
+ except VersionConflict as e:
+ conflicting.add((str(e.args[0].as_requirement()),
+ str(e.args[1])))
+ return conflicting, missing
+
+ def install_requirements(
+ self,
+ finder, # type: PackageFinder
+ requirements, # type: Iterable[str]
+ prefix_as_string, # type: str
+ message # type: Optional[str]
+ ):
+ # type: (...) -> None
+ prefix = self._prefixes[prefix_as_string]
+ assert not prefix.setup
+ prefix.setup = True
+ if not requirements:
+ return
+ args = [
+ sys.executable, os.path.dirname(pip_location), 'install',
+ '--ignore-installed', '--no-user', '--prefix', prefix.path,
+ '--no-warn-script-location',
+ ] # type: List[str]
+ if logger.getEffectiveLevel() <= logging.DEBUG:
+ args.append('-v')
+ for format_control in ('no_binary', 'only_binary'):
+ formats = getattr(finder.format_control, format_control)
+ args.extend(('--' + format_control.replace('_', '-'),
+ ','.join(sorted(formats or {':none:'}))))
+
+ index_urls = finder.index_urls
+ if index_urls:
+ args.extend(['-i', index_urls[0]])
+ for extra_index in index_urls[1:]:
+ args.extend(['--extra-index-url', extra_index])
+ else:
+ args.append('--no-index')
+ for link in finder.find_links:
+ args.extend(['--find-links', link])
+
+ for host in finder.trusted_hosts:
+ args.extend(['--trusted-host', host])
+ if finder.allow_all_prereleases:
+ args.append('--pre')
+ args.append('--')
+ args.extend(requirements)
+ with open_spinner(message) as spinner:
+ call_subprocess(args, spinner=spinner)
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+ """A no-op drop-in replacement for BuildEnvironment
+ """
+
+ def __init__(self):
+ pass
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+ def cleanup(self):
+ pass
+
+ def install_requirements(self, finder, requirements, prefix, message):
+ raise NotImplementedError()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py
new file mode 100644
index 00000000..894624c1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cache.py
@@ -0,0 +1,224 @@
+"""Cache Management
+"""
+
+import errno
+import hashlib
+import logging
+import os
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.models.link import Link
+from pip._internal.utils.compat import expanduser
+from pip._internal.utils.misc import path_to_url
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import InvalidWheelFilename, Wheel
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Set, List, Any
+ from pip._internal.index import FormatControl
+
+logger = logging.getLogger(__name__)
+
+
+class Cache(object):
+ """An abstract class - provides cache directories for data from links
+
+
+ :param cache_dir: The root of the cache.
+ :param format_control: An object of FormatControl class to limit
+ binaries being read from the cache.
+ :param allowed_formats: which formats of files the cache should store.
+ ('binary' and 'source' are the only allowed values)
+ """
+
+ def __init__(self, cache_dir, format_control, allowed_formats):
+ # type: (str, FormatControl, Set[str]) -> None
+ super(Cache, self).__init__()
+ self.cache_dir = expanduser(cache_dir) if cache_dir else None
+ self.format_control = format_control
+ self.allowed_formats = allowed_formats
+
+ _valid_formats = {"source", "binary"}
+ assert self.allowed_formats.union(_valid_formats) == _valid_formats
+
+ def _get_cache_path_parts(self, link):
+ # type: (Link) -> List[str]
+ """Get parts of part that must be os.path.joined with cache_dir
+ """
+
+ # We want to generate an url to use as our cache key, we don't want to
+ # just re-use the URL because it might have other items in the fragment
+ # and we don't care about those.
+ key_parts = [link.url_without_fragment]
+ if link.hash_name is not None and link.hash is not None:
+ key_parts.append("=".join([link.hash_name, link.hash]))
+ key_url = "#".join(key_parts)
+
+ # Encode our key url with sha224, we'll use this because it has similar
+ # security properties to sha256, but with a shorter total output (and
+ # thus less secure). However the differences don't make a lot of
+ # difference for our use case here.
+ hashed = hashlib.sha224(key_url.encode()).hexdigest()
+
+ # We want to nest the directories some to prevent having a ton of top
+ # level directories where we might run out of sub directories on some
+ # FS.
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+ return parts
+
+ def _get_candidates(self, link, package_name):
+ # type: (Link, Optional[str]) -> List[Any]
+ can_not_cache = (
+ not self.cache_dir or
+ not package_name or
+ not link
+ )
+ if can_not_cache:
+ return []
+
+ canonical_name = canonicalize_name(package_name)
+ formats = self.format_control.get_allowed_formats(
+ canonical_name
+ )
+ if not self.allowed_formats.intersection(formats):
+ return []
+
+ root = self.get_path_for_link(link)
+ try:
+ return os.listdir(root)
+ except OSError as err:
+ if err.errno in {errno.ENOENT, errno.ENOTDIR}:
+ return []
+ raise
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ """Return a directory to store cached items in for link.
+ """
+ raise NotImplementedError()
+
+ def get(self, link, package_name):
+ # type: (Link, Optional[str]) -> Link
+ """Returns a link to a cached item if it exists, otherwise returns the
+ passed link.
+ """
+ raise NotImplementedError()
+
+ def _link_for_candidate(self, link, candidate):
+ # type: (Link, str) -> Link
+ root = self.get_path_for_link(link)
+ path = os.path.join(root, candidate)
+
+ return Link(path_to_url(path))
+
+ def cleanup(self):
+ # type: () -> None
+ pass
+
+
+class SimpleWheelCache(Cache):
+ """A cache of wheels for future installs.
+ """
+
+ def __init__(self, cache_dir, format_control):
+ # type: (str, FormatControl) -> None
+ super(SimpleWheelCache, self).__init__(
+ cache_dir, format_control, {"binary"}
+ )
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ """Return a directory to store cached wheels for link
+
+ Because there are M wheels for any one sdist, we provide a directory
+ to cache them in, and then consult that directory when looking up
+ cache hits.
+
+ We only insert things into the cache if they have plausible version
+ numbers, so that we don't contaminate the cache with things that were
+ not unique. E.g. ./package might have dozens of installs done for it
+ and build a version of 0.0...and if we built and cached a wheel, we'd
+ end up using the same wheel even if the source has been edited.
+
+ :param link: The link of the sdist for which this will cache wheels.
+ """
+ parts = self._get_cache_path_parts(link)
+
+ # Store wheels within the root cache_dir
+ return os.path.join(self.cache_dir, "wheels", *parts)
+
+ def get(self, link, package_name):
+ # type: (Link, Optional[str]) -> Link
+ candidates = []
+
+ for wheel_name in self._get_candidates(link, package_name):
+ try:
+ wheel = Wheel(wheel_name)
+ except InvalidWheelFilename:
+ continue
+ if not wheel.supported():
+ # Built for a different python/arch/etc
+ continue
+ candidates.append((wheel.support_index_min(), wheel_name))
+
+ if not candidates:
+ return link
+
+ return self._link_for_candidate(link, min(candidates)[1])
+
+
+class EphemWheelCache(SimpleWheelCache):
+ """A SimpleWheelCache that creates it's own temporary cache directory
+ """
+
+ def __init__(self, format_control):
+ # type: (FormatControl) -> None
+ self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
+ self._temp_dir.create()
+
+ super(EphemWheelCache, self).__init__(
+ self._temp_dir.path, format_control
+ )
+
+ def cleanup(self):
+ # type: () -> None
+ self._temp_dir.cleanup()
+
+
+class WheelCache(Cache):
+ """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+ This Cache allows for gracefully degradation, using the ephem wheel cache
+ when a certain link is not found in the simple wheel cache first.
+ """
+
+ def __init__(self, cache_dir, format_control):
+ # type: (str, FormatControl) -> None
+ super(WheelCache, self).__init__(
+ cache_dir, format_control, {'binary'}
+ )
+ self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
+ self._ephem_cache = EphemWheelCache(format_control)
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ return self._wheel_cache.get_path_for_link(link)
+
+ def get_ephem_path_for_link(self, link):
+ # type: (Link) -> str
+ return self._ephem_cache.get_path_for_link(link)
+
+ def get(self, link, package_name):
+ # type: (Link, Optional[str]) -> Link
+ retval = self._wheel_cache.get(link, package_name)
+ if retval is link:
+ retval = self._ephem_cache.get(link, package_name)
+ return retval
+
+ def cleanup(self):
+ # type: () -> None
+ self._wheel_cache.cleanup()
+ self._ephem_cache.cleanup()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py
new file mode 100644
index 00000000..e589bb91
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__init__.py
@@ -0,0 +1,4 @@
+"""Subpackage containing all of pip's command line interface related code
+"""
+
+# This file intentionally does not import submodules
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..09f43687
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc
new file mode 100644
index 00000000..4c1743dd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc
new file mode 100644
index 00000000..a3a0d223
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc
new file mode 100644
index 00000000..0e3c51cb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc
new file mode 100644
index 00000000..0b719c29
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pyc
new file mode 100644
index 00000000..3fa51194
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/parser.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc
new file mode 100644
index 00000000..8cede51c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py
new file mode 100644
index 00000000..0a04199e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py
@@ -0,0 +1,152 @@
+"""Logic that powers autocompletion installed by ``pip completion``.
+"""
+
+import optparse
+import os
+import sys
+
+from pip._internal.cli.main_parser import create_main_parser
+from pip._internal.commands import commands_dict, get_summaries
+from pip._internal.utils.misc import get_installed_distributions
+
+
+def autocomplete():
+ """Entry Point for completion of main and subcommand options.
+ """
+ # Don't complete if user hasn't sourced bash_completion file.
+ if 'PIP_AUTO_COMPLETE' not in os.environ:
+ return
+ cwords = os.environ['COMP_WORDS'].split()[1:]
+ cword = int(os.environ['COMP_CWORD'])
+ try:
+ current = cwords[cword - 1]
+ except IndexError:
+ current = ''
+
+ subcommands = [cmd for cmd, summary in get_summaries()]
+ options = []
+ # subcommand
+ try:
+ subcommand_name = [w for w in cwords if w in subcommands][0]
+ except IndexError:
+ subcommand_name = None
+
+ parser = create_main_parser()
+ # subcommand options
+ if subcommand_name:
+ # special case: 'help' subcommand has no options
+ if subcommand_name == 'help':
+ sys.exit(1)
+ # special case: list locally installed dists for show and uninstall
+ should_list_installed = (
+ subcommand_name in ['show', 'uninstall'] and
+ not current.startswith('-')
+ )
+ if should_list_installed:
+ installed = []
+ lc = current.lower()
+ for dist in get_installed_distributions(local_only=True):
+ if dist.key.startswith(lc) and dist.key not in cwords[1:]:
+ installed.append(dist.key)
+ # if there are no dists installed, fall back to option completion
+ if installed:
+ for dist in installed:
+ print(dist)
+ sys.exit(1)
+
+ subcommand = commands_dict[subcommand_name]()
+
+ for opt in subcommand.parser.option_list_all:
+ if opt.help != optparse.SUPPRESS_HELP:
+ for opt_str in opt._long_opts + opt._short_opts:
+ options.append((opt_str, opt.nargs))
+
+ # filter out previously specified options from available options
+ prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
+ # filter options by current input
+ options = [(k, v) for k, v in options if k.startswith(current)]
+ # get completion type given cwords and available subcommand options
+ completion_type = get_path_completion_type(
+ cwords, cword, subcommand.parser.option_list_all,
+ )
+ # get completion files and directories if ``completion_type`` is
+ # ``<file>``, ``<dir>`` or ``<path>``
+ if completion_type:
+ options = auto_complete_paths(current, completion_type)
+ options = ((opt, 0) for opt in options)
+ for option in options:
+ opt_label = option[0]
+ # append '=' to options which require args
+ if option[1] and option[0][:2] == "--":
+ opt_label += '='
+ print(opt_label)
+ else:
+ # show main parser options only when necessary
+
+ opts = [i.option_list for i in parser.option_groups]
+ opts.append(parser.option_list)
+ opts = (o for it in opts for o in it)
+ if current.startswith('-'):
+ for opt in opts:
+ if opt.help != optparse.SUPPRESS_HELP:
+ subcommands += opt._long_opts + opt._short_opts
+ else:
+ # get completion type given cwords and all available options
+ completion_type = get_path_completion_type(cwords, cword, opts)
+ if completion_type:
+ subcommands = auto_complete_paths(current, completion_type)
+
+ print(' '.join([x for x in subcommands if x.startswith(current)]))
+ sys.exit(1)
+
+
+def get_path_completion_type(cwords, cword, opts):
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+ :param cwords: same as the environmental variable ``COMP_WORDS``
+ :param cword: same as the environmental variable ``COMP_CWORD``
+ :param opts: The available options to check
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
+ """
+ if cword < 2 or not cwords[cword - 2].startswith('-'):
+ return
+ for opt in opts:
+ if opt.help == optparse.SUPPRESS_HELP:
+ continue
+ for o in str(opt).split('/'):
+ if cwords[cword - 2].split('=')[0] == o:
+ if not opt.metavar or any(
+ x in ('path', 'file', 'dir')
+ for x in opt.metavar.split('/')):
+ return opt.metavar
+
+
+def auto_complete_paths(current, completion_type):
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
+ and directories starting with ``current``; otherwise only list directories
+ starting with ``current``.
+
+ :param current: The word to be completed
+ :param completion_type: path completion type(`file`, `path` or `dir`)i
+ :return: A generator of regular files and/or directories
+ """
+ directory, filename = os.path.split(current)
+ current_path = os.path.abspath(directory)
+ # Don't complete paths if they can't be accessed
+ if not os.access(current_path, os.R_OK):
+ return
+ filename = os.path.normcase(filename)
+ # list all files that start with ``filename``
+ file_list = (x for x in os.listdir(current_path)
+ if os.path.normcase(x).startswith(filename))
+ for f in file_list:
+ opt = os.path.join(current_path, f)
+ comp_file = os.path.normcase(os.path.join(directory, f))
+ # complete regular files when there is not ``<dir>`` after option
+ # complete directories when there is ``<file>``, ``<path>`` or
+ # ``<dir>``after option
+ if completion_type != 'dir' and os.path.isfile(opt):
+ yield comp_file
+ elif os.path.isdir(opt):
+ yield os.path.join(comp_file, '')
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py
new file mode 100644
index 00000000..90830be4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/base_command.py
@@ -0,0 +1,346 @@
+"""Base Command class, and related routines"""
+from __future__ import absolute_import, print_function
+
+import logging
+import logging.config
+import optparse
+import os
+import platform
+import sys
+import traceback
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_search_scope
+from pip._internal.cli.parser import (
+ ConfigOptionParser, UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.cli.status_codes import (
+ ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
+ VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.download import PipSession
+from pip._internal.exceptions import (
+ BadCommand, CommandError, InstallationError, PreviousBuildDirError,
+ UninstallationError,
+)
+from pip._internal.index import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.req.constructors import (
+ install_req_from_editable, install_req_from_line,
+)
+from pip._internal.req.req_file import parse_requirements
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.outdated import pip_version_check
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, List, Tuple, Any
+ from optparse import Values
+ from pip._internal.cache import WheelCache
+ from pip._internal.req.req_set import RequirementSet
+
+__all__ = ['Command']
+
+logger = logging.getLogger(__name__)
+
+
+class Command(object):
+ name = None # type: Optional[str]
+ usage = None # type: Optional[str]
+ ignore_require_venv = False # type: bool
+
+ def __init__(self, isolated=False):
+ # type: (bool) -> None
+ parser_kw = {
+ 'usage': self.usage,
+ 'prog': '%s %s' % (get_prog(), self.name),
+ 'formatter': UpdatingDefaultsHelpFormatter(),
+ 'add_help_option': False,
+ 'name': self.name,
+ 'description': self.__doc__,
+ 'isolated': isolated,
+ }
+
+ self.parser = ConfigOptionParser(**parser_kw)
+
+ # Commands should add options to this option group
+ optgroup_name = '%s Options' % self.name.capitalize()
+ self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+ # Add the general options
+ gen_opts = cmdoptions.make_option_group(
+ cmdoptions.general_group,
+ self.parser,
+ )
+ self.parser.add_option_group(gen_opts)
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> Any
+ raise NotImplementedError
+
+ @classmethod
+ def _get_index_urls(cls, options):
+ """Return a list of index urls from user-provided options."""
+ index_urls = []
+ if not getattr(options, "no_index", False):
+ url = getattr(options, "index_url", None)
+ if url:
+ index_urls.append(url)
+ urls = getattr(options, "extra_index_urls", None)
+ if urls:
+ index_urls.extend(urls)
+ # Return None rather than an empty list
+ return index_urls or None
+
+ def _build_session(self, options, retries=None, timeout=None):
+ # type: (Values, Optional[int], Optional[int]) -> PipSession
+ session = PipSession(
+ cache=(
+ normalize_path(os.path.join(options.cache_dir, "http"))
+ if options.cache_dir else None
+ ),
+ retries=retries if retries is not None else options.retries,
+ insecure_hosts=options.trusted_hosts,
+ index_urls=self._get_index_urls(options),
+ )
+
+ # Handle custom ca-bundles from the user
+ if options.cert:
+ session.verify = options.cert
+
+ # Handle SSL client certificate
+ if options.client_cert:
+ session.cert = options.client_cert
+
+ # Handle timeouts
+ if options.timeout or timeout:
+ session.timeout = (
+ timeout if timeout is not None else options.timeout
+ )
+
+ # Handle configured proxies
+ if options.proxy:
+ session.proxies = {
+ "http": options.proxy,
+ "https": options.proxy,
+ }
+
+ # Determine if we can prompt the user for authentication or not
+ session.auth.prompting = not options.no_input
+
+ return session
+
+ def parse_args(self, args):
+ # type: (List[str]) -> Tuple
+ # factored out for testability
+ return self.parser.parse_args(args)
+
+ def main(self, args):
+ # type: (List[str]) -> int
+ options, args = self.parse_args(args)
+
+ # Set verbosity so that it can be used elsewhere.
+ self.verbosity = options.verbose - options.quiet
+
+ level_number = setup_logging(
+ verbosity=self.verbosity,
+ no_color=options.no_color,
+ user_log_file=options.log,
+ )
+
+ if sys.version_info[:2] == (2, 7):
+ message = (
+ "A future version of pip will drop support for Python 2.7. "
+ "More details about Python 2 support in pip, can be found at "
+ "https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa
+ )
+ if platform.python_implementation() == "CPython":
+ message = (
+ "Python 2.7 will reach the end of its life on January "
+ "1st, 2020. Please upgrade your Python as Python 2.7 "
+ "won't be maintained after that date. "
+ ) + message
+ deprecated(message, replacement=None, gone_in=None)
+
+ # TODO: Try to get these passing down from the command?
+ # without resorting to os.environ to hold these.
+ # This also affects isolated builds and it should.
+
+ if options.no_input:
+ os.environ['PIP_NO_INPUT'] = '1'
+
+ if options.exists_action:
+ os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
+
+ if options.require_venv and not self.ignore_require_venv:
+ # If a venv is required check if it can really be found
+ if not running_under_virtualenv():
+ logger.critical(
+ 'Could not find an activated virtualenv (required).'
+ )
+ sys.exit(VIRTUALENV_NOT_FOUND)
+
+ try:
+ status = self.run(options, args)
+ # FIXME: all commands should return an exit status
+ # and when it is done, isinstance is not needed anymore
+ if isinstance(status, int):
+ return status
+ except PreviousBuildDirError as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return PREVIOUS_BUILD_DIR_ERROR
+ except (InstallationError, UninstallationError, BadCommand) as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except CommandError as exc:
+ logger.critical('%s', exc)
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except BrokenStdoutLoggingError:
+ # Bypass our logger and write any remaining messages to stderr
+ # because stdout no longer works.
+ print('ERROR: Pipe to stdout was broken', file=sys.stderr)
+ if level_number <= logging.DEBUG:
+ traceback.print_exc(file=sys.stderr)
+
+ return ERROR
+ except KeyboardInterrupt:
+ logger.critical('Operation cancelled by user')
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except BaseException:
+ logger.critical('Exception:', exc_info=True)
+
+ return UNKNOWN_ERROR
+ finally:
+ allow_version_check = (
+ # Does this command have the index_group options?
+ hasattr(options, "no_index") and
+ # Is this command allowed to perform this check?
+ not (options.disable_pip_version_check or options.no_index)
+ )
+ # Check if we're using the latest version of pip available
+ if allow_version_check:
+ session = self._build_session(
+ options,
+ retries=0,
+ timeout=min(5, options.timeout)
+ )
+ with session:
+ pip_version_check(session, options)
+
+ # Shutdown the logging module
+ logging.shutdown()
+
+ return SUCCESS
+
+
+class RequirementCommand(Command):
+
+ @staticmethod
+ def populate_requirement_set(requirement_set, # type: RequirementSet
+ args, # type: List[str]
+ options, # type: Values
+ finder, # type: PackageFinder
+ session, # type: PipSession
+ name, # type: str
+ wheel_cache # type: Optional[WheelCache]
+ ):
+ # type: (...) -> None
+ """
+ Marshal cmd line args into a requirement set.
+ """
+ # NOTE: As a side-effect, options.require_hashes and
+ # requirement_set.require_hashes may be updated
+
+ for filename in options.constraints:
+ for req_to_add in parse_requirements(
+ filename,
+ constraint=True, finder=finder, options=options,
+ session=session, wheel_cache=wheel_cache):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in args:
+ req_to_add = install_req_from_line(
+ req, None, isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in options.editables:
+ req_to_add = install_req_from_editable(
+ req,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for filename in options.requirements:
+ for req_to_add in parse_requirements(
+ filename,
+ finder=finder, options=options, session=session,
+ wheel_cache=wheel_cache,
+ use_pep517=options.use_pep517):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+ # If --require-hashes was a line in a requirements file, tell
+ # RequirementSet about it:
+ requirement_set.require_hashes = options.require_hashes
+
+ if not (args or options.editables or options.requirements):
+ opts = {'name': name}
+ if options.find_links:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(maybe you meant "pip %(name)s %(links)s"?)' %
+ dict(opts, links=' '.join(options.find_links)))
+ else:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(see "pip help %(name)s")' % opts)
+
+ def _build_package_finder(
+ self,
+ options, # type: Values
+ session, # type: PipSession
+ target_python=None, # type: Optional[TargetPython]
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> PackageFinder
+ """
+ Create a package finder appropriate to this requirement command.
+
+ :param ignore_requires_python: Whether to ignore incompatible
+ "Requires-Python" values in links. Defaults to False.
+ """
+ search_scope = make_search_scope(options)
+ selection_prefs = SelectionPreferences(
+ allow_yanked=True,
+ format_control=options.format_control,
+ allow_all_prereleases=options.pre,
+ prefer_binary=options.prefer_binary,
+ ignore_requires_python=ignore_requires_python,
+ )
+
+ return PackageFinder.create(
+ search_scope=search_scope,
+ selection_prefs=selection_prefs,
+ trusted_hosts=options.trusted_hosts,
+ session=session,
+ target_python=target_python,
+ )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py
new file mode 100644
index 00000000..ecf4d20b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py
@@ -0,0 +1,929 @@
+"""
+shared options and groups
+
+The principle here is to define options once, but *not* instantiate them
+globally. One reason being that options with action='append' can carry state
+between parses. pip parses general options twice internally, and shouldn't
+pass on state. To be consistent, all options will follow this design.
+
+"""
+from __future__ import absolute_import
+
+import logging
+import textwrap
+import warnings
+from distutils.util import strtobool
+from functools import partial
+from optparse import SUPPRESS_HELP, Option, OptionGroup
+from textwrap import dedent
+
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.index import PyPI
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.hashes import STRONG_HASHES
+from pip._internal.utils.misc import redact_password_from_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import BAR_TYPES
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Callable, Dict, Optional, Tuple
+ from optparse import OptionParser, Values
+ from pip._internal.cli.parser import ConfigOptionParser
+
+logger = logging.getLogger(__name__)
+
+
def raise_option_error(parser, option, msg):
    """
    Report an option-parsing failure via parser.error().

    Args:
        parser: an OptionParser instance.
        option: an Option instance.
        msg: the error text.
    """
    # Collapse internal whitespace, then re-wrap to the terminal default.
    flattened = ' '.join('{} error: {}'.format(option, msg).split())
    parser.error(textwrap.fill(flattened))
+
+
def make_option_group(group, parser):
    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
    """
    Return an OptionGroup object
    group -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    # Each entry is a factory; instantiate a fresh Option per parse.
    for make_opt in group['options']:
        option_group.add_option(make_opt())
    return option_group
+
+
def check_install_build_global(options, check_options=None):
    # type: (Values, Optional[Values]) -> None
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    names = ["build_options", "global_options", "install_options"]
    if any(getattr(check_options, name, None) for name in names):
        # Per-setup.py options cannot be applied to wheel installs, so
        # force building everything from source.
        control = options.format_control
        control.disallow_binaries()
        # BUGFIX: the flags are singular (--install-option etc.); the old
        # message advertised nonexistent plural spellings.
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-option '
            '/ --global-option / --install-option.', stacklevel=2,
        )
+
+
def check_dist_restriction(options, check_target=False):
    # type: (Values, bool) -> None
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    """
    restricting_dist = any([
        options.python_version,
        options.platform,
        options.abi,
        options.implementation,
    ])

    # ":all:" in only_binary means no sdists at all.
    binary_only = FormatControl(set(), {':all:'})
    sdists_possible = (
        options.format_control != binary_only and
        not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if restricting_dist and sdists_possible:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target and restricting_dist and not options.target_dir:
        raise CommandError(
            "Can not use any platform or abi specific options unless "
            "installing via '--target'"
        )
+
+
+###########
+# options #
+###########
+
# NOTE: each name below is a factory (a partial over Option), not an Option
# instance -- see the module docstring for why options are never global.
help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.',
) # type: Callable[..., Option]

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
) # type: Callable[..., Option]

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
) # type: Callable[..., Option]

verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
) # type: Callable[..., Option]

no_color = partial(
    Option,
    '--no-color',
    dest='no_color',
    action='store_true',
    default=False,
    help="Suppress colored output",
) # type: Callable[..., Option]

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.',
) # type: Callable[..., Option]

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=(
        'Give less output. Option is additive, and can be used up to 3'
        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
        ' levels).'
    ),
) # type: Callable[..., Option]

progress_bar = partial(
    Option,
    '--progress-bar',
    dest='progress_bar',
    type='choice',
    choices=list(BAR_TYPES.keys()),
    default='on',
    help=(
        'Specify type of progress to be displayed [' +
        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
    ),
) # type: Callable[..., Option]

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
) # type: Callable[..., Option]

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
) # type: Callable[..., Option]

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
) # type: Callable[..., Option]

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).",
) # type: Callable[..., Option]

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).',
) # type: Callable[..., Option]

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP,
) # type: Callable[..., Option]
+
+
def exists_action():
    # type: () -> Option
    """Build the repeatable --exists-action option (what to do when a
    download/checkout path already exists)."""
    return Option(
        '--exists-action',
        dest='exists_action',
        metavar='action',
        type='choice',
        action='append',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )
+
+
# TLS / index endpoint options.
cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.",
) # type: Callable[..., Option]

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
) # type: Callable[..., Option]

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of the Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
) # type: Callable[..., Option]
+
+
def extra_index_url():
    # type: () -> Option
    """Build the repeatable --extra-index-url option."""
    return Option(
        '--extra-index-url',
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )
+
+
# Disables all index lookups; only --find-links sources remain usable.
no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
) # type: Callable[..., Option]
+
+
def find_links():
    # type: () -> Option
    """Build the repeatable -f/--find-links option."""
    return Option(
        '-f', '--find-links',
        dest='find_links',
        metavar='url',
        default=[],
        action='append',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.",
    )
+
+
def make_search_scope(options, suppress_no_index=False):
    # type: (Values, bool) -> SearchScope
    """
    Build a SearchScope from the parsed index-related options.

    :param suppress_no_index: Whether to ignore the --no-index option
        when constructing the SearchScope object.
    """
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index and not suppress_no_index:
        # Log the (password-scrubbed) URLs we are about to drop.
        redacted = ','.join(
            redact_password_from_url(url) for url in index_urls
        )
        logger.debug('Ignoring indexes: %s', redacted)
        index_urls = []

    return SearchScope(
        find_links=options.find_links,
        index_urls=index_urls,
    )
+
+
def trusted_host():
    # type: () -> Option
    """Build the repeatable --trusted-host option."""
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        metavar="HOSTNAME",
        default=[],
        action="append",
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )
+
+
def constraints():
    # type: () -> Option
    """Build the repeatable -c/--constraint option."""
    return Option(
        '-c', '--constraint',
        dest='constraints',
        metavar='file',
        default=[],
        action='append',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )
+
+
def requirements():
    # type: () -> Option
    """Build the repeatable -r/--requirement option."""
    return Option(
        '-r', '--requirement',
        dest='requirements',
        metavar='file',
        default=[],
        action='append',
        help='Install from the given requirements file. '
             'This option can be used multiple times.'
    )
+
+
def editable():
    # type: () -> Option
    """Build the repeatable -e/--editable option."""
    return Option(
        '-e', '--editable',
        dest='editables',
        metavar='path/url',
        default=[],
        action='append',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )
+
+
# Where `pip install -e <vcs-url>` checks sources out.
src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=get_src_prefix(),
    help='Directory to check out editable projects into. '
    'The default in a virtualenv is "<venv path>/src". '
    'The default for global installs is "<current dir>/src".'
) # type: Callable[..., Option]
+
+
+def _get_format_control(values, option):
+ # type: (Values, Option) -> Any
+ """Get a format_control object."""
+ return getattr(values, option.dest)
+
+
def _handle_no_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """optparse callback: merge *value* into the no_binary set, removing
    any conflicting entries from only_binary."""
    fmt = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(value, fmt.no_binary, fmt.only_binary)
+
+
def _handle_only_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """optparse callback: merge *value* into the only_binary set, removing
    any conflicting entries from no_binary."""
    fmt = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(value, fmt.only_binary, fmt.no_binary)
+
+
def no_binary():
    # type: () -> Option
    """Build the repeatable --no-binary option."""
    return Option(
        "--no-binary",
        dest="format_control",
        action="callback",
        callback=_handle_no_binary,
        type="str",
        # Fresh FormatControl per call: append-style state must not leak
        # between parses.
        default=FormatControl(set(), set()),
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.",
    )
+
+
def only_binary():
    # type: () -> Option
    """Build the repeatable --only-binary option."""
    return Option(
        "--only-binary",
        dest="format_control",
        action="callback",
        callback=_handle_only_binary,
        type="str",
        # Fresh FormatControl per call: append-style state must not leak
        # between parses.
        default=FormatControl(set(), set()),
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.",
    )
+
+
# Target-platform override for wheel selection (see make_target_python).
platform = partial(
    Option,
    '--platform',
    dest='platform',
    metavar='platform',
    default=None,
    help=("Only use wheels compatible with <platform>. "
          "Defaults to the platform of the running system."),
) # type: Callable[..., Option]
+
+
+# This was made a separate function for unit-testing purposes.
+def _convert_python_version(value):
+ # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
+ """
+ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
+
+ :return: A 2-tuple (version_info, error_msg), where `error_msg` is
+ non-None if and only if there was a parsing error.
+ """
+ if not value:
+ # The empty string is the same as not providing a value.
+ return (None, None)
+
+ parts = value.split('.')
+ if len(parts) > 3:
+ return ((), 'at most three version parts are allowed')
+
+ if len(parts) == 1:
+ # Then we are in the case of "3" or "37".
+ value = parts[0]
+ if len(value) > 1:
+ parts = [value[0], value[1:]]
+
+ try:
+ version_info = tuple(int(part) for part in parts)
+ except ValueError:
+ return ((), 'each version part must be an integer')
+
+ return (version_info, None)
+
+
def _handle_python_version(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    optparse callback for --python-version: parse and store the value.
    """
    version_info, error_msg = _convert_python_version(value)
    if error_msg is not None:
        raise_option_error(
            parser,
            option=option,
            msg='invalid --python-version value: {!r}: {}'.format(
                value, error_msg,
            ),
        )

    parser.values.python_version = version_info
+
+
# Parsed via _handle_python_version into a tuple of ints.
python_version = partial(
    Option,
    '--python-version',
    dest='python_version',
    metavar='python_version',
    action='callback',
    callback=_handle_python_version, type='str',
    default=None,
    help=dedent("""\
    The Python interpreter version to use for wheel and "Requires-Python"
    compatibility checks. Defaults to a version derived from the running
    interpreter. The version can be specified using up to three dot-separated
    integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
    version can also be given as a string without dots (e.g. "37" for 3.7.0).
    """),
) # type: Callable[..., Option]
+
+
# Interpreter-implementation / ABI overrides for wheel selection.
implementation = partial(
    Option,
    '--implementation',
    dest='implementation',
    metavar='implementation',
    default=None,
    help=("Only use wheels compatible with Python "
          "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
          " or 'ip'. If not specified, then the current "
          "interpreter implementation is used. Use 'py' to force "
          "implementation-agnostic wheels."),
) # type: Callable[..., Option]


abi = partial(
    Option,
    '--abi',
    dest='abi',
    metavar='abi',
    default=None,
    help=("Only use wheels compatible with Python "
          "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
          "current interpreter abi tag is used. Generally "
          "you will need to specify --implementation, "
          "--platform, and --python-version when using "
          "this option."),
) # type: Callable[..., Option]
+
+
def add_target_python_options(cmd_opts):
    # type: (OptionGroup) -> None
    """Add the options that select the target interpreter/platform."""
    for make_opt in (platform, python_version, implementation, abi):
        cmd_opts.add_option(make_opt())
+
+
def make_target_python(options):
    # type: (Values) -> TargetPython
    """Build a TargetPython from the --platform, --python-version, --abi
    and --implementation options."""
    return TargetPython(
        platform=options.platform,
        py_version_info=options.python_version,
        abi=options.abi,
        implementation=options.implementation,
    )
+
+
def prefer_binary():
    # type: () -> Option
    """Build the --prefer-binary flag."""
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        default=False,
        action="store_true",
        help="Prefer older binary packages over newer source packages."
    )
+
+
# Download/wheel cache location; --no-cache-dir below overrides it.
cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
) # type: Callable[..., Option]
+
+
def _handle_no_cache_dir(option, opt, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # value is None for a plain command-line --no-cache-dir (the option
    # takes no argument), but can be a string when triggered through e.g.
    # the PIP_NO_CACHE_DIR environment variable.
    if value is not None:
        try:
            # Parse only for error-checking; the result is ignored below.
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Historical quirk: even values that parse falsy ("no", "false")
    # disable the cache, because that is what older pip versions did and
    # users rely on it. So any valid form of the option disables caching.
    parser.values.cache_dir = False
+
+
# Cache / build-behaviour options.
no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="callback",
    callback=_handle_no_cache_dir,
    help="Disable the cache.",
) # type: Callable[..., Option]

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
) # type: Callable[..., Option]

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
) # type: Callable[..., Option]

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
) # type: Callable[..., Option]

no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
) # type: Callable[..., Option]
+
+
def _handle_no_use_pep517(option, opt, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        # Any explicit value is rejected outright (unlike --no-cache-dir,
        # which tolerates one): the user should set PIP_USE_PEP517 instead.
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False
+
+
# PEP 517 toggles share one dest; --no-use-pep517 goes through a callback
# so a stray value can be rejected (see _handle_no_use_pep517).
use_pep517 = partial(
    Option,
    '--use-pep517',
    dest='use_pep517',
    action='store_true',
    default=None,
    help='Use PEP 517 for building source distributions '
         '(use --no-use-pep517 to force legacy behaviour).'
) # type: Any

no_use_pep517 = partial(
    Option,
    '--no-use-pep517',
    dest='use_pep517',
    action='callback',
    callback=_handle_no_use_pep517,
    default=None,
    help=SUPPRESS_HELP
) # type: Any

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.",
) # type: Callable[..., Option]

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
) # type: Callable[..., Option]

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories."
) # type: Callable[..., Option]

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
) # type: Callable[..., Option]

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
) # type: Callable[..., Option]


# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
) # type: Callable[..., Option]
+
+
def _handle_merge_hash(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    algo, sep, digest = value.partition(':')
    if not sep:
        # No colon at all: not in "algo:digest" form (parser.error exits).
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)
+
+
# Hash-checking and list-filtering options.
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_handle_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
) # type: Callable[..., Option]


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
) # type: Callable[..., Option]


list_path = partial(
    Option,
    '--path',
    dest='path',
    action='append',
    help='Restrict to the specified installation path for listing '
         'packages (can be used multiple times).'
) # type: Callable[..., Option]
+
+
def check_list_path_option(options):
    # type: (Values) -> None
    """Reject --path combined with --user or --local."""
    if not options.path:
        return
    if options.user or options.local:
        raise CommandError(
            "Cannot combine '--path' with '--user' or '--local'"
        )
+
+
+##########
+# groups #
+##########
+
# Option groups: each value is a list of factories (see make_option_group),
# instantiated fresh per parser.
general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
    ]
} # type: Dict[str, Any]

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ]
} # type: Dict[str, Any]
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py
new file mode 100644
index 00000000..6d0b719a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py
@@ -0,0 +1,98 @@
+"""A single place for constructing and exposing the main parser
+"""
+
+import os
+import sys
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.parser import (
+ ConfigOptionParser, UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.commands import (
+ commands_dict, get_similar_commands, get_summaries,
+)
+from pip._internal.exceptions import CommandError
+from pip._internal.utils.misc import get_pip_version, get_prog
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Tuple, List
+
+
+__all__ = ["create_main_parser", "parse_command"]
+
+
def create_main_parser():
    # type: () -> ConfigOptionParser
    """Creates and returns the main parser for pip's CLI
    """
    parser = ConfigOptionParser(
        usage='\n%prog <command> [options]',
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name='global',
        prog=get_prog(),
    )
    parser.disable_interspersed_args()

    parser.version = get_pip_version()

    # add the general options
    parser.add_option_group(
        cmdoptions.make_option_group(cmdoptions.general_group, parser)
    )

    # so the help formatter knows
    parser.main = True  # type: ignore

    # one "<name>  <summary>" row per command in the description
    rows = ['%-27s %s' % (name, summary) for name, summary in get_summaries()]
    parser.description = '\n'.join([''] + rows)

    return parser
+
+
def parse_command(args):
    # type: (List[str]) -> Tuple[str, List[str]]
    """Split *args* into the subcommand name and its remaining arguments."""
    parser = create_main_parser()

    # The parser has interspersed args disabled, so everything from the
    # first positional argument on stays untouched in args_else, e.g.:
    #   args: ['--timeout=5', 'install', '--user', 'INITools']
    #   general_options: ['--timeout==5']
    #   args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version short-circuits everything else.
    if general_options.version:
        sys.stdout.write(parser.version)  # type: ignore
        sys.stdout.write(os.linesep)
        sys.exit()

    # bare "pip" or "pip help" -> print usage and exit
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)
        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)
        raise CommandError(' - '.join(msg))

    # everything the user typed, minus the first occurrence of the command
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py
new file mode 100644
index 00000000..e1eaac42
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/parser.py
@@ -0,0 +1,261 @@
+"""Base option parser setup"""
+from __future__ import absolute_import
+
+import logging
+import optparse
+import sys
+import textwrap
+from distutils.util import strtobool
+
+from pip._vendor.six import string_types
+
+from pip._internal.cli.status_codes import UNKNOWN_ERROR
+from pip._internal.configuration import Configuration, ConfigurationError
+from pip._internal.utils.compat import get_terminal_size
+
+logger = logging.getLogger(__name__)
+
+
+class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
+ """A prettier/less verbose help formatter for optparse."""
+
+ def __init__(self, *args, **kwargs):
+ # help position must be aligned with __init__.parseopts.description
+ kwargs['max_help_position'] = 30
+ kwargs['indent_increment'] = 1
+ kwargs['width'] = get_terminal_size()[0] - 2
+ optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
+
+ def format_option_strings(self, option):
+ return self._format_option_strings(option, ' <%s>', ', ')
+
+ def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
+ """
+ Return a comma-separated list of option strings and metavars.
+
+ :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
+ :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
+ :param optsep: separator
+ """
+ opts = []
+
+ if option._short_opts:
+ opts.append(option._short_opts[0])
+ if option._long_opts:
+ opts.append(option._long_opts[0])
+ if len(opts) > 1:
+ opts.insert(1, optsep)
+
+ if option.takes_value():
+ metavar = option.metavar or option.dest.lower()
+ opts.append(mvarfmt % metavar.lower())
+
+ return ''.join(opts)
+
+ def format_heading(self, heading):
+ if heading == 'Options':
+ return ''
+ return heading + ':\n'
+
+ def format_usage(self, usage):
+ """
+ Ensure there is only one newline between usage and the first heading
+ if there is no description.
+ """
+ msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
+ return msg
+
+ def format_description(self, description):
+ # leave full control over description to us
+ if description:
+ if hasattr(self.parser, 'main'):
+ label = 'Commands'
+ else:
+ label = 'Description'
+ # some doc strings have initial newlines, some don't
+ description = description.lstrip('\n')
+ # some doc strings have final newlines and spaces, some don't
+ description = description.rstrip()
+ # dedent, then reindent
+ description = self.indent_lines(textwrap.dedent(description), " ")
+ description = '%s:\n%s\n' % (label, description)
+ return description
+ else:
+ return ''
+
+ def format_epilog(self, epilog):
+ # leave full control over epilog to us
+ if epilog:
+ return epilog
+ else:
+ return ''
+
+ def indent_lines(self, text, indent):
+ new_lines = [indent + line for line in text.split('\n')]
+ return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+ """Custom help formatter for use in ConfigOptionParser.
+
+ This is updates the defaults before expanding them, allowing
+ them to show up correctly in the help listing.
+ """
+
+ def expand_default(self, option):
+ if self.parser is not None:
+ self.parser._update_defaults(self.parser.defaults)
+ return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class CustomOptionParser(optparse.OptionParser):
+
+ def insert_option_group(self, idx, *args, **kwargs):
+ """Insert an OptionGroup at a given position."""
+ group = self.add_option_group(*args, **kwargs)
+
+ self.option_groups.pop()
+ self.option_groups.insert(idx, group)
+
+ return group
+
+ @property
+ def option_list_all(self):
+ """Get a list of all options, including those in option groups."""
+ res = self.option_list[:]
+ for i in self.option_groups:
+ res.extend(i.option_list)
+
+ return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+ """Custom option parser which updates its defaults by checking the
+ configuration files and environmental variables"""
+
+ def __init__(self, *args, **kwargs):
+ self.name = kwargs.pop('name')
+
+ isolated = kwargs.pop("isolated", False)
+ self.config = Configuration(isolated)
+
+ assert self.name
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def check_default(self, option, key, val):
+ try:
+ return option.check_value(key, val)
+ except optparse.OptionValueError as exc:
+ print("An error occurred during configuration: %s" % exc)
+ sys.exit(3)
+
+ def _get_ordered_configuration_items(self):
+ # Configuration gives keys in an unordered manner. Order them.
+ override_order = ["global", self.name, ":env:"]
+
+ # Pool the options into different groups
+ section_items = {name: [] for name in override_order}
+ for section_key, val in self.config.items():
+ # ignore empty values
+ if not val:
+ logger.debug(
+ "Ignoring configuration key '%s' as it's value is empty.",
+ section_key
+ )
+ continue
+
+ section, key = section_key.split(".", 1)
+ if section in override_order:
+ section_items[section].append((key, val))
+
+ # Yield each group in their override order
+ for section in override_order:
+ for key, val in section_items[section]:
+ yield key, val
+
+ def _update_defaults(self, defaults):
+ """Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists)."""
+
+ # Accumulate complex default state.
+ self.values = optparse.Values(self.defaults)
+ late_eval = set()
+ # Then set the options with those values
+ for key, val in self._get_ordered_configuration_items():
+ # '--' because configuration supports only long names
+ option = self.get_option('--' + key)
+
+ # Ignore options not present in this parser. E.g. non-globals put
+ # in [global] by users that want them to apply to all applicable
+ # commands.
+ if option is None:
+ continue
+
+ if option.action in ('store_true', 'store_false', 'count'):
+ try:
+ val = strtobool(val)
+ except ValueError:
+ error_msg = invalid_config_error_message(
+ option.action, key, val
+ )
+ self.error(error_msg)
+
+ elif option.action == 'append':
+ val = val.split()
+ val = [self.check_default(option, key, v) for v in val]
+ elif option.action == 'callback':
+ late_eval.add(option.dest)
+ opt_str = option.get_opt_string()
+ val = option.convert_value(opt_str, val)
+ # From take_action
+ args = option.callback_args or ()
+ kwargs = option.callback_kwargs or {}
+ option.callback(option, opt_str, val, self, *args, **kwargs)
+ else:
+ val = self.check_default(option, key, val)
+
+ defaults[option.dest] = val
+
+ for key in late_eval:
+ defaults[key] = getattr(self.values, key)
+ self.values = None
+ return defaults
+
+ def get_default_values(self):
+ """Overriding to make updating the defaults after instantiation of
+ the option parser possible, _update_defaults() does the dirty work."""
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+ # Load the configuration, or error out in case of an error
+ try:
+ self.config.load()
+ except ConfigurationError as err:
+ self.exit(UNKNOWN_ERROR, str(err))
+
+ defaults = self._update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isinstance(default, string_types):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+ def error(self, msg):
+ self.print_usage(sys.stderr)
+ self.exit(UNKNOWN_ERROR, "%s\n" % msg)
+
+
+def invalid_config_error_message(action, key, val):
+ """Returns a better error message when invalid configuration option
+ is provided."""
+ if action in ('store_true', 'store_false'):
+ return ("{0} is not a valid value for {1} option, "
+ "please specify a boolean value like yes/no, "
+ "true/false or 1/0 instead.").format(val, key)
+
+ return ("{0} is not a valid value for {1} option, "
+ "please specify a numerical value like 1/0 "
+ "instead.").format(val, key)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py
new file mode 100644
index 00000000..275360a3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py
new file mode 100644
index 00000000..9e0ab86b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__init__.py
@@ -0,0 +1,81 @@
+"""
+Package containing all pip commands
+"""
+from __future__ import absolute_import
+
+from pip._internal.commands.completion import CompletionCommand
+from pip._internal.commands.configuration import ConfigurationCommand
+from pip._internal.commands.debug import DebugCommand
+from pip._internal.commands.download import DownloadCommand
+from pip._internal.commands.freeze import FreezeCommand
+from pip._internal.commands.hash import HashCommand
+from pip._internal.commands.help import HelpCommand
+from pip._internal.commands.list import ListCommand
+from pip._internal.commands.check import CheckCommand
+from pip._internal.commands.search import SearchCommand
+from pip._internal.commands.show import ShowCommand
+from pip._internal.commands.install import InstallCommand
+from pip._internal.commands.uninstall import UninstallCommand
+from pip._internal.commands.wheel import WheelCommand
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Type
+ from pip._internal.cli.base_command import Command
+
+commands_order = [
+ InstallCommand,
+ DownloadCommand,
+ UninstallCommand,
+ FreezeCommand,
+ ListCommand,
+ ShowCommand,
+ CheckCommand,
+ ConfigurationCommand,
+ SearchCommand,
+ WheelCommand,
+ HashCommand,
+ CompletionCommand,
+ DebugCommand,
+ HelpCommand,
+] # type: List[Type[Command]]
+
+commands_dict = {c.name: c for c in commands_order}
+
+
+def get_summaries(ordered=True):
+ """Yields sorted (command name, command summary) tuples."""
+
+ if ordered:
+ cmditems = _sort_commands(commands_dict, commands_order)
+ else:
+ cmditems = commands_dict.items()
+
+ for name, command_class in cmditems:
+ yield (name, command_class.summary)
+
+
+def get_similar_commands(name):
+ """Command name auto-correct."""
+ from difflib import get_close_matches
+
+ name = name.lower()
+
+ close_commands = get_close_matches(name, commands_dict.keys())
+
+ if close_commands:
+ return close_commands[0]
+ else:
+ return False
+
+
+def _sort_commands(cmddict, order):
+ def keyfn(key):
+ try:
+ return order.index(key[1])
+ except ValueError:
+ # unordered items should come last
+ return 0xff
+
+ return sorted(cmddict.items(), key=keyfn)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..2cde7849
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pyc
new file mode 100644
index 00000000..144e4054
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/check.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pyc
new file mode 100644
index 00000000..7fdc69fe
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/completion.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc
new file mode 100644
index 00000000..309428ab
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pyc
new file mode 100644
index 00000000..f7cac51b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/debug.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pyc
new file mode 100644
index 00000000..2c9d0b1c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/download.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc
new file mode 100644
index 00000000..4bc32147
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pyc
new file mode 100644
index 00000000..353ce32a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/hash.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pyc
new file mode 100644
index 00000000..a310cb1d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/help.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pyc
new file mode 100644
index 00000000..edf1d4e7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/install.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pyc
new file mode 100644
index 00000000..6ff853c7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/list.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pyc
new file mode 100644
index 00000000..dd9671f0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/search.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pyc
new file mode 100644
index 00000000..44c0e016
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/show.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc
new file mode 100644
index 00000000..6879aed7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc
new file mode 100644
index 00000000..05e74585
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py
new file mode 100644
index 00000000..801cecc0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/check.py
@@ -0,0 +1,41 @@
+import logging
+
+from pip._internal.cli.base_command import Command
+from pip._internal.operations.check import (
+ check_package_set, create_package_set_from_installed,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+ """Verify installed packages have compatible dependencies."""
+ name = 'check'
+ usage = """
+ %prog [options]"""
+ summary = 'Verify installed packages have compatible dependencies.'
+
+ def run(self, options, args):
+ package_set, parsing_probs = create_package_set_from_installed()
+ missing, conflicting = check_package_set(package_set)
+
+ for project_name in missing:
+ version = package_set[project_name].version
+ for dependency in missing[project_name]:
+ logger.info(
+ "%s %s requires %s, which is not installed.",
+ project_name, version, dependency[0],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name].version
+ for dep_name, dep_version, req in conflicting[project_name]:
+ logger.info(
+ "%s %s has requirement %s, but you have %s %s.",
+ project_name, version, req, dep_name, dep_version,
+ )
+
+ if missing or conflicting or parsing_probs:
+ return 1
+ else:
+ logger.info("No broken requirements found.")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py
new file mode 100644
index 00000000..2fcdd393
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/completion.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+
+import sys
+import textwrap
+
+from pip._internal.cli.base_command import Command
+from pip._internal.utils.misc import get_prog
+
+BASE_COMPLETION = """
+# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
+"""
+
+COMPLETION_SCRIPTS = {
+ 'bash': """
+ _pip_completion()
+ {
+ COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ PIP_AUTO_COMPLETE=1 $1 ) )
+ }
+ complete -o default -F _pip_completion %(prog)s
+ """,
+ 'zsh': """
+ function _pip_completion {
+ local words cword
+ read -Ac words
+ read -cn cword
+ reply=( $( COMP_WORDS="$words[*]" \\
+ COMP_CWORD=$(( cword-1 )) \\
+ PIP_AUTO_COMPLETE=1 $words[1] ) )
+ }
+ compctl -K _pip_completion %(prog)s
+ """,
+ 'fish': """
+ function __fish_complete_pip
+ set -lx COMP_WORDS (commandline -o) ""
+ set -lx COMP_CWORD ( \\
+ math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
+ )
+ set -lx PIP_AUTO_COMPLETE 1
+ string split \\ -- (eval $COMP_WORDS[1])
+ end
+ complete -fa "(__fish_complete_pip)" -c %(prog)s
+ """,
+}
+
+
+class CompletionCommand(Command):
+ """A helper command to be used for command completion."""
+ name = 'completion'
+ summary = 'A helper command used for command completion.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(CompletionCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '--bash', '-b',
+ action='store_const',
+ const='bash',
+ dest='shell',
+ help='Emit completion code for bash')
+ cmd_opts.add_option(
+ '--zsh', '-z',
+ action='store_const',
+ const='zsh',
+ dest='shell',
+ help='Emit completion code for zsh')
+ cmd_opts.add_option(
+ '--fish', '-f',
+ action='store_const',
+ const='fish',
+ dest='shell',
+ help='Emit completion code for fish')
+
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ """Prints the completion code of the given shell"""
+ shells = COMPLETION_SCRIPTS.keys()
+ shell_options = ['--' + shell for shell in sorted(shells)]
+ if options.shell in shells:
+ script = textwrap.dedent(
+ COMPLETION_SCRIPTS.get(options.shell, '') % {
+ 'prog': get_prog(),
+ }
+ )
+ print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
+ else:
+ sys.stderr.write(
+ 'ERROR: You must pass %s\n' % ' or '.join(shell_options)
+ )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py
new file mode 100644
index 00000000..1ec77d2a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/configuration.py
@@ -0,0 +1,258 @@
+import logging
+import os
+import subprocess
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+ Configuration, get_configuration_files, kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.misc import get_prog
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+ """Manage local and global configuration.
+
+ Subcommands:
+
+ list: List the active configuration (or from the file specified)
+ edit: Edit the configuration file in an editor
+ get: Get the value associated with name
+ set: Set the name=value
+ unset: Unset the value associated with name
+
+ If none of --user, --global and --site are passed, a virtual
+ environment configuration file is used if one is active and the file
+ exists. Otherwise, all modifications happen on the to the user file by
+ default.
+ """
+
+ name = 'config'
+ usage = """
+ %prog [<file-option>] list
+ %prog [<file-option>] [--editor <editor-path>] edit
+
+ %prog [<file-option>] get name
+ %prog [<file-option>] set name value
+ %prog [<file-option>] unset name
+ """
+
+ summary = "Manage local and global configuration."
+
+ def __init__(self, *args, **kwargs):
+ super(ConfigurationCommand, self).__init__(*args, **kwargs)
+
+ self.configuration = None
+
+ self.cmd_opts.add_option(
+ '--editor',
+ dest='editor',
+ action='store',
+ default=None,
+ help=(
+ 'Editor to use to edit the file. Uses VISUAL or EDITOR '
+ 'environment variables if not provided.'
+ )
+ )
+
+ self.cmd_opts.add_option(
+ '--global',
+ dest='global_file',
+ action='store_true',
+ default=False,
+ help='Use the system-wide configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user_file',
+ action='store_true',
+ default=False,
+ help='Use the user configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--site',
+ dest='site_file',
+ action='store_true',
+ default=False,
+ help='Use the current environment configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--venv',
+ dest='venv_file',
+ action='store_true',
+ default=False,
+ help=(
+ '[Deprecated] Use the current environment configuration '
+ 'file in a virtual environment only'
+ )
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ handlers = {
+ "list": self.list_values,
+ "edit": self.open_in_editor,
+ "get": self.get_name,
+ "set": self.set_name_value,
+ "unset": self.unset_name
+ }
+
+ # Determine action
+ if not args or args[0] not in handlers:
+ logger.error("Need an action ({}) to perform.".format(
+ ", ".join(sorted(handlers)))
+ )
+ return ERROR
+
+ action = args[0]
+
+ # Determine which configuration files are to be loaded
+ # Depends on whether the command is modifying.
+ try:
+ load_only = self._determine_file(
+ options, need_value=(action in ["get", "set", "unset", "edit"])
+ )
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ # Load a new configuration
+ self.configuration = Configuration(
+ isolated=options.isolated_mode, load_only=load_only
+ )
+ self.configuration.load()
+
+ # Error handling happens here, not in the action-handlers.
+ try:
+ handlers[action](options, args[1:])
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ return SUCCESS
+
+ def _determine_file(self, options, need_value):
+ # Convert legacy venv_file option to site_file or error
+ if options.venv_file and not options.site_file:
+ if running_under_virtualenv():
+ options.site_file = True
+ deprecated(
+ "The --venv option has been deprecated.",
+ replacement="--site",
+ gone_in="19.3",
+ )
+ else:
+ raise PipError(
+ "Legacy --venv option requires a virtual environment. "
+ "Use --site instead."
+ )
+
+ file_options = [key for key, value in (
+ (kinds.USER, options.user_file),
+ (kinds.GLOBAL, options.global_file),
+ (kinds.SITE, options.site_file),
+ ) if value]
+
+ if not file_options:
+ if not need_value:
+ return None
+ # Default to user, unless there's a site file.
+ elif any(
+ os.path.exists(site_config_file)
+ for site_config_file in get_configuration_files()[kinds.SITE]
+ ):
+ return kinds.SITE
+ else:
+ return kinds.USER
+ elif len(file_options) == 1:
+ return file_options[0]
+
+ raise PipError(
+ "Need exactly one file to operate upon "
+ "(--user, --site, --global) to perform."
+ )
+
+ def list_values(self, options, args):
+ self._get_n_args(args, "list", n=0)
+
+ for key, value in sorted(self.configuration.items()):
+ logger.info("%s=%r", key, value)
+
+ def get_name(self, options, args):
+ key = self._get_n_args(args, "get [name]", n=1)
+ value = self.configuration.get_value(key)
+
+ logger.info("%s", value)
+
+ def set_name_value(self, options, args):
+ key, value = self._get_n_args(args, "set [name] [value]", n=2)
+ self.configuration.set_value(key, value)
+
+ self._save_configuration()
+
+ def unset_name(self, options, args):
+ key = self._get_n_args(args, "unset [name]", n=1)
+ self.configuration.unset_value(key)
+
+ self._save_configuration()
+
+ def open_in_editor(self, options, args):
+ editor = self._determine_editor(options)
+
+ fname = self.configuration.get_file_to_edit()
+ if fname is None:
+ raise PipError("Could not determine appropriate file.")
+
+ try:
+ subprocess.check_call([editor, fname])
+ except subprocess.CalledProcessError as e:
+ raise PipError(
+ "Editor Subprocess exited with exit code {}"
+ .format(e.returncode)
+ )
+
+ def _get_n_args(self, args, example, n):
+ """Helper to make sure the command got the right number of arguments
+ """
+ if len(args) != n:
+ msg = (
+ 'Got unexpected number of arguments, expected {}. '
+ '(example: "{} config {}")'
+ ).format(n, get_prog(), example)
+ raise PipError(msg)
+
+ if n == 1:
+ return args[0]
+ else:
+ return args
+
+ def _save_configuration(self):
+ # We successfully ran a modifying command. Need to save the
+ # configuration.
+ try:
+ self.configuration.save()
+ except Exception:
+ logger.error(
+ "Unable to save configuration. Please report this as a bug.",
+ exc_info=1
+ )
+ raise PipError("Internal Error.")
+
+ def _determine_editor(self, options):
+ if options.editor is not None:
+ return options.editor
+ elif "VISUAL" in os.environ:
+ return os.environ["VISUAL"]
+ elif "EDITOR" in os.environ:
+ return os.environ["EDITOR"]
+ else:
+ raise PipError("Could not determine editor to use.")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py
new file mode 100644
index 00000000..eb4f8c4e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/debug.py
@@ -0,0 +1,114 @@
+from __future__ import absolute_import
+
+import locale
+import logging
+import sys
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_pip_version
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import format_tag
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, List
+ from optparse import Values
+
+logger = logging.getLogger(__name__)
+
+
+def show_value(name, value):
+ # type: (str, str) -> None
+ logger.info('{}: {}'.format(name, value))
+
+
+def show_sys_implementation():
+ # type: () -> None
+ logger.info('sys.implementation:')
+ if hasattr(sys, 'implementation'):
+ implementation = sys.implementation # type: ignore
+ implementation_name = implementation.name
+ else:
+ implementation_name = ''
+
+ with indent_log():
+ show_value('name', implementation_name)
+
+
+def show_tags(options):
+ # type: (Values) -> None
+ tag_limit = 10
+
+ target_python = make_target_python(options)
+ tags = target_python.get_tags()
+
+ # Display the target options that were explicitly provided.
+ formatted_target = target_python.format_given()
+ suffix = ''
+ if formatted_target:
+ suffix = ' (target: {})'.format(formatted_target)
+
+ msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
+ logger.info(msg)
+
+ if options.verbose < 1 and len(tags) > tag_limit:
+ tags_limited = True
+ tags = tags[:tag_limit]
+ else:
+ tags_limited = False
+
+ with indent_log():
+ for tag in tags:
+ logger.info(format_tag(tag))
+
+ if tags_limited:
+ msg = (
+ '...\n'
+ '[First {tag_limit} tags shown. Pass --verbose to show all.]'
+ ).format(tag_limit=tag_limit)
+ logger.info(msg)
+
+
+class DebugCommand(Command):
+ """
+ Display debug information.
+ """
+
+ name = 'debug'
+ usage = """
+ %prog <options>"""
+ summary = 'Show information useful for debugging.'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(DebugCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+ cmdoptions.add_target_python_options(cmd_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> int
+ logger.warning(
+ "This command is only meant for debugging. "
+ "Do not use this with automation for parsing and getting these "
+ "details, since the output and options of this command may "
+ "change without notice."
+ )
+ show_value('pip version', get_pip_version())
+ show_value('sys.version', sys.version)
+ show_value('sys.executable', sys.executable)
+ show_value('sys.getdefaultencoding', sys.getdefaultencoding())
+ show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
+ show_value(
+ 'locale.getpreferredencoding', locale.getpreferredencoding(),
+ )
+ show_value('sys.platform', sys.platform)
+ show_sys_implementation()
+
+ show_tags(options)
+
+ return SUCCESS
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py
new file mode 100644
index 00000000..5642b561
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/download.py
@@ -0,0 +1,168 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import RequirementCommand
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.legacy_resolve import Resolver
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        cmdoptions.add_target_python_options(cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve the requested requirements and download their archives.

        Returns the resolved RequirementSet.
        """
        # Downloading always resolves from scratch: installed distributions
        # must not satisfy any requirement.
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            target_python = make_target_python(options)
            finder = self._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
            )
            # Keep the build dir only when the user asked for --no-clean or
            # supplied an explicit --build directory.
            build_delete = (not (options.no_clean or options.build_dir))
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="download"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )
                # Last argument (wheel_cache) is None: downloads must come
                # from the index / links, never from a local wheel cache.
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                    req_tracker=req_tracker,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=None,
                    use_user_site=False,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=options.ignore_dependencies,
                    py_version_info=options.python_version,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                )
                # Resolution triggers the actual downloads as a side effect
                # of preparing each requirement.
                resolver.resolve(requirement_set)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info('Successfully downloaded %s', downloaded)

            # Clean up
            if not options.no_clean:
                requirement_set.cleanup_files()

        return requirement_set
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py
new file mode 100644
index 00000000..9fc5b046
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/freeze.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+
+import sys
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.models.format_control import FormatControl
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
# Distribution-management packages omitted from freeze output by default;
# `pip freeze --all` includes them (see FreezeCommand's --all option).
DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
+
+
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    # Both log streams point at stderr so that stdout carries only the
    # generated requirements lines and can be redirected into a file.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' %s' % ', '.join(DEV_PKGS))
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable package from output.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # Empty FormatControl: freeze imposes no binary/source restrictions.
        format_control = FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        # Always hide stdlib packages; additionally hide pip's own tooling
        # (DEV_PKGS) unless --all was given.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        cmdoptions.check_list_path_option(options)

        freeze_kwargs = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip,
            exclude_editable=options.exclude_editable,
        )

        # Write to stdout directly (logging goes to stderr per log_streams);
        # the wheel cache's temp dir is removed even if freeze() raises.
        try:
            for line in freeze(**freeze_kwargs):
                sys.stdout.write(line + '\n')
        finally:
            wheel_cache.cleanup()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 00000000..423440e9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,57 @@
+from __future__ import absolute_import
+
+import hashlib
+import logging
+import sys
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks
+
+logger = logging.getLogger(__name__)
+
+
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        # Only hashes strong enough for --require-hashes mode are offered.
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # Nothing to hash: show usage on stderr and signal failure.
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        for path in args:
            digest = _hash_of_file(path, options.algorithm)
            logger.info('%s:\n--hash=%s:%s',
                        path, options.algorithm, digest)
+
+
def _hash_of_file(path, algorithm):
    """Return the hex digest of the file at ``path``.

    ``algorithm`` is any name accepted by ``hashlib.new`` (the command
    restricts it to ``STRONG_HASHES``).
    """
    with open(path, 'rb') as archive:
        # Named ``file_hash`` (not ``hash``) so the builtin is not shadowed.
        file_hash = hashlib.new(algorithm)
        # Stream in chunks so large archives are never read fully into memory.
        for chunk in read_chunks(archive):
            file_hash.update(chunk)
    return file_hash.hexdigest()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py
new file mode 100644
index 00000000..49a81cbb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/help.py
@@ -0,0 +1,37 @@
+from __future__ import absolute_import
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+
+
class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'
    ignore_require_venv = True

    def run(self, options, args):
        # Imported here to avoid a circular import at module load time.
        from pip._internal.commands import commands_dict, get_similar_commands

        # 'pip help' with no args is handled by pip.__init__.parseopt()
        if not args:
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)
            parts = ['unknown command "%s"' % cmd_name]
            if guess:
                parts.append('maybe you meant "%s"' % guess)
            raise CommandError(' - '.join(parts))

        command = commands_dict[cmd_name]()
        command.parser.print_help()

        return SUCCESS
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py
new file mode 100644
index 00000000..ebeceacf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/install.py
@@ -0,0 +1,580 @@
+from __future__ import absolute_import
+
+import errno
+import logging
+import operator
+import os
+import shutil
+from optparse import SUPPRESS_HELP
+
+from pip._vendor import pkg_resources
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import RequirementCommand
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import ERROR
+from pip._internal.exceptions import (
+ CommandError, InstallationError, PreviousBuildDirError,
+)
+from pip._internal.legacy_resolve import Resolver
+from pip._internal.locations import distutils_scheme
+from pip._internal.operations.check import check_install_conflicts
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet, install_given_reqs
+from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import (
+ ensure_dir, get_installed_version,
+ protect_pip_from_modification_on_windows,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.virtualenv import virtualenv_no_global
+from pip._internal.wheel import WheelBuilder
+
+logger = logging.getLogger(__name__)
+
+
def is_wheel_installed():
    """
    Return whether the wheel package is installed.
    """
    try:
        import wheel  # noqa: F401
        return True
    except ImportError:
        return False
+
+
def build_wheels(builder, pep517_requirements, legacy_requirements, session):
    """
    Build wheels for requirements, depending on whether wheel is installed.
    """
    # Legacy (setup.py) requirements can fall back to a direct install, so
    # they are only built when the wheel package is available.
    should_build_legacy = is_wheel_installed()

    # PEP 517 requirements are always built; their failures are reported to
    # the caller because they have no direct-install fallback.
    build_failures = builder.build(
        pep517_requirements,
        session=session, autobuilding=True
    )

    if should_build_legacy:
        # Failures here are deliberately ignored: such requirements simply
        # fall through to a direct install.
        builder.build(
            legacy_requirements,
            session=session, autobuilding=True
        )

    return build_failures
+
+
class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files," which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.pre())

        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )
        cmdoptions.add_target_python_options(cmd_opts)

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")
        cmd_opts.add_option(
            '--no-user',
            dest='use_user_site',
            action='store_false',
            help=SUPPRESS_HELP)
        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")
        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='only-if-needed',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled '
                 '[default: %default]. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - are upgraded only when they do not '
                 'satisfy the requirements of the upgraded package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='Reinstall all packages even if they are already '
                 'up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )

        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve, build wheels for, and install the requested requirements.

        Returns the resolved RequirementSet on success, or ERROR when an
        EnvironmentError prevented installation.
        """
        cmdoptions.check_install_build_global(options)
        # Without --upgrade, installed versions are left alone unless they
        # fail to satisfy a requirement ("to-satisfy-only").
        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        cmdoptions.check_dist_restriction(options, check_target=True)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        # With --target, packages are first staged into this temp dir and
        # then moved into place by _handle_target_dir below.
        target_temp_dir = TempDirectory(kind="target")
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir.create()
            install_options.append('--home=' + target_temp_dir.path)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            target_python = make_target_python(options)
            finder = self._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
                ignore_requires_python=options.ignore_requires_python,
            )
            # Keep the build dir only for --no-clean or an explicit --build.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="install"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                    check_supported_wheels=not options.target_dir,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                        use_pep517=options.use_pep517
                    )
                    resolver.resolve(requirement_set)

                    protect_pip_from_modification_on_windows(
                        modifying_pip=requirement_set.has_requirement("pip")
                    )

                    # Consider legacy and PEP517-using requirements separately
                    legacy_requirements = []
                    pep517_requirements = []
                    for req in requirement_set.requirements.values():
                        if req.use_pep517:
                            pep517_requirements.append(req)
                        else:
                            legacy_requirements.append(req)

                    wheel_builder = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=[], global_options=[],
                    )

                    build_failures = build_wheels(
                        builder=wheel_builder,
                        pep517_requirements=pep517_requirements,
                        legacy_requirements=legacy_requirements,
                        session=session,
                    )

                    # If we're using PEP 517, we cannot do a direct install
                    # so we fail here.
                    if build_failures:
                        raise InstallationError(
                            "Could not build wheels for {} which use"
                            " PEP 517 and cannot be installed directly".format(
                                ", ".join(r.name for r in build_failures)))

                    to_install = resolver.get_installation_order(
                        requirement_set
                    )

                    # Consistency Checking of the package set we're installing.
                    should_warn_about_conflicts = (
                        not options.ignore_dependencies and
                        options.warn_about_conflicts
                    )
                    if should_warn_about_conflicts:
                        self._warn_about_conflicts(to_install)

                    # Don't warn about script install locations if
                    # --target has been specified
                    warn_script_location = options.warn_script_location
                    if options.target_dir:
                        warn_script_location = False

                    installed = install_given_reqs(
                        to_install,
                        install_options,
                        global_options,
                        root=options.root_path,
                        home=target_temp_dir.path,
                        prefix=options.prefix_path,
                        pycompile=options.compile,
                        warn_script_location=warn_script_location,
                        use_user_site=options.use_user_site,
                    )

                    # Build a summary line "name-version ..." by resolving the
                    # installed versions against the destination directories.
                    lib_locations = get_lib_location_guesses(
                        user=options.use_user_site,
                        home=target_temp_dir.path,
                        root=options.root_path,
                        prefix=options.prefix_path,
                        isolated=options.isolated_mode,
                    )
                    working_set = pkg_resources.WorkingSet(lib_locations)

                    reqs = sorted(installed, key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            installed_version = get_installed_version(
                                req.name, working_set=working_set
                            )
                            if installed_version:
                                item += '-' + installed_version
                        except Exception:
                            # Version lookup is best-effort; the name alone
                            # is still reported.
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s', installed)
                except EnvironmentError as error:
                    show_traceback = (self.verbosity >= 1)

                    message = create_env_error_message(
                        error, show_traceback, options.use_user_site,
                    )
                    logger.error(message, exc_info=show_traceback)

                    return ERROR
                except PreviousBuildDirError:
                    # Preserve the build dir so the user can inspect it.
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        return requirement_set

    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
        """Move the staged install from target_temp_dir into target_dir."""
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        with target_temp_dir:
            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            scheme = distutils_scheme('', home=target_temp_dir.path)
            purelib_dir = scheme['purelib']
            platlib_dir = scheme['platlib']
            data_dir = scheme['data']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)
            if os.path.exists(data_dir):
                lib_dir_list.append(data_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    if lib_dir == data_dir:
                        # Skip data entries shadowed by a lib dir entry.
                        ddir = os.path.join(data_dir, item)
                        if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                            continue
                    target_item_dir = os.path.join(target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )

    def _warn_about_conflicts(self, to_install):
        """Log dependency conflicts this install would create; never raises."""
        try:
            package_set, _dep_info = check_install_conflicts(to_install)
        except Exception:
            logger.error("Error checking for conflicts.", exc_info=True)
            return
        missing, conflicting = _dep_info

        # NOTE: There is some duplication here from pip check
        for project_name in missing:
            version = package_set[project_name][0]
            for dependency in missing[project_name]:
                logger.critical(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[1],
                )

        for project_name in conflicting:
            version = package_set[project_name][0]
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.critical(
                    "%s %s has requirement %s, but you'll have %s %s which is "
                    "incompatible.",
                    project_name, version, req, dep_name, dep_version,
                )
+
+
def get_lib_location_guesses(*args, **kwargs):
    # Resolve a distutils install scheme for an unnamed distribution and
    # report the two directories libraries may have been installed into.
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme[key] for key in ('purelib', 'platlib')]
+
+
def create_env_error_message(error, show_traceback, using_user_site):
    """Format a user-facing message for an EnvironmentError.

    It may occur anytime during the execution of the install command.
    """
    pieces = ["Could not install packages due to an EnvironmentError"]

    # Only embed the error text when no traceback will be shown.
    if show_traceback:
        pieces.append(".")
    else:
        pieces.append(": ")
        pieces.append(str(error))

    # Split the error indication from a helper message (if any).
    pieces[-1] += "\n"

    # For permission errors, suggest --user (when applicable) or a
    # permissions check.
    if error.errno == errno.EACCES:
        if using_user_site:
            pieces.append("Check the permissions")
        else:
            pieces.append(
                "Consider using the `--user` option or check the permissions"
            )
        pieces.append(".\n")

    return "".join(pieces).strip() + "\n"
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py
new file mode 100644
index 00000000..cf71b13e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/list.py
@@ -0,0 +1,311 @@
+from __future__ import absolute_import
+
+import json
+import logging
+
+from pip._vendor import six
+from pip._vendor.six.moves import zip_longest
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_search_scope
+from pip._internal.exceptions import CommandError
+from pip._internal.index import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.utils.misc import (
+ dist_is_editable, get_installed_distributions,
+)
+from pip._internal.utils.packaging import get_installer
+
+logger = logging.getLogger(__name__)
+
+
class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        """Register the list-specific options plus the shared index options."""
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(cmdoptions.list_path())
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "or json",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        # --exclude-editable / --include-editable toggle the same dest;
        # the default (True) is declared on the --include-editable option.
        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, session):
        """
        Create a package finder appropriate to this list command.
        """
        search_scope = make_search_scope(options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            search_scope=search_scope,
            selection_prefs=selection_prefs,
            trusted_hosts=options.trusted_hosts,
            session=session,
        )

    def run(self, options, args):
        """Collect the installed distributions, apply the requested
        filters (--not-required / --outdated / --uptodate), and print them
        in the selected --format.
        """
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        cmdoptions.check_list_path_option(options)

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
            paths=options.path,
        )

        # get_not_required must be called firstly in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        """Return only the packages whose latest index version is newer
        than the installed one.
        """
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        """Return only the packages whose installed version matches the
        latest version available on the index.
        """
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        """Return (as a set) the packages no other installed package
        depends on.
        """
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        # NOTE: returns a set, unlike the lists produced elsewhere.
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        """Yield each package annotated with ``latest_version`` and
        ``latest_filetype`` looked up from the package index; packages with
        no acceptable candidate are silently skipped.
        """
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.project_name,
                )
                best_candidate = evaluator.get_best_candidate(all_candidates)
                if best_candidate is None:
                    continue

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_package_listing(self, packages, options):
        """Sort packages case-insensitively by name and render them in the
        format selected by --format (columns, freeze, or json).
        """
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    logger.info("%s==%s (%s)", dist.project_name,
                                dist.version, dist.location)
                else:
                    logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))

    def output_package_listing_columns(self, data, header):
        """Log the rows in ``data`` as aligned columns, preceded by
        ``header`` and a dashed separator line.
        """
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)
+
+
def tabulate(vals):
    """Render rows of cells as aligned text columns.

    Returns a tuple ``(lines, widths)`` where ``lines`` are the rendered
    rows (each cell left-justified to its column width and joined by a
    single space) and ``widths`` are the computed column widths.

    From pfmoore on GitHub:
    https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    """
    assert len(vals) > 0

    # One width slot per column of the widest row.
    widths = [0] * max(len(row) for row in vals)
    for row in vals:
        widths = [max(w, len(str(cell)))
                  for w, cell in zip_longest(widths, row)]

    lines = []
    for row in vals:
        cells = []
        for w, cell in zip_longest(widths, row):
            # Missing cells (shorter rows) render as empty strings.
            cells.append('' if cell is None else str(cell).ljust(w))
        lines.append(" ".join(cells))

    return lines, widths
+
+
def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.

    Returns ``(rows, header)`` where each row mirrors the header layout:
    name/version always, latest info when --outdated, location for
    verbose/editable output, installer only when verbose.
    """
    show_outdated = options.outdated
    verbose_enough = options.verbose >= 1

    # Adjust the header for the `pip list --outdated` case.
    header = ["Package", "Version"]
    if show_outdated:
        header = ["Package", "Version", "Latest", "Type"]

    if verbose_enough or any(dist_is_editable(x) for x in pkgs):
        header.append("Location")
    if verbose_enough:
        header.append("Installer")

    rows = []
    for dist in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        entry = [dist.project_name, dist.version]

        if show_outdated:
            entry.append(dist.latest_version)
            entry.append(dist.latest_filetype)

        if verbose_enough or dist_is_editable(dist):
            entry.append(dist.location)
        if verbose_enough:
            entry.append(get_installer(dist))

        rows.append(entry)

    return rows, header
+
+
def format_for_json(packages, options):
    """Serialize the package list to a JSON array of metadata objects.

    Each entry carries name/version; location and installer are added when
    verbose, latest version info when --outdated is active.
    """
    def _serialize(dist):
        # One JSON-ready dict per distribution.
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.verbose >= 1:
            info['location'] = dist.location
            info['installer'] = get_installer(dist)
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        return info

    return json.dumps([_serialize(dist) for dist in packages])
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 00000000..58027112
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,139 @@
+from __future__ import absolute_import
+
+import logging
+import sys
+import textwrap
+from collections import OrderedDict
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.version import parse as parse_version
+# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.download import PipXmlrpcTransport
+from pip._internal.exceptions import CommandError
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import get_terminal_size
+from pip._internal.utils.logging import indent_log
+
+logger = logging.getLogger(__name__)
+
+
class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kwargs):
        """Register the single search-specific option (--index)."""
        super(SearchCommand, self).__init__(*args, **kwargs)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Query the index, print grouped results, and return a status code."""
        if not args:
            raise CommandError('Missing required argument (search query).')
        raw_hits = self.search(args, options)
        grouped_hits = transform_hits(raw_hits)

        # Only wrap output to the terminal width when stdout is a tty.
        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(grouped_hits, terminal_width=terminal_width)
        return SUCCESS if raw_hits else NO_MATCHES_FOUND

    def search(self, query, options):
        """Run the XML-RPC search against the configured index and return
        the raw hit list."""
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            proxy = xmlrpc_client.ServerProxy(index_url, transport)
            return proxy.search({'name': query, 'summary': query}, 'or')
+
+
def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    :param hits: iterable of dicts with 'name', 'summary' and 'version'
        keys, one entry per released version.
    :return: list of dicts, one per package, each with 'name', 'summary'
        (taken from the highest seen version) and 'versions' keys.
    """
    # OrderedDict preserves the index's hit ordering for the output.
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        # Direct membership test instead of `name in packages.keys()`.
        if name not in packages:
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())
+
+
def print_results(hits, name_column_width=None, terminal_width=None):
    """Log one line per hit ("name (latest) - summary"), plus the
    installed/latest version details for packages already installed.

    ``name_column_width`` defaults to the widest "name (version)" plus
    padding; ``terminal_width``, when given, is used to wrap summaries.
    """
    if not hits:
        return
    if name_column_width is None:
        # Widest "name" + "latest version" combination, plus padding.
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        if parse_version(latest).pre:
                            logger.info('LATEST: %s (pre-release; install'
                                        ' with "pip install --pre")', latest)
                        else:
                            logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            # Terminals that cannot encode the package metadata simply skip
            # the offending entry rather than aborting the whole listing.
            pass
+
+
def highest_version(versions):
    """Return the greatest entry of *versions*, compared as PEP 440 versions.

    Raises ValueError when *versions* is empty (plain ``max`` semantics).
    """
    return max(versions, key=parse_version)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py
new file mode 100644
index 00000000..a18a9020
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,168 @@
+from __future__ import absolute_import
+
+import logging
+import os
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+
+logger = logging.getLogger(__name__)
+
+
class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kwargs):
        """Register the single show-specific option (-f/--files)."""
        super(ShowCommand, self).__init__(*args, **kwargs)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Look up each named package and print its metadata; ERROR when
        no names were given or nothing matched."""
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR

        results = search_packages_info(args)
        printed = print_results(
            results, list_files=options.files, verbose=options.verbose)
        return SUCCESS if printed else ERROR
+
+
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.

    Yields one dict per queried package that is actually installed;
    unknown names are silently skipped.
    """
    # Map canonicalized name -> distribution for every installed package.
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                # RECORD lines are CSV: path,hash,size — keep only the path.
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                # installed-files.txt entries are relative to .egg-info.
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            # First non-blank line of INSTALLER names the installing tool.
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        # NOTE(review): metadata stays None when neither METADATA nor
        # PKG-INFO exists, and feed()/splitlines() below would then raise
        # TypeError/AttributeError — presumably every real dist ships one
        # of the two; confirm before relying on this with broken installs.
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package
+
+
def print_results(distributions, list_files=False, verbose=False):
    """Log the metadata of each distribution dict from
    ``search_packages_info``; return True when at least one was printed.
    """
    results_printed = False
    for index, dist in enumerate(distributions):
        results_printed = True
        if index > 0:
            # Separator between consecutive packages.
            logger.info("---")

        pkg_name = dist.get('name', '')
        # Installed packages whose requirements mention this one.
        required_by = []
        for pkg in pkg_resources.working_set:
            if pkg_name in {required.name for required in pkg.requires()}:
                required_by.append(pkg.project_name)

        logger.info("Name: %s", pkg_name)
        logger.info("Version: %s", dist.get('version', ''))
        logger.info("Summary: %s", dist.get('summary', ''))
        logger.info("Home-page: %s", dist.get('home-page', ''))
        logger.info("Author: %s", dist.get('author', ''))
        logger.info("Author-email: %s", dist.get('author-email', ''))
        logger.info("License: %s", dist.get('license', ''))
        logger.info("Location: %s", dist.get('location', ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        logger.info("Required-by: %s", ', '.join(required_by))

        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for line in dist.get('files', []):
                logger.info("  %s", line.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return results_printed
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py
new file mode 100644
index 00000000..0cd6f54b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import parse_requirements
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import protect_pip_from_modification_on_windows
+
+
class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kwargs):
        """Register -r/--requirement and -y/--yes."""
        super(UninstallCommand, self).__init__(*args, **kwargs)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Collect every named requirement (CLI args plus -r files),
        guard against self-modification on Windows, then uninstall each."""
        with self._build_session(options) as session:
            # Keyed by canonicalized name so duplicates collapse.
            targets = {}

            # Requirements named directly on the command line.
            for name in args:
                req = install_req_from_line(
                    name, isolated=options.isolated_mode,
                )
                if req.name:
                    targets[canonicalize_name(req.name)] = req

            # Requirements pulled in via -r/--requirement files.
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        options=options,
                        session=session):
                    if req.name:
                        targets[canonicalize_name(req.name)] = req

            if not targets:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % {'name': self.name}
                )

            protect_pip_from_modification_on_windows(
                modifying_pip="pip" in targets
            )

            for req in targets.values():
                uninstall_pathset = req.uninstall(
                    auto_confirm=options.yes, verbose=self.verbosity > 0,
                )
                if uninstall_pathset:
                    uninstall_pathset.commit()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 00000000..97f3b148
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,181 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import RequirementCommand
+from pip._internal.exceptions import CommandError, PreviousBuildDirError
+from pip._internal.legacy_resolve import Resolver
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.wheel import WheelBuilder
+
+logger = logging.getLogger(__name__)
+
+
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        """Register wheel-specific options plus the shared index options."""
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve the requested requirements and build a wheel for each.

        Raises CommandError when any wheel fails to build; on
        PreviousBuildDirError the build dir is deliberately left in place.
        """
        cmdoptions.check_install_build_global(options)

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            # Only auto-delete the build dir when the user supplied neither
            # --no-clean nor an explicit --build dir.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="wheel"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )

                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=options.wheel_dir,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=False,
                        upgrade_strategy="to-satisfy-only",
                        force_reinstall=False,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=True,
                        isolated=options.isolated_mode,
                        use_pep517=options.use_pep517
                    )
                    resolver.resolve(requirement_set)

                    # build wheels
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                        no_clean=options.no_clean,
                    )
                    build_failures = wb.build(
                        requirement_set.requirements.values(), session=session,
                    )
                    if len(build_failures) != 0:
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    # Keep the stale build dir around so the user can inspect
                    # it; flag no_clean so the finally block skips cleanup.
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py
new file mode 100644
index 00000000..437e92ee
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,417 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+ As written in config files.
+- value
+ Value associated with a name
+- key
+ Name combined with it's section (section.name)
+- variant
+ A single word describing where the configuration key-value pair came from
+"""
+
+import locale
+import logging
+import os
+import sys
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.exceptions import (
+ ConfigurationError, ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS, expanduser
+from pip._internal.utils.misc import ensure_dir, enum
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, NewType, Optional, Tuple
+ )
+
+ RawConfigParser = configparser.RawConfigParser # Shorthand
+ Kind = NewType("Kind", str)
+
+logger = logging.getLogger(__name__)
+
+
+# NOTE: Maybe use the optionx attribute to normalize keynames.
+def _normalize_name(name):
+ # type: (str) -> str
+ """Make a name consistent regardless of source (environment or file)
+ """
+ name = name.lower().replace('_', '-')
+ if name.startswith('--'):
+ name = name[2:] # only prefer long opts
+ return name
+
+
+def _disassemble_key(name):
+ # type: (str) -> List[str]
+ if "." not in name:
+ error_message = (
+ "Key does not contain dot separated section and key. "
+ "Perhaps you wanted to use 'global.{}' instead?"
+ ).format(name)
+ raise ConfigurationError(error_message)
+ return name.split(".", 1)
+
+
# The kinds of configurations there are.  USER/GLOBAL/SITE are file-backed
# and editable; ENV and ENV_VAR are read-only sources.
kinds = enum(
    USER="user",  # User Specific
    GLOBAL="global",  # System Wide
    SITE="site",  # [Virtual] Environment Specific
    ENV="env",  # from PIP_CONFIG_FILE
    ENV_VAR="env-var",  # from Environment Variables
)


# Basename of pip's configuration file: "pip.ini" on Windows, "pip.conf"
# everywhere else.
CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
+
+
def get_configuration_files():
    """Return the candidate configuration file paths, keyed by variant.

    GLOBAL maps to the system-wide config directories, SITE to the
    current (virtual) environment's prefix, and USER to the legacy
    then the new per-user location.
    """
    site_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    legacy_user_file = os.path.join(
        expanduser('~'),
        'pip' if WINDOWS else '.pip',
        CONFIG_BASENAME,
    )
    user_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
    return {
        kinds.GLOBAL: [
            os.path.join(path, CONFIG_BASENAME)
            for path in appdirs.site_config_dirs('pip')
        ],
        kinds.SITE: [site_file],
        kinds.USER: [legacy_user_file, user_file],
    }
+
+
class Configuration(object):
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style keys and
    stores the value associated with it as "key-name" under the section
    "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration
    files and the data stored is also nice.
    """

    def __init__(self, isolated, load_only=None):
        # type: (bool, Kind) -> None
        super(Configuration, self).__init__()

        # Only file-backed variants may be edited; ENV and ENV_VAR are
        # read-only sources, so they are not valid load_only targets.
        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None]
        if load_only not in _valid_load_only:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, _valid_load_only[:-1]))
                )
            )
        self.isolated = isolated  # type: bool
        self.load_only = load_only  # type: Optional[Kind]

        # The order here determines the override order.
        self._override_order = [
            kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
        ]

        # PIP_VERSION / PIP_HELP are pip's own flags, not configuration;
        # _get_environ_vars() skips them.
        self._ignore_env_names = ["version", "help"]

        # Because we keep track of where we got the data from
        self._parsers = {
            variant: [] for variant in self._override_order
        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
        self._config = {
            variant: {} for variant in self._override_order
        }  # type: Dict[Kind, Dict[str, Any]]
        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]

    def load(self):
        # type: () -> None
        """Loads configuration from configuration files and environment
        """
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self):
        # type: () -> Optional[str]
        """Returns the file with highest priority in configuration
        """
        assert self.load_only is not None, \
            "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self):
        # type: () -> Iterable[Tuple[str, Any]]
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key):
        # type: (str) -> Any
        """Get a value from the configuration.

        Raises ConfigurationError when *key* is not present in any loaded
        variant.
        """
        try:
            return self._dictionary[key]
        except KeyError:
            raise ConfigurationError("No such key - {}".format(key))

    def set_value(self, key, value):
        # type: (str, Any) -> None
        """Modify a value in the configuration.

        The change is made in memory; call save() to write it to disk.
        """
        self._ensure_have_load_only()

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key):
        # type: (str) -> None
        """Unset a value in the configuration.

        Raises ConfigurationError when the key is not present in the
        load_only variant.
        """
        self._ensure_have_load_only()

        if key not in self._config[self.load_only]:
            raise ConfigurationError("No such key - {}".format(key))

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Remove the key in the parser
            modified_something = False
            if parser.has_section(section):
                # Returns whether the option was removed or not
                modified_something = parser.remove_option(section, name)

            if modified_something:
                # name removed from parser, section may now be empty
                section_iter = iter(parser.items(section))
                try:
                    val = next(section_iter)
                except StopIteration:
                    val = None

                if val is None:
                    # Section has no options left; drop it so empty
                    # section headers are not written back to disk.
                    parser.remove_section(section)

                self._mark_as_modified(fname, parser)
            else:
                # The in-memory view said the key exists but the parser
                # disagreed; the two are out of sync.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

        del self._config[self.load_only][key]

    def save(self):
        # type: () -> None
        """Save the current in-memory state.

        Writes every parser recorded by _mark_as_modified() back to its
        file.
        """
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            with open(fname, "w") as f:
                parser.write(f)

    #
    # Private routines
    #

    def _ensure_have_load_only(self):
        # type: () -> None
        # Editing APIs require a single target variant to be selected.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self):
        # type: () -> Dict[str, Any]
        """A dictionary representing the loaded configuration.
        """
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        # Later variants in _override_order win on key collisions.
        for variant in self._override_order:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self):
        # type: () -> None
        """Loads configuration from configuration files
        """
        config_files = dict(self._iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            # PIP_CONFIG_FILE=os.devnull is the documented way to disable
            # configuration loading entirely.
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        """Parse one file and merge its values into *variant*'s dict."""
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        """Build a RawConfigParser for *fname*, reading it when it exists."""
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason="contains invalid {} characters".format(
                        locale.getpreferredencoding(False)
                    ),
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables
        """
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self._get_environ_vars())
        )

    def _normalized_keys(self, section, items):
        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def _get_environ_vars(self):
        # type: () -> Iterable[Tuple[str, str]]
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            should_be_yielded = (
                key.startswith("PIP_") and
                key[4:].lower() not in self._ignore_env_names
            )
            if should_be_yielded:
                yield key[4:].lower(), val

    # XXX: This is patched in the tests.
    def _iter_config_files(self):
        # type: () -> Iterable[Tuple[Kind, List[str]]]
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # The file pointed to by PIP_CONFIG_FILE (if any) is yielded under
        # the ENV variant; an empty list is yielded otherwise so callers
        # can rely on the key existing.
        config_file = os.environ.get('PIP_CONFIG_FILE', None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        config_files = get_configuration_files()

        # at the base we have any global configuration
        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # Per-user configuration is skipped in isolated mode, or when an
        # existing PIP_CONFIG_FILE has been given explicitly.
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # Finally, the [virtual] environment's own configuration, trumping
        # the previous variants.
        yield kinds.SITE, config_files[kinds.SITE]

    def _get_parser_to_modify(self):
        # type: () -> Tuple[str, RawConfigParser]
        # Determine which parser to modify
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname, parser):
        # type: (str, RawConfigParser) -> None
        """Record (fname, parser) for save(), de-duplicating entries."""
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 00000000..fdf332a8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,23 @@
+from pip._internal.distributions.source import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.distributions.base import AbstractDistribution
+ from pip._internal.req.req_install import InstallRequirement
+
+
def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable requirements are always treated as source distributions,
    # even when their link points at a wheel file.
    if install_req.editable:
        return SourceDistribution(install_req)

    # A non-editable requirement whose link is a wheel can be unpacked
    # directly, with no build step.
    if install_req.link and install_req.is_wheel:
        return WheelDistribution(install_req)

    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..122d53d7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pyc
new file mode 100644
index 00000000..a120de57
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/base.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pyc
new file mode 100644
index 00000000..b8607195
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pyc
new file mode 100644
index 00000000..fb91159a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/source.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc
new file mode 100644
index 00000000..594a21d4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py
new file mode 100644
index 00000000..b9af3f02
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/base.py
@@ -0,0 +1,33 @@
+import abc
+
+from pip._vendor.six import add_metaclass
+
+
@add_metaclass(abc.ABCMeta)
class AbstractDistribution(object):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.
    """

    def __init__(self, req):
        super(AbstractDistribution, self).__init__()
        # The InstallRequirement this distribution was created from.
        self.req = req

    @abc.abstractmethod
    def get_pkg_resources_distribution(self):
        """Return a pkg_resources Distribution exposing the metadata."""
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(self, finder, build_isolation):
        """Do whatever work is needed so the metadata is available.

        May be a no-op for artifacts whose metadata already exists.
        """
        raise NotImplementedError()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py
new file mode 100644
index 00000000..c4a64e7c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/installed.py
@@ -0,0 +1,15 @@
+from pip._internal.distributions.base import AbstractDistribution
+
+
class InstalledDistribution(AbstractDistribution):
    """Represents an installed package.

    This does not need any preparation as the required information has already
    been computed.
    """

    def get_pkg_resources_distribution(self):
        # The already-installed distribution that satisfies the requirement.
        return self.req.satisfied_by

    def prepare_distribution_metadata(self, finder, build_isolation):
        # Nothing to prepare: the package is installed, so its metadata is
        # already available.
        pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py
new file mode 100644
index 00000000..e5d9fd4b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/source.py
@@ -0,0 +1,80 @@
+import logging
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+
+logger = logging.getLogger(__name__)
+
+
class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.

    NOTE from @pradyunsg (14 June 2019)
    I expect SourceDistribution class will need to be split into
    `legacy_source` (setup.py based) and `source` (PEP 517 based) when we start
    bringing logic for preparation out of InstallRequirement into this class.
    """

    def get_pkg_resources_distribution(self):
        """Return the distribution metadata prepared for this requirement."""
        return self.req.get_dist()

    def prepare_distribution_metadata(self, finder, build_isolation):
        """Generate the requirement's metadata, isolating the build if asked.

        :param finder: used to locate and install build dependencies.
        :param build_isolation: whether PEP 517/518 build isolation may be
            used; only applied when the requirement opts into PEP 517.
        :raises InstallationError: when installed build dependencies
            conflict with the declared ones.
        """
        # Prepare for building. We need to:
        # 1. Load pyproject.toml (if it exists)
        # 2. Set up the build environment
        self.req.load_pyproject_toml()
        should_isolate = self.req.use_pep517 and build_isolation

        def _raise_conflicts(conflicting_with, conflicting_reqs):
            # Report the requirements actually passed in.  (Bug fix: this
            # previously iterated the enclosing scope's `conflicting`
            # variable instead of the `conflicting_reqs` parameter, leaving
            # the parameter dead and the helper dependent on outer state.)
            raise InstallationError(
                "Some build dependencies for %s conflict with %s: %s." % (
                    self.req, conflicting_with, ', '.join(
                        '%s is incompatible with %s' % (installed, wanted)
                        for installed, wanted in sorted(conflicting_reqs))))

        if should_isolate:
            # Isolate in a BuildEnvironment and install the build-time
            # requirements.
            self.req.build_env = BuildEnvironment()
            self.req.build_env.install_requirements(
                finder, self.req.pyproject_requires, 'overlay',
                "Installing build dependencies"
            )
            conflicting, missing = self.req.build_env.check_requirements(
                self.req.requirements_to_check
            )
            if conflicting:
                _raise_conflicts("PEP 517/518 supported requirements",
                                 conflicting)
            if missing:
                logger.warning(
                    "Missing build requirements in pyproject.toml for %s.",
                    self.req,
                )
                logger.warning(
                    "The project does not specify a build backend, and "
                    "pip cannot fall back to setuptools without %s.",
                    " and ".join(map(repr, sorted(missing)))
                )
            # Install any extra build dependencies that the backend requests.
            # This must be done in a second pass, as the pyproject.toml
            # dependencies must be installed before we can call the backend.
            with self.req.build_env:
                # We need to have the env active when calling the hook.
                self.req.spin_message = "Getting requirements to build wheel"
                reqs = self.req.pep517_backend.get_requires_for_build_wheel()
            conflicting, missing = self.req.build_env.check_requirements(reqs)
            if conflicting:
                _raise_conflicts("the backend dependencies", conflicting)
            self.req.build_env.install_requirements(
                finder, missing, 'normal',
                "Installing backend dependencies"
            )

        self.req.prepare_metadata()
        self.req.assert_source_matches_version()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py
new file mode 100644
index 00000000..de7be38e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py
@@ -0,0 +1,17 @@
+from pip._vendor import pkg_resources
+
+from pip._internal.distributions.base import AbstractDistribution
+
+
class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    This does not need any preparation as wheels can be directly unpacked.
    """

    def get_pkg_resources_distribution(self):
        # Assumes the wheel has already been unpacked into
        # self.req.source_dir, and takes the first distribution found
        # there -- TODO confirm against the caller.
        return list(pkg_resources.find_distributions(
            self.req.source_dir))[0]

    def prepare_distribution_metadata(self, finder, build_isolation):
        # Nothing to do: a wheel's metadata is available once unpacked.
        pass
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py
new file mode 100644
index 00000000..8715eb5b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/download.py
@@ -0,0 +1,1163 @@
+from __future__ import absolute_import
+
+import cgi
+import email.utils
+import json
+import logging
+import mimetypes
+import os
+import platform
+import re
+import shutil
+import sys
+
+from pip._vendor import requests, urllib3
+from pip._vendor.cachecontrol import CacheControlAdapter
+from pip._vendor.cachecontrol.caches import FileCache
+from pip._vendor.lockfile import LockError
+from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
+from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.requests.utils import get_netrc_auth
+# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+import pip
+from pip._internal.exceptions import HashMismatch, InstallationError
+from pip._internal.models.index import PyPI
+# Import ssl from compat so the initial import occurs in only one place.
+from pip._internal.utils.compat import HAS_TLS, ssl
+from pip._internal.utils.encoding import auto_decode
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.glibc import libc_ver
+from pip._internal.utils.marker_files import write_delete_marker_file
+from pip._internal.utils.misc import (
+ ARCHIVE_EXTENSIONS, ask, ask_input, ask_password, ask_path_exists,
+ backup_dir, consume, display_path, format_size, get_installed_version,
+ path_to_url, remove_auth_from_url, rmtree, split_auth_netloc_from_url,
+ splitext, unpack_file,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import DownloadProgressProvider
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Optional, Tuple, Dict, IO, Text, Union
+ )
+ from optparse import Values
+ from pip._internal.models.link import Link
+ from pip._internal.utils.hashes import Hashes
+ from pip._internal.vcs.versioncontrol import AuthInfo, VersionControl
+
+ Credentials = Tuple[str, str, str]
+
+
+__all__ = ['get_file_content',
+ 'is_url', 'url_to_path', 'path_to_url',
+ 'is_archive_file', 'unpack_vcs_link',
+ 'unpack_file_url', 'is_vcs_url', 'is_file_url',
+ 'unpack_http_url', 'unpack_url',
+ 'parse_content_disposition', 'sanitize_content_filename']
+
+
+logger = logging.getLogger(__name__)
+
+
+try:
+ import keyring # noqa
+except ImportError:
+ keyring = None
+except Exception as exc:
+ logger.warning("Keyring is skipped due to an exception: %s",
+ str(exc))
+ keyring = None
+
# Environment variables that indicate a CI system, with examples of the
# systems that set each one.  The set was chosen so that, for each of a
# number of popular systems, at least one variable is present.  It provides
# an indication of (and a lower bound for) CI traffic to PyPI, so it is okay
# that the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    'BUILD_BUILDID',
    # Jenkins
    'BUILD_ID',
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    'CI',
    # Explicit environment variable.
    'PIP_IS_CI',
)


def looks_like_ci():
    # type: () -> bool
    """Return whether it looks like pip is running under CI."""
    # A tty check (isatty()) would be unreliable here: some CI systems
    # (e.g. Travis CI) mimic a tty, so that test is inconclusive in either
    # direction.  Well-known CI environment variables are used instead.
    environ = os.environ
    for variable in CI_ENVIRONMENT_VARIABLES:
        if variable in environ:
            return True
    return False
+
+
def user_agent():
    """
    Return a string representing the user agent.

    The result has the form "pip/<version> <json>", where <json> is a
    compact JSON object describing the installer, Python implementation,
    platform, and (when detectable) distro, libc, OpenSSL and setuptools
    versions, plus a CI marker and optional opt-in user data.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        # For final releases only the (major, minor, micro) triple is
        # reported; pre-releases keep the full version info tuple.
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        # Drop empty fields so the JSON stays compact.
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if HAS_TLS:
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version

    # Use None rather than False so as not to give the impression that
    # pip knows it is not being run under CI.  Rather, it is a null or
    # inconclusive result.  Also, we include some value rather than no
    # value to make it easier to know that the check has been run.
    data["ci"] = True if looks_like_ci() else None

    # Opt-in, user-supplied free-form data for the user agent.
    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
    if user_data is not None:
        data["user_data"] = user_data

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
+
+
def _get_keyring_auth(url, username):
    """Return the tuple auth for a given url from keyring.

    Returns ``(username, password)`` when keyring has credentials for
    *url*, and ``None`` otherwise -- including when the keyring module is
    unavailable, or when any keyring call raises (the exception is logged
    and swallowed, falling through to an implicit ``None`` return).
    """
    if not url or not keyring:
        return None

    try:
        try:
            # Newer keyring versions expose get_credential(), which can
            # discover a username as well as a password.
            get_credential = keyring.get_credential
        except AttributeError:
            pass
        else:
            logger.debug("Getting credentials from keyring for %s", url)
            cred = get_credential(url, username)
            if cred is not None:
                return cred.username, cred.password
            return None

        # Fall back to the older get_password() API, which requires a
        # username to query.
        if username:
            logger.debug("Getting password from keyring for %s", url)
            password = keyring.get_password(url, username)
            if password:
                return username, password

    except Exception as exc:
        # A broken keyring backend must not break pip; warn and fall
        # through (returning None).
        logger.warning("Keyring is skipped due to an exception: %s",
                       str(exc))
+
+
+class MultiDomainBasicAuth(AuthBase):
+
+ def __init__(self, prompting=True, index_urls=None):
+ # type: (bool, Optional[Values]) -> None
+ self.prompting = prompting
+ self.index_urls = index_urls
+ self.passwords = {} # type: Dict[str, AuthInfo]
+ # When the user is prompted to enter credentials and keyring is
+ # available, we will offer to save them. If the user accepts,
+ # this value is set to the credentials they entered. After the
+ # request authenticates, the caller should call
+ # ``save_credentials`` to save these.
+ self._credentials_to_save = None # type: Optional[Credentials]
+
+ def _get_index_url(self, url):
+ """Return the original index URL matching the requested URL.
+
+ Cached or dynamically generated credentials may work against
+ the original index URL rather than just the netloc.
+
+ The provided url should have had its username and password
+ removed already. If the original index url had credentials then
+ they will be included in the return value.
+
+ Returns None if no matching index was found, or if --no-index
+ was specified by the user.
+ """
+ if not url or not self.index_urls:
+ return None
+
+ for u in self.index_urls:
+ prefix = remove_auth_from_url(u).rstrip("/") + "/"
+ if url.startswith(prefix):
+ return u
+
    def _get_new_credentials(self, original_url, allow_netrc=True,
                             allow_keyring=True):
        """Find and return credentials for the specified URL.

        Sources are consulted in priority order: credentials embedded in
        the URL itself, credentials embedded in a matching configured
        index URL, netrc (if ``allow_netrc``), then keyring (if
        ``allow_keyring``).  Returns a ``(username, password)`` pair;
        either element may be None if nothing was found.
        """
        # Split the credentials and netloc from the url.
        url, netloc, url_user_password = split_auth_netloc_from_url(
            original_url)

        # Start with the credentials embedded in the url
        username, password = url_user_password
        if username is not None and password is not None:
            logger.debug("Found credentials in url for %s", netloc)
            return url_user_password

        # Find a matching index url for this request
        index_url = self._get_index_url(url)
        if index_url:
            # Split the credentials from the url.
            index_info = split_auth_netloc_from_url(index_url)
            if index_info:
                index_url, _, index_url_user_password = index_info
                logger.debug("Found index url %s", index_url)

        # If an index URL was found, try its embedded credentials
        if index_url and index_url_user_password[0] is not None:
            username, password = index_url_user_password
            if username is not None and password is not None:
                logger.debug("Found credentials in index url for %s", netloc)
                return index_url_user_password

        # Get creds from netrc if we still don't have them
        if allow_netrc:
            netrc_auth = get_netrc_auth(original_url)
            if netrc_auth:
                logger.debug("Found credentials in netrc for %s", netloc)
                return netrc_auth

        # If we don't have a password and keyring is available, use it.
        if allow_keyring:
            # The index url is more specific than the netloc, so try it first
            kr_auth = (_get_keyring_auth(index_url, username) or
                       _get_keyring_auth(netloc, username))
            if kr_auth:
                logger.debug("Found credentials in keyring for %s", netloc)
                return kr_auth

        # Nothing found anywhere.
        return None, None
+
+ def _get_url_and_credentials(self, original_url):
+ """Return the credentials to use for the provided URL.
+
+ If allowed, netrc and keyring may be used to obtain the
+ correct credentials.
+
+ Returns (url_without_credentials, username, password). Note
+ that even if the original URL contains credentials, this
+ function may return a different username and password.
+ """
+ url, netloc, _ = split_auth_netloc_from_url(original_url)
+
+ # Use any stored credentials that we have for this netloc
+ username, password = self.passwords.get(netloc, (None, None))
+
+ # If nothing cached, acquire new credentials without prompting
+ # the user (e.g. from netrc, keyring, or similar).
+ if username is None or password is None:
+ username, password = self._get_new_credentials(original_url)
+
+ if username is not None and password is not None:
+ # Store the username and password
+ self.passwords[netloc] = (username, password)
+
+ return url, username, password
+
+ def __call__(self, req):
+ # Get credentials for this request
+ url, username, password = self._get_url_and_credentials(req.url)
+
+ # Set the url of the request to the url without any credentials
+ req.url = url
+
+ if username is not None and password is not None:
+ # Send the basic auth with this request
+ req = HTTPBasicAuth(username, password)(req)
+
+ # Attach a hook to handle 401 responses
+ req.register_hook("response", self.handle_401)
+
+ return req
+
+ # Factored out to allow for easy patching in tests
+ def _prompt_for_password(self, netloc):
+ username = ask_input("User for %s: " % netloc)
+ if not username:
+ return None, None
+ auth = _get_keyring_auth(netloc, username)
+ if auth:
+ return auth[0], auth[1], False
+ password = ask_password("Password: ")
+ return username, password, True
+
+ # Factored out to allow for easy patching in tests
+ def _should_save_password_to_keyring(self):
+ if not keyring:
+ return False
+ return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
+
    def handle_401(self, resp, **kwargs):
        """Response hook: on a 401, prompt for credentials and retry once.

        Returns the original response for non-401 statuses or when
        prompting is disabled; otherwise returns the response to a single
        retried request carrying the newly entered credentials.
        """
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username, password, save = self._prompt_for_password(parsed.netloc)

        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)

            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = (parsed.netloc, username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp
+
+ def warn_on_401(self, resp, **kwargs):
+ """Response callback to warn about incorrect credentials."""
+ if resp.status_code == 401:
+ logger.warning('401 Error, Credentials not correct for %s',
+ resp.request.url)
+
+ def save_credentials(self, resp, **kwargs):
+ """Response callback to save credentials on success."""
+ assert keyring is not None, "should never reach here without keyring"
+ if not keyring:
+ return
+
+ creds = self._credentials_to_save
+ self._credentials_to_save = None
+ if creds and resp.status_code < 400:
+ try:
+ logger.info('Saving credentials to keyring')
+ keyring.set_password(*creds)
+ except Exception:
+ logger.exception('Failed to save credentials')
+
+
class LocalFSAdapter(BaseAdapter):
    """Transport adapter that serves ``file://`` URLs from local disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # Missing/unreadable file becomes a 404; stash the exception
            # as the raw payload for debugging.
            resp.status_code = 404
            resp.raw = exc
            return resp

        resp.headers = CaseInsensitiveDict({
            "Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain",
            "Content-Length": stats.st_size,
            "Last-Modified": email.utils.formatdate(stats.st_mtime,
                                                    usegmt=True),
        })

        # Stream the file itself; closing the response closes the file.
        resp.raw = open(pathname, "rb")
        resp.close = resp.raw.close

        return resp

    def close(self):
        pass
+
+
class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user current executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def _safe_call(self, method, *args, **kwargs):
        """Run a FileCache operation, degrading to a no-op on failure.

        Refactoring: get/set/delete previously repeated this exact
        guard + try/except boilerplate three times.
        """
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return None
        try:
            return method(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error: if we can't access the
            # cache then we can just skip caching and process the request
            # as if caching wasn't enabled.
            return None

    def get(self, *args, **kwargs):
        return self._safe_call(
            super(SafeFileCache, self).get, *args, **kwargs)

    def set(self, *args, **kwargs):
        return self._safe_call(
            super(SafeFileCache, self).set, *args, **kwargs)

    def delete(self, *args, **kwargs):
        return self._safe_call(
            super(SafeFileCache, self).delete, *args, **kwargs)
+
+
class InsecureHTTPAdapter(HTTPAdapter):
    """Transport adapter that disables TLS certificate verification."""

    def cert_verify(self, conn, url, verify, cert):
        # Ignore the session's verify/cert settings and force the
        # underlying connection to skip certificate checks entirely.
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None
+
+
class PipSession(requests.Session):
    """A requests.Session configured for pip: custom User-Agent, multi-index
    basic auth, retries, secure-only caching, and file:// support."""

    # Default timeout for requests made through this session; None means
    # no timeout (see the ``request`` override below).
    timeout = None  # type: Optional[int]

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])
        index_urls = kwargs.pop("index_urls", None)

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
        # Save this for later use in add_insecure_host().
        self._insecure_adapter = insecure_adapter

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.add_insecure_host(host)

    def add_insecure_host(self, host):
        # type: (str) -> None
        """Disable TLS verification for all future requests to ``host``."""
        self.mount('https://{}/'.format(host), self._insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
+
+
def get_file_content(url, comes_from=None, session=None):
    # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.

    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        # Refuse local file references from remote requirements files.
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            # Convert the file: URL into a local filesystem path.
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                # Windows drive spelled as "/c|/..." -> "c:/...".
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                # Collapse leading slashes to a single one.
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    # Plain path (or file: URL converted above): read from disk.
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
+
+
# Matches an explicit http/https/file scheme at the start of a URL.
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches a Windows drive letter spelled with a pipe, e.g. "/c|/path".
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
+
+
def is_url(name):
    # type: (Union[str, Text]) -> bool
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme, _, _ = name.partition(':')
    return scheme.lower() in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
+
+
def url_to_path(url):
    # type: (str) -> str
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    if netloc in ('', 'localhost'):
        # According to RFC 8089, same as empty authority.
        netloc = ''
    elif sys.platform == 'win32':
        # If we have a UNC path, prepend UNC share notation.
        netloc = '\\\\' + netloc
    else:
        raise ValueError(
            'non-local file URIs are not supported on this platform: %r'
            % url
        )

    return urllib_request.url2pathname(netloc + path)
+
+
def is_archive_file(name):
    # type: (str) -> bool
    """Return True if `name` is a considered as an archive file."""
    # Idiom fix: return the membership test directly instead of the
    # ``if x in y: return True / return False`` pattern.
    return splitext(name)[1].lower() in ARCHIVE_EXTENSIONS
+
+
def unpack_vcs_link(link, location):
    # Check out / export the VCS requirement at ``link`` into ``location``.
    # NOTE(review): _get_used_vcs_backend returns None for unrecognized
    # schemes, which would raise AttributeError below — presumably callers
    # pre-filter with is_vcs_url(); confirm.
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location, url=link.url)
+
+
def _get_used_vcs_backend(link):
    # type: (Link) -> Optional[VersionControl]
    """
    Return a VersionControl object or None.
    """
    # First registered backend claiming the link's scheme wins.
    return next(
        (backend for backend in vcs.backends
         if link.scheme in backend.schemes),
        None,
    )
+
+
def is_vcs_url(link):
    # type: (Link) -> bool
    """Return whether some VCS backend claims the link's scheme."""
    return _get_used_vcs_backend(link) is not None
+
+
def is_file_url(link):
    # type: (Link) -> bool
    """Return whether the link's URL uses the file: scheme."""
    url = link.url.lower()
    return url.startswith('file:')
+
+
def is_dir_url(link):
    # type: (Link) -> bool
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    """
    return os.path.isdir(url_to_path(link.url_without_fragment))
+
+
+def _progress_indicator(iterable, *args, **kwargs):
+ return iterable
+
+
def _download_url(
    resp,  # type: Response
    link,  # type: Link
    content_file,  # type: IO
    hashes,  # type: Optional[Hashes]
    progress_bar  # type: str
):
    # type: (...) -> None
    """Stream ``resp`` into ``content_file``, optionally showing progress
    and verifying ``hashes`` over the downloaded chunks."""
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        # Missing or malformed header: treat length as unknown.
        total_length = 0

    # Decide whether to render a progress bar: never for quiet logging or
    # cached responses; otherwise for large or unknown-length downloads.
    cached_resp = getattr(resp, "from_cache", False)
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        # Yield raw chunks of the response body.
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        # Tee each chunk into content_file while passing it through.
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    # Hash verification consumes the generator; otherwise drain it so the
    # file actually gets written.
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)
+
+
def _copy_file(filename, location, link):
    # Copy ``filename`` into ``location`` under the link's filename,
    # interactively resolving conflicts with an existing file
    # (ignore / overwrite / back up / abort).
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            # Move the existing file aside before copying the new one in.
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
+
+
def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Fetch an http(s) link (or reuse a previously downloaded copy) and
    unpack the archive into ``location``."""
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        # The temp copy has served its purpose; remove it (a pre-existing
        # download is left in place).
        if not already_downloaded_path:
            os.unlink(from_path)
+
+
def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        # Replace any existing build dir contents with a fresh copy.
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
+
+
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        # Only the scheme of the index URL is kept; host/path come from
        # each request() call.
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        # Reassemble the full URL from the stored scheme plus the
        # host/handler supplied by xmlrpclib.
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
+
+
def unpack_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    only_download=False,  # type: bool
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Unpack link.
    If link is a VCS link:
      if only_download, export into download_dir and ignore location
          else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            # Fall back to a default session when the caller supplied none.
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)
+
+
def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.

    Dropping any directory components prevents path traversal via
    ".." or absolute paths embedded in the header.
    """
    basename = os.path.basename(filename)
    return basename
+
+
def parse_content_disposition(content_disposition, default_filename):
    # type: (str, str) -> str
    """
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    filename = params.get('filename')
    if not filename:
        return default_filename
    # Sanitize to prevent directory traversal via ".." path parts; fall
    # back to the default if sanitizing leaves nothing usable.
    return sanitize_content_filename(filename) or default_filename
+
+
def _download_http_url(
    link,  # type: Link
    session,  # type: PipSession
    temp_dir,  # type: str
    hashes,  # type: Optional[Hashes]
    progress_bar  # type: str
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using provided session"""
    # Strip the fragment (hash spec) before requesting.
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)
    ext = splitext(filename)[1]  # type: Optional[str]
    if not ext:
        # No extension on the filename: try the Content-Type.
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        # Still nothing: if we were redirected, try the final URL.
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type
+
+
def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Optional[Hashes]) -> Optional[str]
    """ Check download_dir for previously downloaded file with correct hash
    If a correct file is found return its path else None
    """
    download_path = os.path.join(download_dir, link.filename)
    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            # Stale/corrupt file: remove it and force a re-download.
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py
new file mode 100644
index 00000000..096adcd6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/exceptions.py
@@ -0,0 +1,305 @@
+"""Exceptions used throughout package"""
+from __future__ import absolute_import
+
+from itertools import chain, groupby, repeat
+
+from pip._vendor.six import iteritems
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.req.req_install import InstallRequirement
+
+
class PipError(Exception):
    """Base pip exception; all pip-specific errors derive from this."""
+
+
class ConfigurationError(PipError):
    """General exception in configuration (e.g. invalid config values)."""
+
+
class InstallationError(PipError):
    """General exception during installation of a package."""
+
+
class UninstallationError(PipError):
    """General exception during uninstallation of a package."""
+
+
class NoneMetadataError(PipError):
    """
    Raised when accessing "METADATA" or "PKG-INFO" metadata for a
    pip._vendor.pkg_resources.Distribution object and
    `dist.has_metadata('METADATA')` returns True but
    `dist.get_metadata('METADATA')` returns None (and similarly for
    "PKG-INFO").
    """

    def __init__(self, dist, metadata_name):
        # type: (Distribution, str) -> None
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self):
        # type: () -> str
        # Use `dist` in the error message because its stringification
        # includes more information, like the version and location.
        template = 'None {} metadata found for distribution: {}'
        return template.format(self.metadata_name, self.dist)
+
+
class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""
+
+
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""
+
+
class BadCommand(PipError):
    """Raised when virtualenv or a command (e.g. a VCS binary) is not found"""
+
+
class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""
+
+
class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""
+
+
class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename (does not follow the wheel naming spec)."""
+
+
class UnsupportedWheel(InstallationError):
    """Unsupported wheel (not compatible with this platform/interpreter)."""
+
+
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        # Accumulated HashError instances, in insertion order.
        self.errors = []

    def append(self, error):
        """Add another HashError to the collection."""
        self.errors.append(error)

    def __str__(self):
        """Render all errors, grouped by class under each class's heading.

        Bug fix: when there were no errors this previously fell off the
        end and returned None, making str() raise
        ``TypeError: __str__ returned non-string``; now it returns ''.
        """
        lines = []
        # Sort by recovery difficulty so groupby sees each class once.
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        return '\n'.join(lines)

    def __nonzero__(self):
        # Truthy iff any errors were collected (Python 2 protocol).
        return bool(self.errors)

    def __bool__(self):
        # Python 3 truthiness delegates to the Python 2 implementation.
        return self.__nonzero__()
+
+
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None  # type: Optional[InstallRequirement]
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        summary = self.body()
        return '%s\n%s' % (self.head, summary)

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        if self.req:
            return str(self.req)
        return 'unknown package'
+
+
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    # Hardest-to-recover class: sorts first in grouped error reports.
    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")
+
+
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory, and we don't have a method for hashing directories.

    (The previous docstring here was copy-pasted from VcsHashUnsupported
    and described the wrong condition; the ``head`` text below is the
    authoritative description.)
    """

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")
+
+
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        """Return a requirements-file-ready line suggesting the hash to pin."""
        # Imported here rather than at module scope to dodge a circular
        # import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # Prefer the original URL from the requirements file over the
            # parsed name, so the output can be pasted straight back into
            # that file; fall back to the parsed requirement object (which
            # may itself be missing if InstallRequirement was fed something
            # unusable).
            package = self.req.original_link or getattr(self.req, 'req', None)
        return ' %s --hash=%s:%s' % (package or 'unknown package',
                                     FAVORITE_HASH,
                                     self.gotten_hash)
+
+
class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3  # see HashError.order for how this ranks among hash errors
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')
+
+
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised
        to improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        """Return the requirement name plus the hash comparison table."""
        return ' %s:\n%s' % (self._requirement_name(),
                             self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                        or 123451234512345123451234512345123451234512345
            Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat(' or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            # prefix yields the algorithm name once, then ' or' forever,
            # so only the first "Expected" line names the algorithm.
            prefix = hash_then_or(hash_name)
            lines.extend((' Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append(' Got %s\n' %
                         self.gots[hash_name].hexdigest())
            # Fix: removed a dead "prefix = ' or'" reassignment that sat
            # here; prefix is rebound at the top of every iteration and
            # never read after the loop, so the statement had no effect.
        return '\n'.join(lines)
+
+
class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""
    # Raised when the running interpreter fails a distribution's
    # Requires-Python check (see _check_link_requires_python in index.py).
+
+
class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """When there are errors while loading a configuration file
    """

    def __init__(self, reason="could not be loaded", fname=None, error=None):
        # type: (str, Optional[str], Optional[Exception]) -> None
        super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
        self.reason = reason  # short phrase describing the failure
        self.fname = fname    # path of the offending file, if known
        self.error = error    # underlying exception, if any

    def __str__(self):
        # Either a file name or an underlying error must be present.
        if self.fname is not None:
            message_part = " in {}.".format(self.fname)
        else:
            assert self.error is not None
            # NOTE(review): relies on the underlying error exposing a
            # ``.message`` attribute (true for configparser.Error; a plain
            # Exception here would raise AttributeError) — confirm callers
            # only pass configparser errors.
            message_part = ".\n{}\n".format(self.error.message)
        return "Configuration file {}{}".format(self.reason, message_part)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py
new file mode 100644
index 00000000..a1aaad59
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/index.py
@@ -0,0 +1,1508 @@
+"""Routines related to PyPI, indexes"""
+from __future__ import absolute_import
+
+import cgi
+import itertools
+import logging
+import mimetypes
+import os
+import re
+
+from pip._vendor import html5lib, requests, six
+from pip._vendor.distlib.compat import unescape
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.download import is_url, url_to_path
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
+ UnsupportedWheel,
+)
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.compat import ipaddress
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, path_to_url,
+ redact_password_from_url,
+)
+from pip._internal.utils.packaging import check_requires_python
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import Wheel
+
+if MYPY_CHECK_RUNNING:
+ from logging import Logger
+ from typing import (
+ Any, Callable, FrozenSet, Iterable, Iterator, List, MutableMapping,
+ Optional, Sequence, Set, Text, Tuple, Union,
+ )
+ import xml.etree.ElementTree
+ from pip._vendor.packaging.version import _BaseVersion
+ from pip._vendor.requests import Response
+ from pip._internal.models.search_scope import SearchScope
+ from pip._internal.req import InstallRequirement
+ from pip._internal.download import PipSession
+ from pip._internal.pep425tags import Pep425Tag
+ from pip._internal.utils.hashes import Hashes
+
+ BuildTag = Tuple[Any, ...] # either empty tuple or Tuple[int, str]
+ CandidateSortingKey = (
+ Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]]
+ )
+ HTMLElement = xml.etree.ElementTree.Element
+ SecureOrigin = Tuple[str, str, Optional[str]]
+
+
# Public API of this module.
__all__ = ['FormatControl', 'FoundCandidates', 'PackageFinder']


# Origins that are considered trustworthy without HTTPS.  "*" is a
# wildcard; the hostname field may be a network in CIDR form (e.g.
# 127.0.0.0/8).
SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]  # type: List[SecureOrigin]


# Module-level logger; output is routed through pip's logging setup.
logger = logging.getLogger(__name__)
+
+
def _match_vcs_scheme(url):
    # type: (str) -> Optional[str]
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    # Imported lazily (matches the original) to avoid import cycles.
    from pip._internal.vcs import vcs
    lowered = url.lower()
    return next(
        (scheme for scheme in vcs.schemes
         # A scheme counts only when followed by '+' or ':' (e.g. "git+",
         # "git:"), exactly as the original loop required.
         if lowered.startswith(scheme) and url[len(scheme)] in '+:'),
        None,
    )
+
+
def _is_url_like_archive(url):
    # type: (str) -> bool
    """Return whether the URL's filename ends in a known archive extension."""
    filename = Link(url).filename
    return any(filename.endswith(bad_ext) for bad_ext in ARCHIVE_EXTENSIONS)
+
+
class _NotHTML(Exception):
    # Raised when a response advertises a non-text/html Content-Type.
    # Carries the offending content type and a description of the request
    # (callers pass the HTTP method) for logging.
    def __init__(self, content_type, request_desc):
        # type: (str, str) -> None
        # Forward both values to Exception so args/str() include them.
        super(_NotHTML, self).__init__(content_type, request_desc)
        self.content_type = content_type
        self.request_desc = request_desc
+
+
+def _ensure_html_header(response):
+ # type: (Response) -> None
+ """Check the Content-Type header to ensure the response contains HTML.
+
+ Raises `_NotHTML` if the content type is not text/html.
+ """
+ content_type = response.headers.get("Content-Type", "")
+ if not content_type.lower().startswith("text/html"):
+ raise _NotHTML(content_type, response.request.method)
+
+
class _NotHTTP(Exception):
    # Raised by _ensure_html_response() when a URL's scheme is neither
    # http nor https, so no HEAD probe can be made.
    pass
+
+
def _ensure_html_response(url, session):
    # type: (str, PipSession) -> None
    """Send a HEAD request to the URL, and ensure the response contains HTML.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotHTML` if the content type is not text/html.
    """
    # Only http(s) URLs can be probed with HEAD.
    scheme = urllib_parse.urlsplit(url)[0]
    if scheme not in {'http', 'https'}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    resp.raise_for_status()
    _ensure_html_header(resp)
+
+
def _get_html_response(url, session):
    # type: (str, PipSession) -> Response
    """Access an HTML page with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML, to avoid downloading a large file.
       Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotHTML` if it is not HTML.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got HTML, and raise
       `_NotHTML` otherwise.
    """
    # Cheap pre-flight: avoid GET-ing a large archive just to discover it
    # isn't an index page.
    if _is_url_like_archive(url):
        _ensure_html_response(url, session=session)

    logger.debug('Getting page %s', redact_password_from_url(url))

    resp = session.get(
        url,
        headers={
            "Accept": "text/html",
            # We don't want to blindly returned cached data for
            # /simple/, because authors generally expecting that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    resp.raise_for_status()

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is HTML
    # or not. However we can check after we've downloaded it.
    _ensure_html_header(resp)

    return resp
+
+
+def _handle_get_page_fail(
+ link, # type: Link
+ reason, # type: Union[str, Exception]
+ meth=None # type: Optional[Callable[..., None]]
+):
+ # type: (...) -> None
+ if meth is None:
+ meth = logger.debug
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+
def _get_html_page(link, session=None):
    # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
    """Fetch `link` and return it as an HTMLPage, or None if it cannot be
    fetched as HTML.

    VCS URLs, non-HTML responses, and network errors (HTTP errors, retry
    exhaustion, SSL problems, connection errors, timeouts) are all logged
    and swallowed, yielding None rather than raising.
    """
    if session is None:
        raise TypeError(
            "_get_html_page() missing 1 required keyword argument: 'session'"
        )

    # Drop any #fragment before fetching.
    url = link.url.split('#', 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
    if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith('/'):
            url += '/'
        url = urllib_parse.urljoin(url, 'index.html')
        logger.debug(' file: URL is directory, getting %s', url)

    try:
        resp = _get_html_response(url, session=session)
    except _NotHTTP:
        logger.debug(
            'Skipping page %s because it looks like an archive, and cannot '
            'be checked by HEAD.', link,
        )
    except _NotHTML as exc:
        logger.debug(
            'Skipping page %s because the %s request got Content-Type: %s',
            link, exc.request_desc, exc.content_type,
        )
    except HTTPError as exc:
        _handle_get_page_fail(link, exc)
    except RetryError as exc:
        _handle_get_page_fail(link, exc)
    except SSLError as exc:
        # SSL failures are surfaced at info level rather than debug.
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_page_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_page_fail(link, "connection error: %s" % exc)
    except requests.Timeout:
        _handle_get_page_fail(link, "timed out")
    else:
        # Success: wrap the body, final URL, and headers.
        return HTMLPage(resp.content, resp.url, resp.headers)
    return None
+
+
def _check_link_requires_python(
    link,  # type: Link
    version_info,  # type: Tuple[int, int, int]
    ignore_requires_python=False,  # type: bool
):
    # type: (...) -> bool
    """
    Return whether the given Python version is compatible with a link's
    "Requires-Python" value.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether to ignore the "Requires-Python"
        value if the given Python version isn't compatible.
    """
    try:
        is_compatible = check_requires_python(
            link.requires_python, version_info=version_info,
        )
    except specifiers.InvalidSpecifier:
        # An unparseable Requires-Python is treated as compatible (logged,
        # then falls through to the final `return True`).
        logger.debug(
            "Ignoring invalid Requires-Python (%r) for link: %s",
            link.requires_python, link,
        )
    else:
        if not is_compatible:
            version = '.'.join(map(str, version_info))
            if not ignore_requires_python:
                # Incompatible and not ignoring: reject the link.
                logger.debug(
                    'Link requires a different Python (%s not in: %r): %s',
                    version, link.requires_python, link,
                )
                return False

            # Incompatible but explicitly ignoring: log and accept anyway.
            logger.debug(
                'Ignoring failed Requires-Python check (%s not in: %r) '
                'for link: %s',
                version, link.requires_python, link,
            )

    return True
+
+
class LinkEvaluator(object):

    """
    Responsible for evaluating links for a particular project.

    Given a Link, decides whether it is an installation candidate for the
    configured project/Python/format constraints (see evaluate_link()).
    """

    # Matches a trailing "-pyXY" / "-pyX.Y" marker in a version fragment.
    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        project_name,  # type: str
        canonical_name,  # type: str
        formats,  # type: FrozenSet
        target_python,  # type: TargetPython
        allow_yanked,  # type: bool
        ignore_requires_python=None,  # type: Optional[bool]
    ):
        # type: (...) -> None
        """
        :param project_name: The user supplied package name.
        :param canonical_name: The canonical package name.
        :param formats: The formats allowed for this package. Should be a set
            with 'binary' or 'source' or both in it.
        :param target_python: The target Python interpreter to use when
            evaluating link compatibility. This is used, for example, to
            check wheel compatibility, as well as when checking the Python
            version, e.g. the Python version embedded in a link filename
            (or egg fragment) and against an HTML link's optional PEP 503
            "data-requires-python" attribute.
        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param ignore_requires_python: Whether to ignore incompatible
            PEP 503 "data-requires-python" values in HTML links. Defaults
            to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self._allow_yanked = allow_yanked
        self._canonical_name = canonical_name
        self._ignore_requires_python = ignore_requires_python
        self._formats = formats
        self._target_python = target_python

        self.project_name = project_name

    def evaluate_link(self, link):
        # type: (Link) -> Tuple[bool, Optional[Text]]
        """
        Determine whether a link is a candidate for installation.

        :return: A tuple (is_candidate, result), where `result` is (1) a
            version string if `is_candidate` is True, and (2) if
            `is_candidate` is False, an optional string to log the reason
            the link fails to qualify.
        """
        version = None
        if link.is_yanked and not self._allow_yanked:
            reason = link.yanked_reason or '<none given>'
            # Mark this as a unicode string to prevent "UnicodeEncodeError:
            # 'ascii' codec can't encode character" in Python 2 when
            # the reason contains non-ascii characters.
            return (False, u'yanked for reason: {}'.format(reason))

        # Prefer an explicit #egg= fragment for name/version info; fall
        # back to splitting the filename's extension.
        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                return (False, 'not a file')
            if ext not in SUPPORTED_EXTENSIONS:
                return (False, 'unsupported archive format: %s' % ext)
            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
                reason = 'No binaries permitted for %s' % self.project_name
                return (False, reason)
            # NOTE(review): blanket exclusion of .zip paths containing
            # "macosx10"; the rationale is not given here — appears to be a
            # legacy special case.
            if "macosx10" in link.path and ext == '.zip':
                return (False, 'macosx10 one')
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    return (False, 'invalid wheel filename')
                if canonicalize_name(wheel.name) != self._canonical_name:
                    reason = 'wrong project name (not %s)' % self.project_name
                    return (False, reason)

                supported_tags = self._target_python.get_tags()
                if not wheel.supported(supported_tags):
                    # Include the wheel's tags in the reason string to
                    # simplify troubleshooting compatibility issues.
                    file_tags = wheel.get_formatted_file_tags()
                    reason = (
                        "none of the wheel's tags match: {}".format(
                            ', '.join(file_tags)
                        )
                    )
                    return (False, reason)

                version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
            return (False, 'No sources permitted for %s' % self.project_name)

        # For non-wheels, pull the version out of the egg/filename fragment.
        if not version:
            version = _extract_version_from_fragment(
                egg_info, self._canonical_name,
            )
        if not version:
            return (
                False, 'Missing project version for %s' % self.project_name,
            )

        # Strip and check a trailing "-pyX.Y" marker against the target.
        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
                return (False, 'Python version is incorrect')

        supports_python = _check_link_requires_python(
            link, version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
            # Return None for the reason text to suppress calling
            # _log_skipped_link().
            return (False, None)

        logger.debug('Found link %s, version: %s', link, version)

        return (True, version)
+
+
def filter_unallowed_hashes(
    candidates,  # type: List[InstallationCandidate]
    hashes,  # type: Hashes
    project_name,  # type: str
):
    # type: (...) -> List[InstallationCandidate]
    """
    Filter out candidates whose hashes aren't allowed, and return a new
    list of candidates.

    If at least one candidate has an allowed hash, then all candidates with
    either an allowed hash or no hash specified are returned. Otherwise,
    the given candidates are returned.

    Including the candidates with no hash specified when there is a match
    allows a warning to be logged if there is a more preferred candidate
    with no hash specified. Returning all candidates in the case of no
    matches lets pip report the hash of the candidate that would otherwise
    have been installed (e.g. permitting the user to more easily update
    their requirements file with the desired hash).
    """
    if not hashes:
        logger.debug(
            'Given no hashes to check %s links for project %r: '
            'discarding no candidates',
            len(candidates),
            project_name,
        )
        # Make sure we're not returning back the given value.
        return list(candidates)

    matches_or_no_digest = []
    # Collect the non-matches for logging purposes.
    non_matches = []
    match_count = 0
    for candidate in candidates:
        link = candidate.link
        # Three-way split: no hash at all (kept), allowed hash (kept and
        # counted), disallowed hash (collected and skipped via continue).
        if not link.has_hash:
            pass
        elif link.is_hash_allowed(hashes=hashes):
            match_count += 1
        else:
            non_matches.append(candidate)
            continue

        matches_or_no_digest.append(candidate)

    if match_count:
        filtered = matches_or_no_digest
    else:
        # Make sure we're not returning back the given value.
        filtered = list(candidates)

    if len(filtered) == len(candidates):
        discard_message = 'discarding no candidates'
    else:
        discard_message = 'discarding {} non-matches:\n {}'.format(
            len(non_matches),
            '\n '.join(str(candidate.link) for candidate in non_matches)
        )

    logger.debug(
        'Checked %s links for project %r against %s hashes '
        '(%s matches, %s no digest): %s',
        len(candidates),
        project_name,
        hashes.digest_count,
        match_count,
        len(matches_or_no_digest) - match_count,
        discard_message
    )

    return filtered
+
+
class CandidatePreferences(object):

    """
    A small value object holding the preferences used when filtering and
    sorting InstallationCandidate objects.
    """

    def __init__(
        self,
        prefer_binary=False,  # type: bool
        allow_all_prereleases=False,  # type: bool
    ):
        # type: (...) -> None
        """
        :param prefer_binary: Whether wheels should sort ahead of source
            archives regardless of version.
        :param allow_all_prereleases: Whether to allow all pre-releases.
        """
        self.prefer_binary = prefer_binary
        self.allow_all_prereleases = allow_all_prereleases
+
+
class CandidateEvaluator(object):

    """
    Responsible for filtering and sorting candidates for installation based
    on what tags are valid.
    """

    @classmethod
    def create(
        cls,
        project_name,  # type: str
        target_python=None,  # type: Optional[TargetPython]
        prefer_binary=False,  # type: bool
        allow_all_prereleases=False,  # type: bool
        specifier=None,  # type: Optional[specifiers.BaseSpecifier]
        hashes=None,  # type: Optional[Hashes]
    ):
        # type: (...) -> CandidateEvaluator
        """Create a CandidateEvaluator object.

        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :param specifier: An optional version specifier; defaults to an
            empty SpecifierSet (i.e. accept any version).
        :param hashes: An optional collection of allowed hashes.
        """
        if target_python is None:
            target_python = TargetPython()
        if specifier is None:
            specifier = specifiers.SpecifierSet()

        supported_tags = target_python.get_tags()

        return cls(
            project_name=project_name,
            supported_tags=supported_tags,
            specifier=specifier,
            prefer_binary=prefer_binary,
            allow_all_prereleases=allow_all_prereleases,
            hashes=hashes,
        )

    def __init__(
        self,
        project_name,  # type: str
        supported_tags,  # type: List[Pep425Tag]
        specifier,  # type: specifiers.BaseSpecifier
        prefer_binary=False,  # type: bool
        allow_all_prereleases=False,  # type: bool
        hashes=None,  # type: Optional[Hashes]
    ):
        # type: (...) -> None
        """
        :param supported_tags: The PEP 425 tags supported by the target
            Python in order of preference (most preferred first).
        """
        self._allow_all_prereleases = allow_all_prereleases
        self._hashes = hashes
        self._prefer_binary = prefer_binary
        self._project_name = project_name
        self._specifier = specifier
        self._supported_tags = supported_tags

    def get_applicable_candidates(
        self,
        candidates,  # type: List[InstallationCandidate]
    ):
        # type: (...) -> List[InstallationCandidate]
        """
        Return the applicable candidates from a list of candidates:
        those whose version satisfies the specifier, further filtered
        by allowed hashes (see filter_unallowed_hashes).
        """
        # Using None infers from the specifier instead.
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier
        versions = {
            str(v) for v in specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                (str(c.version) for c in candidates),
                prereleases=allow_prereleases,
            )
        }

        # Again, converting version to str to deal with debundling.
        applicable_candidates = [
            c for c in candidates if str(c.version) in versions
        ]

        return filter_unallowed_hashes(
            candidates=applicable_candidates,
            hashes=self._hashes,
            project_name=self._project_name,
        )

    def make_found_candidates(
        self,
        candidates,  # type: List[InstallationCandidate]
    ):
        # type: (...) -> FoundCandidates
        """
        Create and return a `FoundCandidates` instance wrapping both the
        full candidate list and the applicable subset.
        """
        applicable_candidates = self.get_applicable_candidates(candidates)

        return FoundCandidates(
            candidates,
            applicable_candidates=applicable_candidates,
            evaluator=self,
        )

    def _sort_key(self, candidate):
        # type: (InstallationCandidate) -> CandidateSortingKey
        """
        Function to pass as the `key` argument to a call to sorted() to sort
        InstallationCandidates by preference.

        Returns a tuple such that tuples sorting as greater using Python's
        default comparison operator are more preferred.

        The preference is as follows:

        First and foremost, candidates with allowed (matching) hashes are
        always preferred over candidates without matching hashes. This is
        because e.g. if the only candidate with an allowed hash is yanked,
        we still want to use that candidate.

        Second, excepting hash considerations, candidates that have been
        yanked (in the sense of PEP 592) are always less preferred than
        candidates that haven't been yanked. Then:

        If not finding wheels, they are sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.

        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        valid_tags = self._supported_tags
        support_num = len(valid_tags)
        build_tag = tuple()  # type: BuildTag
        binary_preference = 0
        link = candidate.link
        if link.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            if not wheel.supported(valid_tags):
                raise UnsupportedWheel(
                    "%s is not a supported wheel for this platform. It "
                    "can't be sorted." % wheel.filename
                )
            if self._prefer_binary:
                binary_preference = 1
            # More-preferred tags give a higher (less negative) priority.
            pri = -(wheel.support_index_min(valid_tags))
            if wheel.build_tag is not None:
                # Split a build tag like "123abc" into (123, "abc") so it
                # sorts numerically-then-lexically.
                match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            # Sdists sort below every supported wheel.
            pri = -(support_num)
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash, yank_value, binary_preference, candidate.version,
            build_tag, pri,
        )

    def get_best_candidate(
        self,
        candidates,  # type: List[InstallationCandidate]
    ):
        # type: (...) -> Optional[InstallationCandidate]
        """
        Return the best candidate per the instance's sort order, or None if
        no candidate is acceptable.
        """
        if not candidates:
            return None

        best_candidate = max(candidates, key=self._sort_key)

        # Log a warning per PEP 592 if necessary before returning.
        link = best_candidate.link
        if link.is_yanked:
            reason = link.yanked_reason or '<none given>'
            msg = (
                # Mark this as a unicode string to prevent
                # "UnicodeEncodeError: 'ascii' codec can't encode character"
                # in Python 2 when the reason contains non-ascii characters.
                u'The candidate selected for download or install is a '
                'yanked version: {candidate}\n'
                'Reason for being yanked: {reason}'
            ).format(candidate=best_candidate, reason=reason)
            logger.warning(msg)

        return best_candidate
+
+
class FoundCandidates(object):
    """A collection of candidates, returned by `PackageFinder.find_candidates`.

    This class is only intended to be instantiated by CandidateEvaluator's
    `make_found_candidates()` method.
    """

    def __init__(
        self,
        candidates,  # type: List[InstallationCandidate]
        applicable_candidates,  # type: List[InstallationCandidate]
        evaluator,  # type: CandidateEvaluator
    ):
        # type: (...) -> None
        """
        :param candidates: A sequence of all available candidates found.
        :param applicable_candidates: The applicable candidates.
        :param evaluator: A CandidateEvaluator object to sort applicable
            candidates by order of preference.
        """
        self._candidates = candidates
        self._applicable_candidates = applicable_candidates
        self._evaluator = evaluator

    def iter_all(self):
        # type: () -> Iterable[InstallationCandidate]
        """Iterate through all candidates."""
        return iter(self._candidates)

    def iter_applicable(self):
        # type: () -> Iterable[InstallationCandidate]
        """Iterate through the applicable candidates."""
        return iter(self._applicable_candidates)

    def get_best(self):
        # type: () -> Optional[InstallationCandidate]
        """Return the best candidate available, or None if no applicable
        candidates are found.
        """
        applicable = list(self.iter_applicable())
        return self._evaluator.get_best_candidate(applicable)
+
+
+class PackageFinder(object):
+ """This finds packages.
+
+ This is meant to match easy_install's technique for looking for
+ packages, by reading pages and looking for appropriate links.
+ """
+
    def __init__(
        self,
        search_scope,  # type: SearchScope
        session,  # type: PipSession
        target_python,  # type: TargetPython
        allow_yanked,  # type: bool
        format_control=None,  # type: Optional[FormatControl]
        trusted_hosts=None,  # type: Optional[List[str]]
        candidate_prefs=None,  # type: CandidatePreferences
        ignore_requires_python=None,  # type: Optional[bool]
    ):
        # type: (...) -> None
        """
        This constructor is primarily meant to be used by the create() class
        method and from tests.

        :param session: The Session to use to make requests.
        :param format_control: A FormatControl object, used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param candidate_prefs: Options to use when creating a
            CandidateEvaluator object.
        """
        if trusted_hosts is None:
            trusted_hosts = []
        if candidate_prefs is None:
            candidate_prefs = CandidatePreferences()

        # Default to allowing neither binaries nor sources to be excluded.
        format_control = format_control or FormatControl(set(), set())

        self._allow_yanked = allow_yanked
        self._candidate_prefs = candidate_prefs
        self._ignore_requires_python = ignore_requires_python
        self._target_python = target_python

        self.search_scope = search_scope
        self.session = session
        self.format_control = format_control
        self.trusted_hosts = trusted_hosts

        # These are boring links that have already been logged somehow.
        self._logged_links = set()  # type: Set[Link]
+
    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    @classmethod
    def create(
        cls,
        search_scope,  # type: SearchScope
        selection_prefs,  # type: SelectionPreferences
        trusted_hosts=None,  # type: Optional[List[str]]
        session=None,  # type: Optional[PipSession]
        target_python=None,  # type: Optional[TargetPython]
    ):
        # type: (...) -> PackageFinder
        """Create a PackageFinder.

        :param selection_prefs: The candidate selection preferences, as a
            SelectionPreferences object.
        :param trusted_hosts: Domains not to emit warnings for when not using
            HTTPS.
        :param session: The Session to use to make requests.
        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :raises TypeError: if no session is provided (kept keyword-only by
            convention rather than syntax for Python 2 compatibility).
        """
        if session is None:
            raise TypeError(
                "PackageFinder.create() missing 1 required keyword argument: "
                "'session'"
            )
        if target_python is None:
            target_python = TargetPython()

        # Fold the relevant selection preferences into candidate prefs.
        candidate_prefs = CandidatePreferences(
            prefer_binary=selection_prefs.prefer_binary,
            allow_all_prereleases=selection_prefs.allow_all_prereleases,
        )

        return cls(
            candidate_prefs=candidate_prefs,
            search_scope=search_scope,
            session=session,
            target_python=target_python,
            allow_yanked=selection_prefs.allow_yanked,
            format_control=selection_prefs.format_control,
            trusted_hosts=trusted_hosts,
            ignore_requires_python=selection_prefs.ignore_requires_python,
        )
+
    @property
    def find_links(self):
        # type: () -> List[str]
        """The configured --find-links locations, from the search scope."""
        return self.search_scope.find_links
+
    @property
    def index_urls(self):
        # type: () -> List[str]
        """The configured package index URLs, from the search scope."""
        return self.search_scope.index_urls
+
    @property
    def allow_all_prereleases(self):
        # type: () -> bool
        """Whether pre-release versions may be considered as candidates."""
        return self._candidate_prefs.allow_all_prereleases
+
    def set_allow_all_prereleases(self):
        # type: () -> None
        """Enable consideration of pre-release candidates for this finder."""
        self._candidate_prefs.allow_all_prereleases = True
+
+ def add_trusted_host(self, host, source=None):
+ # type: (str, Optional[str]) -> None
+ """
+ :param source: An optional source string, for logging where the host
+ string came from.
+ """
+ # It is okay to add a previously added host because PipSession stores
+ # the resulting prefixes in a dict.
+ msg = 'adding trusted host: {!r}'.format(host)
+ if source is not None:
+ msg += ' (from {})'.format(source)
+ logger.info(msg)
+ self.session.add_insecure_host(host)
+ if host in self.trusted_hosts:
+ return
+
+ self.trusted_hosts.append(host)
+
+ def iter_secure_origins(self):
+ # type: () -> Iterator[SecureOrigin]
+ for secure_origin in SECURE_ORIGINS:
+ yield secure_origin
+ for host in self.trusted_hosts:
+ yield ('*', host, '*')
+
+ @staticmethod
+ def _sort_locations(locations, expand_dir=False):
+ # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
+ """
+ Sort locations into "files" (archives) and "urls", and return
+ a pair of lists (files,urls)
+ """
+ files = []
+ urls = []
+
+ # puts the url for the given file path into the appropriate list
+ def sort_path(path):
+ url = path_to_url(path)
+ if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
+ urls.append(url)
+ else:
+ files.append(url)
+
+ for url in locations:
+
+ is_local_path = os.path.exists(url)
+ is_file_url = url.startswith('file:')
+
+ if is_local_path or is_file_url:
+ if is_local_path:
+ path = url
+ else:
+ path = url_to_path(url)
+ if os.path.isdir(path):
+ if expand_dir:
+ path = os.path.realpath(path)
+ for item in os.listdir(path):
+ sort_path(os.path.join(path, item))
+ elif is_file_url:
+ urls.append(url)
+ else:
+ logger.warning(
+ "Path '{0}' is ignored: "
+ "it is a directory.".format(path),
+ )
+ elif os.path.isfile(path):
+ sort_path(path)
+ else:
+ logger.warning(
+ "Url '%s' is ignored: it is neither a file "
+ "nor a directory.", url,
+ )
+ elif is_url(url):
+ # Only add url with clear scheme
+ urls.append(url)
+ else:
+ logger.warning(
+ "Url '%s' is ignored. It is either a non-existing "
+ "path or lacks a specific scheme.", url,
+ )
+
+ return files, urls
+
    def _validate_secure_origin(self, logger, location):
        # type: (Logger, Link) -> bool
        """Return True if *location* comes from a secure or trusted origin.

        The URL's (protocol, host, port) origin is checked against the
        hardcoded SECURE_ORIGINS plus one wildcard entry per trusted host
        (see iter_secure_origins()). Rejected locations are reported with
        a warning.

        NOTE: the ``logger`` parameter shadows the module-level logger.
        """
        # Determine if this url used a secure transport mechanism
        parsed = urllib_parse.urlparse(str(location))
        origin = (parsed.scheme, parsed.hostname, parsed.port)

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        protocol = origin[0].rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in self.iter_secure_origins():
            if protocol != secure_origin[0] and secure_origin[0] != "*":
                continue

            try:
                # We need to do this decode dance to ensure that we have a
                # unicode object, even on Python 2.x.
                addr = ipaddress.ip_address(
                    origin[1]
                    if (
                        isinstance(origin[1], six.text_type) or
                        origin[1] is None
                    )
                    else origin[1].decode("utf8")
                )
                network = ipaddress.ip_network(
                    secure_origin[1]
                    if isinstance(secure_origin[1], six.text_type)
                    # setting secure_origin[1] to proper Union[bytes, str]
                    # creates problems in other places
                    else secure_origin[1].decode("utf8") # type: ignore
                )
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (origin[1] and
                        origin[1].lower() != secure_origin[1].lower() and
                        secure_origin[1] != "*"):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches
            if (origin[2] != secure_origin[2] and
                    secure_origin[2] != "*" and
                    secure_origin[2] is not None):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            parsed.hostname,
            parsed.hostname,
        )

        return False
+
+ def make_link_evaluator(self, project_name):
+ # type: (str) -> LinkEvaluator
+ canonical_name = canonicalize_name(project_name)
+ formats = self.format_control.get_allowed_formats(canonical_name)
+
+ return LinkEvaluator(
+ project_name=project_name,
+ canonical_name=canonical_name,
+ formats=formats,
+ target_python=self._target_python,
+ allow_yanked=self._allow_yanked,
+ ignore_requires_python=self._ignore_requires_python,
+ )
+
    def find_all_candidates(self, project_name):
        # type: (str) -> List[InstallationCandidate]
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See LinkEvaluator.evaluate_link() for details on which files
        are accepted.
        """
        search_scope = self.search_scope
        index_locations = search_scope.get_index_urls_locations(project_name)
        index_file_loc, index_url_loc = self._sort_locations(index_locations)
        fl_file_loc, fl_url_loc = self._sort_locations(
            self.find_links, expand_dir=True,
        )

        # Local archives found via either the index locations or find-links.
        file_locations = (Link(url) for url in itertools.chain(
            index_file_loc, fl_file_loc,
        ))

        # We trust every url that the user has given us whether it was given
        # via --index-url or --find-links.
        # We want to filter out any thing which does not have a secure origin.
        url_locations = [
            link for link in itertools.chain(
                (Link(url) for url in index_url_loc),
                (Link(url) for url in fl_url_loc),
            )
            if self._validate_secure_origin(logger, link)
        ]

        logger.debug('%d location(s) to search for versions of %s:',
                     len(url_locations), project_name)

        for location in url_locations:
            logger.debug('* %s', location)

        link_evaluator = self.make_link_evaluator(project_name)
        find_links_versions = self._package_versions(
            link_evaluator,
            # We trust every directly linked archive in find_links
            (Link(url, '-f') for url in self.find_links),
        )

        # Fetch each remote page and evaluate every link it contains.
        page_versions = []
        for page in self._get_pages(url_locations, project_name):
            logger.debug('Analyzing links from page %s', page.url)
            with indent_log():
                page_versions.extend(
                    self._package_versions(link_evaluator, page.iter_links())
                )

        file_versions = self._package_versions(link_evaluator, file_locations)
        if file_versions:
            file_versions.sort(reverse=True)
            logger.debug(
                'Local files found: %s',
                ', '.join([
                    url_to_path(candidate.link.url)
                    for candidate in file_versions
                ])
            )

        # This is an intentional priority ordering
        return file_versions + find_links_versions + page_versions
+
+ def make_candidate_evaluator(
+ self,
+ project_name, # type: str
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> CandidateEvaluator
+ """Create a CandidateEvaluator object to use.
+ """
+ candidate_prefs = self._candidate_prefs
+ return CandidateEvaluator.create(
+ project_name=project_name,
+ target_python=self._target_python,
+ prefer_binary=candidate_prefs.prefer_binary,
+ allow_all_prereleases=candidate_prefs.allow_all_prereleases,
+ specifier=specifier,
+ hashes=hashes,
+ )
+
+ def find_candidates(
+ self,
+ project_name, # type: str
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> FoundCandidates
+ """Find matches for the given project and specifier.
+
+ :param specifier: An optional object implementing `filter`
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+ versions.
+
+ :return: A `FoundCandidates` instance.
+ """
+ candidates = self.find_all_candidates(project_name)
+ candidate_evaluator = self.make_candidate_evaluator(
+ project_name=project_name,
+ specifier=specifier,
+ hashes=hashes,
+ )
+ return candidate_evaluator.make_found_candidates(candidates)
+
    def find_requirement(self, req, upgrade):
        # type: (InstallRequirement, bool) -> Optional[Link]
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Returns None if the installed version already satisfies req,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        hashes = req.hashes(trust_internet=False)
        candidates = self.find_candidates(
            req.name, specifier=req.specifier, hashes=hashes,
        )
        best_candidate = candidates.get_best()

        installed_version = None # type: Optional[_BaseVersion]
        if req.satisfied_by is not None:
            installed_version = parse_version(req.satisfied_by.version)

        def _format_versions(cand_iter):
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return ", ".join(sorted(
                {str(c.version) for c in cand_iter},
                key=parse_version,
            )) or "none"

        if installed_version is None and best_candidate is None:
            logger.critical(
                'Could not find a version that satisfies the requirement %s '
                '(from versions: %s)',
                req,
                _format_versions(candidates.iter_all()),
            )

            raise DistributionNotFound(
                'No matching distribution found for %s' % req
            )

        # The installed version wins when it is at least as recent as the
        # best remote candidate (or when there is no remote candidate).
        best_installed = False
        if installed_version and (
                best_candidate is None or
                best_candidate.version <= installed_version):
            best_installed = True

        if not upgrade and installed_version is not None:
            if best_installed:
                logger.debug(
                    'Existing installed version (%s) is most up-to-date and '
                    'satisfies requirement',
                    installed_version,
                )
            else:
                logger.debug(
                    'Existing installed version (%s) satisfies requirement '
                    '(most up-to-date version is %s)',
                    installed_version,
                    best_candidate.version,
                )
            return None

        if best_installed:
            # We have an existing version, and its the best version
            logger.debug(
                'Installed version (%s) is most up-to-date (past versions: '
                '%s)',
                installed_version,
                _format_versions(candidates.iter_applicable()),
            )
            raise BestVersionAlreadyInstalled

        logger.debug(
            'Using version %s (newest of versions: %s)',
            best_candidate.version,
            _format_versions(candidates.iter_applicable()),
        )
        return best_candidate.link
+
    def _get_pages(self, locations, project_name):
        # type: (Iterable[Link], str) -> Iterable[HTMLPage]
        """
        Yields an HTMLPage for each of the given locations, skipping
        duplicates and locations whose page could not be retrieved.

        NOTE: project_name is unused in this method; it is kept for
        interface symmetry with the caller.
        """
        seen = set() # type: Set[Link]
        for location in locations:
            if location in seen:
                continue
            seen.add(location)

            # _get_html_page() returns None on failure; skip such locations.
            page = _get_html_page(location, session=self.session)
            if page is None:
                continue

            yield page
+
+ def _sort_links(self, links):
+ # type: (Iterable[Link]) -> List[Link]
+ """
+ Returns elements of links in order, non-egg links first, egg links
+ second, while eliminating duplicates
+ """
+ eggs, no_eggs = [], []
+ seen = set() # type: Set[Link]
+ for link in links:
+ if link not in seen:
+ seen.add(link)
+ if link.egg_fragment:
+ eggs.append(link)
+ else:
+ no_eggs.append(link)
+ return no_eggs + eggs
+
+ def _log_skipped_link(self, link, reason):
+ # type: (Link, Text) -> None
+ if link not in self._logged_links:
+ # Mark this as a unicode string to prevent "UnicodeEncodeError:
+ # 'ascii' codec can't encode character" in Python 2 when
+ # the reason contains non-ascii characters.
+ # Also, put the link at the end so the reason is more visible
+ # and because the link string is usually very long.
+ logger.debug(u'Skipping link: %s: %s', reason, link)
+ self._logged_links.add(link)
+
+ def get_install_candidate(self, link_evaluator, link):
+ # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate]
+ """
+ If the link is a candidate for install, convert it to an
+ InstallationCandidate and return it. Otherwise, return None.
+ """
+ is_candidate, result = link_evaluator.evaluate_link(link)
+ if not is_candidate:
+ if result:
+ self._log_skipped_link(link, reason=result)
+ return None
+
+ return InstallationCandidate(
+ project=link_evaluator.project_name,
+ link=link,
+ # Convert the Text result to str since InstallationCandidate
+ # accepts str.
+ version=str(result),
+ )
+
+ def _package_versions(self, link_evaluator, links):
+ # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate]
+ result = []
+ for link in self._sort_links(links):
+ candidate = self.get_install_candidate(link_evaluator, link)
+ if candidate is not None:
+ result.append(candidate)
+ return result
+
+
def _find_name_version_sep(fragment, canonical_name):
    # type: (str, str) -> int
    """Return the index of the dash separating name and version in *fragment*.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This is needed because the canonicalized name does not necessarily have
    the same length as the name part of the fragment, so the position cannot
    be computed from the name's length alone. An example::

        >>> fragment = 'foo__bar-1.0'
        >>> canonical_name = 'foo-bar'
        >>> _find_name_version_sep(fragment, canonical_name)
        8
    """
    # Name and version are separated by one single dash.  Accept the first
    # dash whose preceding text canonicalizes to the expected project name.
    sep = fragment.find("-")
    while sep != -1:
        if canonicalize_name(fragment[:sep]) == canonical_name:
            return sep
        sep = fragment.find("-", sep + 1)
    raise ValueError("{} does not match {}".format(fragment, canonical_name))
+
+
def _extract_version_from_fragment(fragment, canonical_name):
    # type: (str, str) -> Optional[str]
    """Return the version part of a <package>+<version> filename "fragment"
    (stem) or egg fragment, or None if it cannot be determined.

    :param fragment: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.
    """
    try:
        sep_index = _find_name_version_sep(fragment, canonical_name)
    except ValueError:
        # The fragment does not start with this project's name at all.
        return None
    version = fragment[sep_index + 1:]
    # An empty version string is treated the same as no version.
    return version or None
+
+
+def _determine_base_url(document, page_url):
+ """Determine the HTML document's base URL.
+
+ This looks for a ``<base>`` tag in the HTML document. If present, its href
+ attribute denotes the base URL of anchor tags in the document. If there is
+ no such tag (or if it does not have a valid href attribute), the HTML
+ file's URL is used as the base URL.
+
+ :param document: An HTML document representation. The current
+ implementation expects the result of ``html5lib.parse()``.
+ :param page_url: The URL of the HTML document.
+ """
+ for base in document.findall(".//base"):
+ href = base.get("href")
+ if href is not None:
+ return href
+ return page_url
+
+
+def _get_encoding_from_headers(headers):
+ """Determine if we have any encoding information in our headers.
+ """
+ if headers and "Content-Type" in headers:
+ content_type, params = cgi.parse_header(headers["Content-Type"])
+ if "charset" in params:
+ return params['charset']
+ return None
+
+
def _clean_link(url):
    # type: (str) -> str
    """Makes sure a link is fully encoded. That is, if a ' ' shows up in
    the link, it will be rewritten to %20 (while not over-quoting
    % or other characters)."""
    # A URL has the general structure
    # `scheme://netloc/path;parameters?query#fragment`; an empty `netloc`
    # means the URI refers to a local filesystem path.
    parsed = urllib_parse.urlparse(url)
    if parsed.netloc:
        # Unquote before quoting so nothing ends up double-quoted.  `@` is
        # protected (alongside `/`) so that revision strings in VCS URLs
        # are properly parsed.
        cleaned_path = urllib_parse.quote(
            urllib_parse.unquote(parsed.path), safe="/@",
        )
    else:
        # Local path: on Windows the path may start with a drive letter
        # whose colon must not be quoted, while on Linux a colon should be
        # quoted.  urllib.request knows the platform rules, so round-trip
        # the path through it (unquoting happens inside url2pathname).
        cleaned_path = urllib_request.pathname2url(
            urllib_request.url2pathname(parsed.path),
        )
    return urllib_parse.urlunparse(parsed._replace(path=cleaned_path))
+
+
def _create_link_from_element(
    anchor, # type: HTMLElement
    page_url, # type: str
    base_url, # type: str
):
    # type: (...) -> Optional[Link]
    """
    Convert an anchor element in a simple repository page to a Link,
    or return None when the anchor has no usable href.
    """
    href = anchor.get("href")
    if not href:
        return None

    url = _clean_link(urllib_parse.urljoin(base_url, href))

    pyrequire = anchor.get('data-requires-python')
    if pyrequire:
        pyrequire = unescape(pyrequire)
    else:
        pyrequire = None

    yanked_reason = anchor.get('data-yanked')
    if yanked_reason:
        # This is a unicode string in Python 2 (and 3).
        yanked_reason = unescape(yanked_reason)

    return Link(
        url,
        comes_from=page_url,
        requires_python=pyrequire,
        yanked_reason=yanked_reason,
    )
+
+
class HTMLPage(object):
    """One fetched HTML page, together with the URL it came from."""

    def __init__(self, content, url, headers=None):
        # type: (bytes, str, MutableMapping[str, str]) -> None
        self.content = content
        self.url = url
        self.headers = headers

    def __str__(self):
        # Show the URL with any embedded password redacted.
        return redact_password_from_url(self.url)

    def iter_links(self):
        # type: () -> Iterable[Link]
        """Parse the page and yield a Link for every usable anchor element."""
        document = html5lib.parse(
            self.content,
            transport_encoding=_get_encoding_from_headers(self.headers),
            namespaceHTMLElements=False,
        )
        base_url = _determine_base_url(document, self.url)
        for anchor in document.findall(".//a"):
            candidate = _create_link_from_element(
                anchor,
                page_url=self.url,
                base_url=base_url,
            )
            if candidate is not None:
                yield candidate
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py
new file mode 100644
index 00000000..1d9229cb
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py
@@ -0,0 +1,457 @@
+"""Dependency Resolution
+
+The dependency resolution in pip is performed as follows:
+
+for top-level requirements:
+ a. only one spec allowed per project, regardless of conflicts or not.
+ otherwise a "double requirement" exception is raised
+ b. they override sub-dependency requirements.
+for sub-dependencies
+ a. "first found, wins" (where the order is breadth first)
+"""
+
+import logging
+import sys
+from collections import defaultdict
+from itertools import chain
+
+from pip._vendor.packaging import specifiers
+
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
+ UnsupportedPythonVersion,
+)
+from pip._internal.req.constructors import install_req_from_req_string
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ dist_in_usersite, ensure_dir, normalize_version_info,
+)
+from pip._internal.utils.packaging import (
+ check_requires_python, get_requires_python,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import DefaultDict, List, Optional, Set, Tuple
+ from pip._vendor import pkg_resources
+
+ from pip._internal.cache import WheelCache
+ from pip._internal.distributions import AbstractDistribution
+ from pip._internal.download import PipSession
+ from pip._internal.index import PackageFinder
+ from pip._internal.operations.prepare import RequirementPreparer
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.req.req_set import RequirementSet
+
+logger = logging.getLogger(__name__)
+
+
def _check_dist_requires_python(
    dist, # type: pkg_resources.Distribution
    version_info, # type: Tuple[int, int, int]
    ignore_requires_python=False, # type: bool
):
    # type: (...) -> None
    """
    Validate a distribution's "Requires-Python" against a Python version.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether an incompatible "Requires-Python"
        should merely be logged instead of raising.

    :raises UnsupportedPythonVersion: When the given Python version isn't
        compatible (and ignore_requires_python is false).
    """
    requires_python = get_requires_python(dist)
    try:
        compatible = check_requires_python(
            requires_python, version_info=version_info,
        )
    except specifiers.InvalidSpecifier as exc:
        # A malformed Requires-Python is the package's problem; warn and
        # treat the distribution as acceptable.
        logger.warning(
            "Package %r has an invalid Requires-Python: %s",
            dist.project_name, exc,
        )
        return

    if compatible:
        return

    version = '.'.join(map(str, version_info))
    if ignore_requires_python:
        logger.debug(
            'Ignoring failed Requires-Python check for package %r: '
            '%s not in %r',
            dist.project_name, version, requires_python,
        )
        return

    raise UnsupportedPythonVersion(
        'Package {!r} requires a different Python: {} not in {!r}'.format(
            dist.project_name, version, requires_python,
        ))
+
+
class Resolver(object):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(
        self,
        preparer, # type: RequirementPreparer
        session, # type: PipSession
        finder, # type: PackageFinder
        wheel_cache, # type: Optional[WheelCache]
        use_user_site, # type: bool
        ignore_dependencies, # type: bool
        ignore_installed, # type: bool
        ignore_requires_python, # type: bool
        force_reinstall, # type: bool
        isolated, # type: bool
        upgrade_strategy, # type: str
        use_pep517=None, # type: Optional[bool]
        py_version_info=None, # type: Optional[Tuple[int, ...]]
    ):
        # type: (...) -> None
        """
        :param py_version_info: The Python version to use for the
            Requires-Python checks; defaults to the running interpreter's
            version when None.
        """
        super(Resolver, self).__init__()
        assert upgrade_strategy in self._allowed_strategies

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        self._py_version_info = py_version_info

        self.preparer = preparer
        self.finder = finder
        self.session = session

        # NOTE: This would eventually be replaced with a cache that can give
        # information about both sdist and wheels transparently.
        self.wheel_cache = wheel_cache

        # This is set in resolve
        self.require_hashes = None # type: Optional[bool]

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.isolated = isolated
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site
        self.use_pep517 = use_pep517

        # Maps a parent requirement's name to the sub-requirements
        # discovered through it (consumed by get_installation_order()).
        self._discovered_dependencies = \
            defaultdict(list) # type: DefaultDict[str, List]

    def resolve(self, requirement_set):
        # type: (RequirementSet) -> None
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        # make the wheelhouse
        if self.preparer.wheel_download_dir:
            ensure_dir(self.preparer.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = (
            requirement_set.unnamed_requirements +
            list(requirement_set.requirements.values())
        )
        self.require_hashes = (
            requirement_set.require_hashes or
            any(req.has_hash_options for req in root_reqs)
        )

        # Display where finder is looking for packages
        search_scope = self.finder.search_scope
        locations = search_scope.get_formatted_locations()
        if locations:
            logger.info(locations)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = [] # type: List[InstallRequirement]
        hash_errors = HashErrors()
        # NOTE: _resolve_one() appends to discovered_reqs while this loop is
        # iterating over it (via chain), which yields breadth-first order.
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(
                    self._resolve_one(requirement_set, req)
                )
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        # type: (InstallRequirement) -> bool
        """Return whether the upgrade strategy permits upgrading *req*."""
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            return req.is_direct

    def _set_req_to_reinstall(self, req):
        # type: (InstallRequirement) -> None
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            req.conflicts_with = req.satisfied_by
        req.satisfied_by = None

    # XXX: Stop passing requirement_set for options
    def _check_skip_installed(self, req_to_install):
        # type: (InstallRequirement) -> Optional[str]
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return 'already satisfied, skipping upgrade'
            return 'already satisfied'

        # Check for the possibility of an upgrade.  For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return 'already up-to-date'
            except DistributionNotFound:
                # No distribution found, so we squash the error.  It will
                # be raised later when we re-try later to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _get_abstract_dist_for(self, req):
        # type: (InstallRequirement) -> AbstractDistribution
        """Takes a InstallRequirement and returns a single AbstractDist \
        representing a prepared variant of the same.
        """
        assert self.require_hashes is not None, (
            "require_hashes should have been set in Resolver.resolve()"
        )

        if req.editable:
            return self.preparer.prepare_editable_requirement(
                req, self.require_hashes, self.use_user_site, self.finder,
            )

        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req.satisfied_by is None
        skip_reason = self._check_skip_installed(req)

        if req.satisfied_by:
            return self.preparer.prepare_installed_requirement(
                req, self.require_hashes, skip_reason
            )

        upgrade_allowed = self._is_upgrade_allowed(req)
        abstract_dist = self.preparer.prepare_linked_requirement(
            req, self.session, self.finder, upgrade_allowed,
            self.require_hashes
        )

        # NOTE
        # The following portion is for determining if a certain package is
        # going to be re-installed/upgraded or not and reporting to the user.
        # This should probably get cleaned up in a future refactor.

        # req.req is only avail after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not self.ignore_installed:
            req.check_if_exists(self.use_user_site)

        if req.satisfied_by:
            should_modify = (
                self.upgrade_strategy != "to-satisfy-only" or
                self.force_reinstall or
                self.ignore_installed or
                req.link.scheme == 'file'
            )
            if should_modify:
                self._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use --upgrade to upgrade):'
                    ' %s', req,
                )

        return abstract_dist

    def _resolve_one(
        self,
        requirement_set, # type: RequirementSet
        req_to_install # type: InstallRequirement
    ):
        # type: (...) -> List[InstallRequirement]
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # register tmp src for cleanup in case something goes wrong
        requirement_set.reqs_to_cleanup.append(req_to_install)

        abstract_dist = self._get_abstract_dist_for(req_to_install)

        # Parse and return dependencies
        dist = abstract_dist.get_pkg_resources_distribution()
        # This will raise UnsupportedPythonVersion if the given Python
        # version isn't compatible with the distribution's Requires-Python.
        _check_dist_requires_python(
            dist, version_info=self._py_version_info,
            ignore_requires_python=self.ignore_requires_python,
        )

        more_reqs = [] # type: List[InstallRequirement]

        def add_req(subreq, extras_requested):
            # Record a discovered sub-requirement; anything that needs to be
            # re-scanned is accumulated into more_reqs.
            sub_install_req = install_req_from_req_string(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self.wheel_cache,
                use_pep517=self.use_pep517
            )
            parent_req_name = req_to_install.name
            to_scan_again, add_to_parent = requirement_set.add_requirement(
                sub_install_req,
                parent_req_name=parent_req_name,
                extras_requested=extras_requested,
            )
            if parent_req_name and add_to_parent:
                self._discovered_dependencies[parent_req_name].append(
                    add_to_parent
                )
            more_reqs.extend(to_scan_again)

        with indent_log():
            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not requirement_set.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                req_to_install.is_direct = True
                requirement_set.add_requirement(
                    req_to_install, parent_req_name=None,
                )

            if not self.ignore_dependencies:
                if req_to_install.extras:
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                requirement_set.successfully_downloaded.append(req_to_install)

        return more_reqs

    def get_installation_order(self, req_set):
        # type: (RequirementSet) -> List[InstallRequirement]
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set() # type: Set[InstallRequirement]

        def schedule(req):
            # Post-order DFS: a requirement's dependencies are appended
            # before the requirement itself.
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._discovered_dependencies[req.name]:
                schedule(dep)
            order.append(req)

        for install_req in req_set.requirements.values():
            schedule(install_req)
        return order
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py
new file mode 100644
index 00000000..5f843d79
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/locations.py
@@ -0,0 +1,142 @@
+"""Locations where we look for configs, install stuff, etc"""
+from __future__ import absolute_import
+
+import os
+import os.path
+import platform
+import site
+import sys
+import sysconfig
+from distutils import sysconfig as distutils_sysconfig
+from distutils.command.install import SCHEME_KEYS # type: ignore
+
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Union, Dict, List, Optional
+
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+
def get_src_prefix():
    """Return the absolute directory used for source checkouts ('src')."""
    if running_under_virtualenv():
        # under macOS + virtualenv sys.prefix is not properly resolved
        # (it is something like /path/to/python/bin/..), so take abspath.
        return os.path.abspath(os.path.join(sys.prefix, 'src'))

    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        cwd = os.getcwd()
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )
    return os.path.abspath(os.path.join(cwd, 'src'))
+
+
# FIXME doesn't account for venv linked to global site-packages

# Interpreter-wide site-packages directory (pure-Python packages).
site_packages = sysconfig.get_path("purelib")  # type: Optional[str]

# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    # Fallback for interpreters without site.getusersitepackages();
    # NOTE(review): site.USER_SITE may be uninitialised (None) here.
    user_site = site.USER_SITE

# Script directories: bin_py is the interpreter-wide location for console
# scripts, bin_user the per-user one.
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'
+
+
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    # type:(str, bool, str, str, bool, str) -> dict
    """
    Return a distutils install scheme

    :param dist_name: name of the distribution the scheme is computed for
        (used for the per-project 'headers' directory).
    :param user: compute the per-user scheme (mutually exclusive with
        ``prefix``).
    :param home: base directory of a "home" scheme installation.
    :param root: if given, re-root the 'headers' path under this directory
        (virtualenv case only, see below).
    :param isolated: if True, ignore the user's distutils config files
        (passes ``--no-user-cfg`` to the Distribution).
    :param prefix: alternative installation prefix (mutually exclusive
        with ``user`` and ``home``).
    :return: dict mapping each distutils SCHEME_KEY (purelib, platlib,
        headers, scripts, data) to its install path.
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}  # type: Dict[str, Union[str, List[str]]]
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    d.parse_config_files()
    # NOTE: Ignoring type since mypy can't find attributes on 'Command'
    i = d.get_command_obj('install', create=True)  # type: Any
    assert i is not None
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    assert not (home and prefix), "home={} prefix={}".format(home, prefix)
    i.user = user or i.user
    if user or home:
        # Clear the prefix so finalize_options() derives paths from the
        # user base / home directory instead.
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config

    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    if 'install_lib' in d.get_option_dict('install'):  # type: ignore
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # Compute a per-project headers dir inside the virtualenv rather
        # than the one distutils derived from the (real) prefix.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python' + sys.version[:3],
            dist_name,
        )

        if root is not None:
            # Re-root the headers path, dropping any drive letter so the
            # join under `root` works on Windows as well.
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py
new file mode 100644
index 00000000..7855226e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__init__.py
@@ -0,0 +1,2 @@
+"""A package that contains models that represent entities.
+"""
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..0c0e1078
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pyc
new file mode 100644
index 00000000..8c36fe7c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/candidate.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pyc
new file mode 100644
index 00000000..e74488f2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/format_control.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pyc
new file mode 100644
index 00000000..9286478b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/index.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pyc
new file mode 100644
index 00000000..921bce9c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/link.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pyc
new file mode 100644
index 00000000..13792322
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc
new file mode 100644
index 00000000..f40b57f7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pyc
new file mode 100644
index 00000000..d198a509
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/__pycache__/target_python.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 00000000..1b99690f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,36 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._vendor.packaging.version import _BaseVersion
+ from pip._internal.models.link import Link
+ from typing import Any
+
+
class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation.
    """

    def __init__(self, project, version, link):
        # type: (Any, str, Link) -> None
        self.project = project
        # Parse eagerly so candidates order by PEP 440 version semantics.
        self.version = parse_version(version)  # type: _BaseVersion
        self.link = link

        # Candidates compare and hash by the (project, version, link)
        # triple.
        super(InstallationCandidate, self).__init__(
            key=(self.project, self.version, self.link),
            defining_class=InstallationCandidate
        )

    def __repr__(self):
        # type: () -> str
        return '<InstallationCandidate(%r, %r, %r)>' % (
            self.project, self.version, self.link,
        )

    def __str__(self):
        # type: () -> str
        return '%r candidate (version %s at %s)' % (
            self.project, self.version, self.link,
        )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py
new file mode 100644
index 00000000..53138e48
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/format_control.py
@@ -0,0 +1,73 @@
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Set, FrozenSet
+
+
class FormatControl(object):
    """Helper for managing formats from which a package can be installed.

    ``no_binary`` and ``only_binary`` are sets of canonicalized package
    names; either may also contain the special marker ``:all:``.
    """

    def __init__(self, no_binary=None, only_binary=None):
        # type: (Optional[Set], Optional[Set]) -> None
        if no_binary is None:
            no_binary = set()
        if only_binary is None:
            only_binary = set()

        self.no_binary = no_binary
        self.only_binary = only_binary

    def __eq__(self, other):
        # BUG FIX: comparing against an arbitrary object used to raise
        # AttributeError when `other` had no __dict__ (e.g. an int).
        # Return NotImplemented so Python can fall back to the other
        # operand / identity comparison.
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Propagate NotImplemented instead of negating it: negating a
        # (truthy) NotImplemented would turn "incomparable" into False.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __repr__(self):
        return "{}({}, {})".format(
            self.__class__.__name__,
            self.no_binary,
            self.only_binary
        )

    @staticmethod
    def handle_mutual_excludes(value, target, other):
        # type: (str, Optional[Set], Optional[Set]) -> None
        """Merge comma-separated ``value`` into ``target``, keeping
        ``target`` and ``other`` mutually exclusive.

        ``:all:`` clears both sets and leaves ``target`` as {':all:'};
        ``:none:`` clears ``target``; any other name is canonicalized,
        discarded from ``other`` and added to ``target``.
        """
        new = value.split(',')
        while ':all:' in new:
            other.clear()
            target.clear()
            target.add(':all:')
            del new[:new.index(':all:') + 1]
            # Entries before a later :none: are moot, as :all: covers them;
            # only keep processing if a :none: follows.
            if ':none:' not in new:
                return
        for name in new:
            if name == ':none:':
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    def get_allowed_formats(self, canonical_name):
        # type: (str) -> FrozenSet
        """Return the subset of {"binary", "source"} allowed for a package.

        A package-specific entry takes precedence over an ``:all:`` marker.
        """
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard('source')
        elif canonical_name in self.no_binary:
            result.discard('binary')
        elif ':all:' in self.only_binary:
            result.discard('source')
        elif ':all:' in self.no_binary:
            result.discard('binary')
        return frozenset(result)

    def disallow_binaries(self):
        # type: () -> None
        """Mark every package source-only by putting ':all:' in no_binary."""
        self.handle_mutual_excludes(
            ':all:', self.no_binary, self.only_binary,
        )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py
new file mode 100644
index 00000000..ead1efbd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,31 @@
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+
class PackageIndex(object):
    """Represents a Package Index and provides easier access to endpoints
    """

    def __init__(self, url, file_storage_domain):
        # type: (str, str) -> None
        super(PackageIndex, self).__init__()
        self.url = url
        parts = urllib_parse.urlsplit(url)
        self.netloc = parts.netloc
        # Pre-compute the endpoints most callers need.
        self.simple_url = self._url_for_path('simple')
        self.pypi_url = self._url_for_path('pypi')

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path):
        # type: (str) -> str
        """Resolve *path* relative to the index root URL."""
        return urllib_parse.urljoin(self.url, path)


PyPI = PackageIndex(
    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py
new file mode 100644
index 00000000..d42be28c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,213 @@
+import posixpath
+import re
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.utils.misc import (
+ WHEEL_EXTENSION, path_to_url, redact_password_from_url,
+ split_auth_from_netloc, splitext,
+)
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Text, Tuple, Union
+ from pip._internal.index import HTMLPage
+ from pip._internal.utils.hashes import Hashes
+
+
class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL
    """

    def __init__(
        self,
        url,                   # type: str
        comes_from=None,       # type: Optional[Union[str, HTMLPage]]
        requires_python=None,  # type: Optional[str]
        yanked_reason=None,    # type: Optional[Text]
    ):
        # type: (...) -> None
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of HTMLPage where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self._parsed_url = urllib_parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url

        self.comes_from = comes_from
        # Normalize the empty string to None.
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason

        # Links compare and hash by their full URL string.
        super(Link, self).__init__(key=url, defining_class=Link)

    def __str__(self):
        # Note: the URL is redacted of any embedded password before display.
        if self.requires_python:
            rp = ' (requires-python:%s)' % self.requires_python
        else:
            rp = ''
        if self.comes_from:
            return '%s (from %s)%s' % (redact_password_from_url(self._url),
                                       self.comes_from, rp)
        else:
            return redact_password_from_url(str(self._url))

    def __repr__(self):
        return '<Link %s>' % self

    @property
    def url(self):
        # type: () -> str
        """The link's full URL (read-only)."""
        return self._url

    @property
    def filename(self):
        # type: () -> str
        """The file name implied by the URL's last path segment.

        Falls back to the netloc (stripped of credentials) when the URL
        has no path component.
        """
        path = self.path.rstrip('/')
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib_parse.unquote(name)
        assert name, ('URL %r produced no filename' % self._url)
        return name

    @property
    def scheme(self):
        # type: () -> str
        """The URL scheme, e.g. 'https' or a VCS scheme."""
        return self._parsed_url.scheme

    @property
    def netloc(self):
        # type: () -> str
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self):
        # type: () -> str
        """The URL path, percent-decoded."""
        return urllib_parse.unquote(self._parsed_url.path)

    def splitext(self):
        # type: () -> Tuple[str, str]
        """Split the basename of the path into (stem, extension)."""
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        # type: () -> str
        """The file extension of the path, including the leading dot."""
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        # type: () -> str
        """The URL with its #fragment removed."""
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))

    # Matches an egg name given in the URL fragment, e.g. "#egg=name".
    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        # type: () -> Optional[str]
        """The value of the #egg= fragment, or None if absent."""
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    # Matches a subdirectory given in the URL fragment.
    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        # type: () -> Optional[str]
        """The value of the #subdirectory= fragment, or None if absent."""
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    # Matches an embedded "<algorithm>=<hexdigest>" pair anywhere in the URL.
    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        # type: () -> Optional[str]
        """The hex digest embedded in the URL, or None if absent."""
        match = self._hash_re.search(self._url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        # type: () -> Optional[str]
        """The name of the hash algorithm embedded in the URL, or None."""
        match = self._hash_re.search(self._url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        # type: () -> Optional[str]
        """A short display form: the basename without query or fragment."""
        return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])

    @property
    def is_wheel(self):
        # type: () -> bool
        """Whether the link's extension marks it as a wheel file."""
        return self.ext == WHEEL_EXTENSION

    @property
    def is_artifact(self):
        # type: () -> bool
        """
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        """
        # Imported here to avoid a circular import at module load time.
        from pip._internal.vcs import vcs

        if self.scheme in vcs.all_schemes:
            return False

        return True

    @property
    def is_yanked(self):
        # type: () -> bool
        """Whether the file was yanked (PEP 592); see yanked_reason."""
        return self.yanked_reason is not None

    @property
    def has_hash(self):
        # type: () -> bool
        """Whether the URL embeds a recognized hash pair."""
        return self.hash_name is not None

    def is_hash_allowed(self, hashes):
        # type: (Optional[Hashes]) -> bool
        """
        Return True if the link has a hash and it is allowed.
        """
        if hashes is None or not self.has_hash:
            return False
        # Assert non-None so mypy knows self.hash_name and self.hash are str.
        assert self.hash_name is not None
        assert self.hash is not None

        return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py
new file mode 100644
index 00000000..62152449
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/search_scope.py
@@ -0,0 +1,113 @@
+import itertools
+import logging
+import os
+import posixpath
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import HAS_TLS
+from pip._internal.utils.misc import normalize_path, redact_password_from_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+
+logger = logging.getLogger(__name__)
+
+
class SearchScope(object):

    """
    Encapsulates the locations that pip is configured to search.
    """

    @classmethod
    def create(
        cls,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
    ):
        # type: (...) -> SearchScope
        """
        Create a SearchScope object after normalizing the `find_links`.
        """
        # An entry starting with ~ may be a local file relative to a home
        # directory, so expand it and keep the expansion only when it names
        # something that actually exists.  Deliberately conservative: it
        # might be fine to blindly normalize anything starting with ~.
        built_find_links = []  # type: List[str]
        for entry in find_links:
            if entry.startswith('~'):
                expanded = normalize_path(entry)
                if os.path.exists(expanded):
                    entry = expanded
            built_find_links.append(entry)

        # Warn (once) when TLS is unavailable but a configured location
        # relies on it.
        if not HAS_TLS:
            needs_tls = any(
                urllib_parse.urlparse(location).scheme == 'https'
                for location in itertools.chain(index_urls, built_find_links)
            )
            if needs_tls:
                logger.warning(
                    'pip is configured with locations that require '
                    'TLS/SSL, however the ssl module in Python is not '
                    'available.'
                )

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
        )

    def __init__(
        self,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
    ):
        # type: (...) -> None
        self.find_links = find_links
        self.index_urls = index_urls

    def get_formatted_locations(self):
        # type: () -> str
        """Return a human-readable summary of the configured locations,
        with any passwords embedded in the URLs redacted."""
        lines = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            redacted_indexes = ', '.join(
                redact_password_from_url(url) for url in self.index_urls
            )
            lines.append('Looking in indexes: {}'.format(redacted_indexes))
        if self.find_links:
            redacted_links = ', '.join(
                redact_password_from_url(url) for url in self.find_links
            )
            lines.append('Looking in links: {}'.format(redacted_links))
        return '\n'.join(lines)

    def get_index_urls_locations(self, project_name):
        # type: (str) -> List[str]
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """
        # The quoted, canonicalized project segment is the same for every
        # index URL, so compute it once.
        project_segment = urllib_parse.quote(canonicalize_name(project_name))

        def project_url(index_url):
            location = posixpath.join(index_url, project_segment)
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not location.endswith('/'):
                location = location + '/'
            return location

        return [project_url(url) for url in self.index_urls]
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py
new file mode 100644
index 00000000..f58fdce9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py
@@ -0,0 +1,47 @@
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+ from pip._internal.models.format_control import FormatControl
+
+
class SelectionPreferences(object):

    """
    Encapsulates the candidate selection preferences for downloading
    and installing files.
    """

    # `allow_yanked` deliberately has no default value: every call site
    # must decide explicitly whether yanked releases are acceptable,
    # which keeps that decision visible in the calling code.
    def __init__(
        self,
        allow_yanked,  # type: bool
        allow_all_prereleases=False,  # type: bool
        format_control=None,  # type: Optional[FormatControl]
        prefer_binary=False,  # type: bool
        ignore_requires_python=None,  # type: Optional[bool]
    ):
        # type: (...) -> None
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        # Treat "not specified" (None) the same as False.
        self.ignore_requires_python = (
            False if ignore_requires_python is None else ignore_requires_python
        )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py
new file mode 100644
index 00000000..a23b79c4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/models/target_python.py
@@ -0,0 +1,106 @@
+import sys
+
+from pip._internal.pep425tags import get_supported, version_info_to_nodot
+from pip._internal.utils.misc import normalize_version_info
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Tuple
+ from pip._internal.pep425tags import Pep425Tag
+
+
class TargetPython(object):

    """
    Encapsulates the properties of a Python interpreter one is targeting
    for a package install, download, etc.
    """

    def __init__(
        self,
        platform=None,  # type: Optional[str]
        py_version_info=None,  # type: Optional[Tuple[int, ...]]
        abi=None,  # type: Optional[str]
        implementation=None,  # type: Optional[str]
    ):
        # type: (...) -> None
        """
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abi: A string or None. This is passed to pep425tags.py's
            get_supported() function as is.
        :param implementation: A string or None. This is passed to
            pep425tags.py's get_supported() function as is.
        """
        # Remember exactly what the caller passed: get_tags() must
        # distinguish "not given" from an explicit version.
        self._given_py_version_info = py_version_info

        if py_version_info is None:
            version_info = sys.version_info[:3]
        else:
            version_info = normalize_version_info(py_version_info)

        self.abi = abi
        self.implementation = implementation
        self.platform = platform
        self.py_version = '.'.join(str(part) for part in version_info[:2])
        self.py_version_info = version_info

        # Lazily-filled cache for get_tags().
        self._valid_tags = None  # type: Optional[List[Pep425Tag]]

    def format_given(self):
        # type: () -> str
        """
        Format the given, non-None attributes for display.
        """
        if self._given_py_version_info is None:
            display_version = None
        else:
            display_version = '.'.join(
                str(part) for part in self._given_py_version_info
            )

        attributes = (
            ('platform', self.platform),
            ('version_info', display_version),
            ('abi', self.abi),
            ('implementation', self.implementation),
        )
        return ' '.join(
            '{}={!r}'.format(name, value)
            for name, value in attributes
            if value is not None
        )

    def get_tags(self):
        # type: () -> List[Pep425Tag]
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        """
        if self._valid_tags is not None:
            return self._valid_tags

        # versions=None triggers get_supported()'s own default logic, so
        # only pass an explicit version when one was actually given.
        if self._given_py_version_info is None:
            versions = None
        else:
            versions = [version_info_to_nodot(self._given_py_version_info)]

        self._valid_tags = get_supported(
            versions=versions,
            platform=self.platform,
            abi=self.abi,
            impl=self.implementation,
        )
        return self._valid_tags
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..2020fda1
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pyc
new file mode 100644
index 00000000..28338255
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/check.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc
new file mode 100644
index 00000000..710c275d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc
new file mode 100644
index 00000000..48ba3172
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py
new file mode 100644
index 00000000..7b8b369f
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/check.py
@@ -0,0 +1,159 @@
+"""Validation of dependencies of packages
+"""
+
+import logging
+from collections import namedtuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import RequirementParseError
+
+from pip._internal.distributions import (
+ make_distribution_for_install_requirement,
+)
+from pip._internal.utils.misc import get_installed_distributions
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+logger = logging.getLogger(__name__)
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.req.req_install import InstallRequirement
+ from typing import (
+ Any, Callable, Dict, Optional, Set, Tuple, List
+ )
+
+ # Shorthands
+ PackageSet = Dict[str, 'PackageDetails']
+ Missing = Tuple[str, Any]
+ Conflicting = Tuple[str, str, Any]
+
+ MissingDict = Dict[str, List[Missing]]
+ ConflictingDict = Dict[str, List[Conflicting]]
+ CheckResult = Tuple[MissingDict, ConflictingDict]
+
+PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
+
+
+def create_package_set_from_installed(**kwargs):
+ # type: (**Any) -> Tuple[PackageSet, bool]
+ """Converts a list of distributions into a PackageSet.
+ """
+ # Default to using all packages installed on the system
+ if kwargs == {}:
+ kwargs = {"local_only": False, "skip": ()}
+
+ package_set = {}
+ problems = False
+ for dist in get_installed_distributions(**kwargs):
+ name = canonicalize_name(dist.project_name)
+ try:
+ package_set[name] = PackageDetails(dist.version, dist.requires())
+ except RequirementParseError as e:
+ # Don't crash on broken metadata
+ logging.warning("Error parsing requirements for %s: %s", name, e)
+ problems = True
+ return package_set, problems
+
+
+def check_package_set(package_set, should_ignore=None):
+ # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
+ """Check if a package set is consistent
+
+ If should_ignore is passed, it should be a callable that takes a
+ package name and returns a boolean.
+ """
+ if should_ignore is None:
+ def should_ignore(name):
+ return False
+
+ missing = dict()
+ conflicting = dict()
+
+ for package_name in package_set:
+ # Info about dependencies of package_name
+ missing_deps = set() # type: Set[Missing]
+ conflicting_deps = set() # type: Set[Conflicting]
+
+ if should_ignore(package_name):
+ continue
+
+ for req in package_set[package_name].requires:
+ name = canonicalize_name(req.project_name) # type: str
+
+ # Check if it's missing
+ if name not in package_set:
+ missed = True
+ if req.marker is not None:
+ missed = req.marker.evaluate()
+ if missed:
+ missing_deps.add((name, req))
+ continue
+
+ # Check if there's a conflict
+ version = package_set[name].version # type: str
+ if not req.specifier.contains(version, prereleases=True):
+ conflicting_deps.add((name, version, req))
+
+ if missing_deps:
+ missing[package_name] = sorted(missing_deps, key=str)
+ if conflicting_deps:
+ conflicting[package_name] = sorted(conflicting_deps, key=str)
+
+ return missing, conflicting
+
+
+def check_install_conflicts(to_install):
+ # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
+ """For checking if the dependency graph would be consistent after \
+ installing given requirements
+ """
+ # Start from the current state
+ package_set, _ = create_package_set_from_installed()
+ # Install packages
+ would_be_installed = _simulate_installation_of(to_install, package_set)
+
+ # Only warn about directly-dependent packages; create a whitelist of them
+ whitelist = _create_whitelist(would_be_installed, package_set)
+
+ return (
+ package_set,
+ check_package_set(
+ package_set, should_ignore=lambda name: name not in whitelist
+ )
+ )
+
+
+def _simulate_installation_of(to_install, package_set):
+ # type: (List[InstallRequirement], PackageSet) -> Set[str]
+ """Computes the version of packages after installing to_install.
+ """
+
+ # Keep track of packages that were installed
+ installed = set()
+
+ # Modify it as installing requirement_set would (assuming no errors)
+ for inst_req in to_install:
+ abstract_dist = make_distribution_for_install_requirement(inst_req)
+ dist = abstract_dist.get_pkg_resources_distribution()
+
+ name = canonicalize_name(dist.key)
+ package_set[name] = PackageDetails(dist.version, dist.requires())
+
+ installed.add(name)
+
+ return installed
+
+
+def _create_whitelist(would_be_installed, package_set):
+ # type: (Set[str], PackageSet) -> Set[str]
+ packages_affected = set(would_be_installed)
+
+ for package_name in package_set:
+ if package_name in packages_affected:
+ continue
+
+ for req in package_set[package_name].requires:
+ if canonicalize_name(req.name) in packages_affected:
+ packages_affected.add(package_name)
+ break
+
+ return packages_affected
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py
new file mode 100644
index 00000000..6f5a3dd9
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/freeze.py
@@ -0,0 +1,253 @@
+from __future__ import absolute_import
+
+import collections
+import logging
+import os
+import re
+
+from pip._vendor import six
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import RequirementParseError
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.req.constructors import (
+ install_req_from_editable, install_req_from_line,
+)
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.misc import (
+ dist_is_editable, get_installed_distributions,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
+ )
+ from pip._internal.cache import WheelCache
+ from pip._vendor.pkg_resources import (
+ Distribution, Requirement
+ )
+
+ RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
+
+
+logger = logging.getLogger(__name__)
+
+
+def freeze(
+ requirement=None, # type: Optional[List[str]]
+ find_links=None, # type: Optional[List[str]]
+ local_only=None, # type: Optional[bool]
+ user_only=None, # type: Optional[bool]
+ paths=None, # type: Optional[List[str]]
+ skip_regex=None, # type: Optional[str]
+ isolated=False, # type: bool
+ wheel_cache=None, # type: Optional[WheelCache]
+ exclude_editable=False, # type: bool
+ skip=() # type: Container[str]
+):
+ # type: (...) -> Iterator[str]
+ find_links = find_links or []
+ skip_match = None
+
+ if skip_regex:
+ skip_match = re.compile(skip_regex).search
+
+ for link in find_links:
+ yield '-f %s' % link
+ installations = {} # type: Dict[str, FrozenRequirement]
+ for dist in get_installed_distributions(local_only=local_only,
+ skip=(),
+ user_only=user_only,
+ paths=paths):
+ try:
+ req = FrozenRequirement.from_dist(dist)
+ except RequirementParseError as exc:
+ # We include dist rather than dist.project_name because the
+ # dist string includes more information, like the version and
+ # location. We also include the exception message to aid
+ # troubleshooting.
+ logger.warning(
+ 'Could not generate requirement for distribution %r: %s',
+ dist, exc
+ )
+ continue
+ if exclude_editable and req.editable:
+ continue
+ installations[req.name] = req
+
+ if requirement:
+ # the options that don't get turned into an InstallRequirement
+ # should only be emitted once, even if the same option is in multiple
+ # requirements files, so we need to keep track of what has been emitted
+ # so that we don't emit it again if it's seen again
+ emitted_options = set() # type: Set[str]
+ # keep track of which files a requirement is in so that we can
+ # give an accurate warning if a requirement appears multiple times.
+ req_files = collections.defaultdict(list) # type: Dict[str, List[str]]
+ for req_file_path in requirement:
+ with open(req_file_path) as req_file:
+ for line in req_file:
+ if (not line.strip() or
+ line.strip().startswith('#') or
+ (skip_match and skip_match(line)) or
+ line.startswith((
+ '-r', '--requirement',
+ '-Z', '--always-unzip',
+ '-f', '--find-links',
+ '-i', '--index-url',
+ '--pre',
+ '--trusted-host',
+ '--process-dependency-links',
+ '--extra-index-url'))):
+ line = line.rstrip()
+ if line not in emitted_options:
+ emitted_options.add(line)
+ yield line
+ continue
+
+ if line.startswith('-e') or line.startswith('--editable'):
+ if line.startswith('-e'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--editable'):].strip().lstrip('=')
+ line_req = install_req_from_editable(
+ line,
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+ else:
+ line_req = install_req_from_line(
+ COMMENT_RE.sub('', line).strip(),
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+
+ if not line_req.name:
+ logger.info(
+ "Skipping line in requirement file [%s] because "
+ "it's not clear what it would install: %s",
+ req_file_path, line.strip(),
+ )
+ logger.info(
+ " (add #egg=PackageName to the URL to avoid"
+ " this warning)"
+ )
+ elif line_req.name not in installations:
+ # either it's not installed, or it is installed
+ # but has been processed already
+ if not req_files[line_req.name]:
+ logger.warning(
+ "Requirement file [%s] contains %s, but "
+ "package %r is not installed",
+ req_file_path,
+ COMMENT_RE.sub('', line).strip(), line_req.name
+ )
+ else:
+ req_files[line_req.name].append(req_file_path)
+ else:
+ yield str(installations[line_req.name]).rstrip()
+ del installations[line_req.name]
+ req_files[line_req.name].append(req_file_path)
+
+ # Warn about requirements that were included multiple times (in a
+ # single requirements file or in different requirements files).
+ for name, files in six.iteritems(req_files):
+ if len(files) > 1:
+ logger.warning("Requirement %s included multiple times [%s]",
+ name, ', '.join(sorted(set(files))))
+
+ yield(
+ '## The following requirements were added by '
+ 'pip freeze:'
+ )
+ for installation in sorted(
+ installations.values(), key=lambda x: x.name.lower()):
+ if canonicalize_name(installation.name) not in skip:
+ yield str(installation).rstrip()
+
+
+def get_requirement_info(dist):
+ # type: (Distribution) -> RequirementInfo
+ """
+ Compute and return values (req, editable, comments) for use in
+ FrozenRequirement.from_dist().
+ """
+ if not dist_is_editable(dist):
+ return (None, False, [])
+
+ location = os.path.normcase(os.path.abspath(dist.location))
+
+ from pip._internal.vcs import vcs, RemoteNotFoundError
+ vcs_backend = vcs.get_backend_for_dir(location)
+
+ if vcs_backend is None:
+ req = dist.as_requirement()
+ logger.debug(
+ 'No VCS found for editable requirement "%s" in: %r', req,
+ location,
+ )
+ comments = [
+ '# Editable install with no version control ({})'.format(req)
+ ]
+ return (location, True, comments)
+
+ try:
+ req = vcs_backend.get_src_requirement(location, dist.project_name)
+ except RemoteNotFoundError:
+ req = dist.as_requirement()
+ comments = [
+ '# Editable {} install with no remote ({})'.format(
+ type(vcs_backend).__name__, req,
+ )
+ ]
+ return (location, True, comments)
+
+ except BadCommand:
+ logger.warning(
+ 'cannot determine version of editable source in %s '
+ '(%s command not found in path)',
+ location,
+ vcs_backend.name,
+ )
+ return (None, True, [])
+
+ except InstallationError as exc:
+ logger.warning(
+ "Error when trying to get requirement for VCS system %s, "
+ "falling back to uneditable format", exc
+ )
+ else:
+ if req is not None:
+ return (req, True, [])
+
+ logger.warning(
+ 'Could not determine repository location of %s', location
+ )
+ comments = ['## !! Could not determine repository location']
+
+ return (None, False, comments)
+
+
+class FrozenRequirement(object):
+ def __init__(self, name, req, editable, comments=()):
+ # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
+ self.name = name
+ self.req = req
+ self.editable = editable
+ self.comments = comments
+
+ @classmethod
+ def from_dist(cls, dist):
+ # type: (Distribution) -> FrozenRequirement
+ req, editable, comments = get_requirement_info(dist)
+ if req is None:
+ req = dist.as_requirement()
+
+ return cls(dist.project_name, req, editable, comments=comments)
+
+ def __str__(self):
+ req = self.req
+ if self.editable:
+ req = '-e %s' % req
+ return '\n'.join(list(self.comments) + [str(req)]) + '\n'
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py
new file mode 100644
index 00000000..6cf5f0ed
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/operations/prepare.py
@@ -0,0 +1,287 @@
+"""Prepares a distribution for installation
+"""
+
+import logging
+import os
+
+from pip._vendor import requests
+
+from pip._internal.distributions import (
+ make_distribution_for_install_requirement,
+)
+from pip._internal.distributions.installed import InstalledDistribution
+from pip._internal.download import (
+ is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
+)
+from pip._internal.exceptions import (
+ DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
+ PreviousBuildDirError, VcsHashUnsupported,
+)
+from pip._internal.utils.compat import expanduser
+from pip._internal.utils.hashes import MissingHashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import display_path, normalize_path
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+
+ from pip._internal.distributions import AbstractDistribution
+ from pip._internal.download import PipSession
+ from pip._internal.index import PackageFinder
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.req.req_tracker import RequirementTracker
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementPreparer(object):
+ """Prepares a Requirement
+ """
+
+ def __init__(
+ self,
+ build_dir, # type: str
+ download_dir, # type: Optional[str]
+ src_dir, # type: str
+ wheel_download_dir, # type: Optional[str]
+ progress_bar, # type: str
+ build_isolation, # type: bool
+ req_tracker # type: RequirementTracker
+ ):
+ # type: (...) -> None
+ super(RequirementPreparer, self).__init__()
+
+ self.src_dir = src_dir
+ self.build_dir = build_dir
+ self.req_tracker = req_tracker
+
+ # Where still packed archives should be written to. If None, they are
+ # not saved, and are deleted immediately after unpacking.
+ self.download_dir = download_dir
+
+ # Where still-packed .whl files should be written to. If None, they are
+ # written to the download_dir parameter. Separate to download_dir to
+ # permit only keeping wheel archives for pip wheel.
+ if wheel_download_dir:
+ wheel_download_dir = normalize_path(wheel_download_dir)
+ self.wheel_download_dir = wheel_download_dir
+
+ # NOTE
+ # download_dir and wheel_download_dir overlap semantically and may
+ # be combined if we're willing to have non-wheel archives present in
+ # the wheelhouse output by 'pip wheel'.
+
+ self.progress_bar = progress_bar
+
+ # Is build isolation allowed?
+ self.build_isolation = build_isolation
+
+ @property
+ def _download_should_save(self):
+ # type: () -> bool
+ # TODO: Modify to reduce indentation needed
+ if self.download_dir:
+ self.download_dir = expanduser(self.download_dir)
+ if os.path.exists(self.download_dir):
+ return True
+ else:
+ logger.critical('Could not find download directory')
+ raise InstallationError(
+ "Could not find or access download directory '%s'"
+ % display_path(self.download_dir))
+ return False
+
+ def prepare_linked_requirement(
+ self,
+ req, # type: InstallRequirement
+ session, # type: PipSession
+ finder, # type: PackageFinder
+ upgrade_allowed, # type: bool
+ require_hashes # type: bool
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare a requirement that would be obtained from req.link
+ """
+ # TODO: Breakup into smaller functions
+ if req.link and req.link.scheme == 'file':
+ path = url_to_path(req.link.url)
+ logger.info('Processing %s', display_path(path))
+ else:
+ logger.info('Collecting %s', req)
+
+ with indent_log():
+ # @@ if filesystem packages are not marked
+ # editable in a req, a non deterministic error
+ # occurs when the script attempts to unpack the
+ # build directory
+ req.ensure_has_source_dir(self.build_dir)
+ # If a checkout exists, it's unwise to keep going. version
+ # inconsistencies are logged later, but do not fail the
+ # installation.
+ # FIXME: this won't upgrade when there's an existing
+ # package unpacked in `req.source_dir`
+ # package unpacked in `req.source_dir`
+ if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
+ raise PreviousBuildDirError(
+ "pip can't proceed with requirements '%s' due to a"
+ " pre-existing build directory (%s). This is "
+ "likely due to a previous installation that failed"
+ ". pip is being responsible and not assuming it "
+ "can delete this. Please delete it and try again."
+ % (req, req.source_dir)
+ )
+ req.populate_link(finder, upgrade_allowed, require_hashes)
+
+ # We can't hit this spot and have populate_link return None.
+ # req.satisfied_by is None here (because we're
+ # guarded) and upgrade has no impact except when satisfied_by
+ # is not None.
+ # Then inside find_requirement existing_applicable -> False
+ # If no new versions are found, DistributionNotFound is raised,
+ # otherwise a result is guaranteed.
+ assert req.link
+ link = req.link
+
+ # Now that we have the real link, we can tell what kind of
+ # requirements we have and raise some more informative errors
+ # than otherwise. (For example, we can raise VcsHashUnsupported
+ # for a VCS URL rather than HashMissing.)
+ if require_hashes:
+ # We could check these first 2 conditions inside
+ # unpack_url and save repetition of conditions, but then
+ # we would report less-useful error messages for
+ # unhashable requirements, complaining that there's no
+ # hash provided.
+ if is_vcs_url(link):
+ raise VcsHashUnsupported()
+ elif is_file_url(link) and is_dir_url(link):
+ raise DirectoryUrlHashUnsupported()
+ if not req.original_link and not req.is_pinned:
+ # Unpinned packages are asking for trouble when a new
+ # version is uploaded. This isn't a security check, but
+ # it saves users a surprising hash mismatch in the
+ # future.
+ #
+ # file:/// URLs aren't pinnable, so don't complain
+ # about them not being pinned.
+ raise HashUnpinned()
+
+ hashes = req.hashes(trust_internet=not require_hashes)
+ if require_hashes and not hashes:
+ # Known-good hashes are missing for this requirement, so
+ # shim it with a facade object that will provoke hash
+ # computation and then raise a HashMissing exception
+ # showing the user what the hash should be.
+ hashes = MissingHashes()
+
+ try:
+ download_dir = self.download_dir
+ # We always delete unpacked sdists after pip ran.
+ autodelete_unpacked = True
+ if req.link.is_wheel and self.wheel_download_dir:
+ # when doing 'pip wheel` we download wheels to a
+ # dedicated dir.
+ download_dir = self.wheel_download_dir
+ if req.link.is_wheel:
+ if download_dir:
+ # When downloading, we only unpack wheels to get
+ # metadata.
+ autodelete_unpacked = True
+ else:
+ # When installing a wheel, we use the unpacked
+ # wheel.
+ autodelete_unpacked = False
+ unpack_url(
+ req.link, req.source_dir,
+ download_dir, autodelete_unpacked,
+ session=session, hashes=hashes,
+ progress_bar=self.progress_bar
+ )
+ except requests.HTTPError as exc:
+ logger.critical(
+ 'Could not install requirement %s because of error %s',
+ req,
+ exc,
+ )
+ raise InstallationError(
+ 'Could not install requirement %s because of HTTP '
+ 'error %s for URL %s' %
+ (req, exc, req.link)
+ )
+ abstract_dist = make_distribution_for_install_requirement(req)
+ with self.req_tracker.track(req):
+ abstract_dist.prepare_distribution_metadata(
+ finder, self.build_isolation,
+ )
+ if self._download_should_save:
+ # Make a .zip of the source_dir we already created.
+ if not req.link.is_artifact:
+ req.archive(self.download_dir)
+ return abstract_dist
+
+ def prepare_editable_requirement(
+ self,
+ req, # type: InstallRequirement
+ require_hashes, # type: bool
+ use_user_site, # type: bool
+ finder # type: PackageFinder
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare an editable requirement
+ """
+ assert req.editable, "cannot prepare a non-editable req as editable"
+
+ logger.info('Obtaining %s', req)
+
+ with indent_log():
+ if require_hashes:
+ raise InstallationError(
+ 'The editable requirement %s cannot be installed when '
+ 'requiring hashes, because there is no single file to '
+ 'hash.' % req
+ )
+ req.ensure_has_source_dir(self.src_dir)
+ req.update_editable(not self._download_should_save)
+
+ abstract_dist = make_distribution_for_install_requirement(req)
+ with self.req_tracker.track(req):
+ abstract_dist.prepare_distribution_metadata(
+ finder, self.build_isolation,
+ )
+
+ if self._download_should_save:
+ req.archive(self.download_dir)
+ req.check_if_exists(use_user_site)
+
+ return abstract_dist
+
+ def prepare_installed_requirement(
+ self,
+ req, # type: InstallRequirement
+ require_hashes, # type: bool
+ skip_reason # type: str
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare an already-installed requirement
+ """
+ assert req.satisfied_by, "req should have been satisfied but isn't"
+ assert skip_reason is not None, (
+ "did not get skip reason skipped but req.satisfied_by "
+ "is set to %r" % (req.satisfied_by,)
+ )
+ logger.info(
+ 'Requirement %s: %s (%s)',
+ skip_reason, req, req.satisfied_by.version
+ )
+ with indent_log():
+ if require_hashes:
+ logger.debug(
+ 'Since it is already installed, we are trusting this '
+ 'package without checking its hash. To ensure a '
+ 'completely repeatable environment, install into an '
+ 'empty virtualenv.'
+ )
+ abstract_dist = InstalledDistribution(req)
+
+ return abstract_dist
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py
new file mode 100644
index 00000000..07dc148e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pep425tags.py
@@ -0,0 +1,387 @@
+"""Generate and work with PEP 425 Compatibility Tags."""
+from __future__ import absolute_import
+
+import distutils.util
+import logging
+import platform
+import re
+import sys
+import sysconfig
+import warnings
+from collections import OrderedDict
+
+import pip._internal.utils.glibc
+from pip._internal.utils.compat import get_extension_suffixes
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Tuple, Callable, List, Optional, Union, Dict
+ )
+
+ Pep425Tag = Tuple[str, str, str]
+
+logger = logging.getLogger(__name__)
+
+_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
+
+
+def get_config_var(var):
+ # type: (str) -> Optional[str]
+ try:
+ return sysconfig.get_config_var(var)
+ except IOError as e: # Issue #1074
+ warnings.warn("{}".format(e), RuntimeWarning)
+ return None
+
+
+def get_abbr_impl():
+ # type: () -> str
+ """Return abbreviated implementation name."""
+ if hasattr(sys, 'pypy_version_info'):
+ pyimpl = 'pp'
+ elif sys.platform.startswith('java'):
+ pyimpl = 'jy'
+ elif sys.platform == 'cli':
+ pyimpl = 'ip'
+ else:
+ pyimpl = 'cp'
+ return pyimpl
+
+
+def version_info_to_nodot(version_info):
+ # type: (Tuple[int, ...]) -> str
+ # Only use up to the first two numbers.
+ return ''.join(map(str, version_info[:2]))
+
+
+def get_impl_ver():
+ # type: () -> str
+ """Return implementation version."""
+ impl_ver = get_config_var("py_version_nodot")
+ if not impl_ver or get_abbr_impl() == 'pp':
+ impl_ver = ''.join(map(str, get_impl_version_info()))
+ return impl_ver
+
+
+def get_impl_version_info():
+ # type: () -> Tuple[int, ...]
+ """Return sys.version_info-like tuple for use in decrementing the minor
+ version."""
+ if get_abbr_impl() == 'pp':
+ # as per https://github.com/pypa/pip/issues/2882
+ # attrs exist only on pypy
+ return (sys.version_info[0],
+ sys.pypy_version_info.major, # type: ignore
+ sys.pypy_version_info.minor) # type: ignore
+ else:
+ return sys.version_info[0], sys.version_info[1]
+
+
+def get_impl_tag():
+ # type: () -> str
+ """
+ Returns the Tag for this specific implementation.
+ """
+ return "{}{}".format(get_abbr_impl(), get_impl_ver())
+
+
+def get_flag(var, fallback, expected=True, warn=True):
+ # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool
+ """Use a fallback method for determining SOABI flags if the needed config
+ var is unset or unavailable."""
+ val = get_config_var(var)
+ if val is None:
+ if warn:
+ logger.debug("Config variable '%s' is unset, Python ABI tag may "
+ "be incorrect", var)
+ return fallback()
+ return val == expected
+
+
+def get_abi_tag():
+ # type: () -> Optional[str]
+ """Return the ABI tag based on SOABI (if available) or emulate SOABI
+ (CPython 2, PyPy)."""
+ soabi = get_config_var('SOABI')
+ impl = get_abbr_impl()
+ if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
+ d = ''
+ m = ''
+ u = ''
+ if get_flag('Py_DEBUG',
+ lambda: hasattr(sys, 'gettotalrefcount'),
+ warn=(impl == 'cp')):
+ d = 'd'
+ if get_flag('WITH_PYMALLOC',
+ lambda: impl == 'cp',
+ warn=(impl == 'cp')):
+ m = 'm'
+ if get_flag('Py_UNICODE_SIZE',
+ lambda: sys.maxunicode == 0x10ffff,
+ expected=4,
+ warn=(impl == 'cp' and
+ sys.version_info < (3, 3))) \
+ and sys.version_info < (3, 3):
+ u = 'u'
+ abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
+ elif soabi and soabi.startswith('cpython-'):
+ abi = 'cp' + soabi.split('-')[1]
+ elif soabi:
+ abi = soabi.replace('.', '_').replace('-', '_')
+ else:
+ abi = None
+ return abi
+
+
+def _is_running_32bit():
+ # type: () -> bool
+ return sys.maxsize == 2147483647
+
+
+def get_platform():
+ # type: () -> str
+ """Return our platform name 'win32', 'linux_x86_64'"""
+ if sys.platform == 'darwin':
+ # distutils.util.get_platform() returns the release based on the value
+ # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
+ # be significantly older than the user's current machine.
+ release, _, machine = platform.mac_ver()
+ split_ver = release.split('.')
+
+ if machine == "x86_64" and _is_running_32bit():
+ machine = "i386"
+ elif machine == "ppc64" and _is_running_32bit():
+ machine = "ppc"
+
+ return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
+
+ # XXX remove distutils dependency
+ result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
+ if result == "linux_x86_64" and _is_running_32bit():
+ # 32 bit Python program (running on a 64 bit Linux): pip should only
+ # install and run 32 bit compiled extensions in that case.
+ result = "linux_i686"
+
+ return result
+
+
+def is_manylinux1_compatible():
+ # type: () -> bool
+ # Only Linux, and only x86-64 / i686
+ if get_platform() not in {"linux_x86_64", "linux_i686"}:
+ return False
+
+ # Check for presence of _manylinux module
+ try:
+ import _manylinux
+ return bool(_manylinux.manylinux1_compatible)
+ except (ImportError, AttributeError):
+ # Fall through to heuristic check below
+ pass
+
+ # Check glibc version. CentOS 5 uses glibc 2.5.
+ return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
+
+
+def is_manylinux2010_compatible():
+ # type: () -> bool
+ # Only Linux, and only x86-64 / i686
+ if get_platform() not in {"linux_x86_64", "linux_i686"}:
+ return False
+
+ # Check for presence of _manylinux module
+ try:
+ import _manylinux
+ return bool(_manylinux.manylinux2010_compatible)
+ except (ImportError, AttributeError):
+ # Fall through to heuristic check below
+ pass
+
+ # Check glibc version. CentOS 6 uses glibc 2.12.
+ return pip._internal.utils.glibc.have_compatible_glibc(2, 12)
+
+
+def get_darwin_arches(major, minor, machine):
+ # type: (int, int, str) -> List[str]
+ """Return a list of supported arches (including group arches) for
+ the given major, minor and machine architecture of an macOS machine.
+ """
+ arches = []
+
+ def _supports_arch(major, minor, arch):
+ # type: (int, int, str) -> bool
+ # Looking at the application support for macOS versions in the chart
+ # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
+ # our timeline looks roughly like:
+ #
+ # 10.0 - Introduces ppc support.
+ # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
+ # and x86_64 support is CLI only, and cannot be used for GUI
+ # applications.
+ # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
+ # 10.6 - Drops support for ppc64
+ # 10.7 - Drops support for ppc
+ #
+ # Given that we do not know if we're installing a CLI or a GUI
+ # application, we must be conservative and assume it might be a GUI
+ # application and behave as if ppc64 and x86_64 support did not occur
+ # until 10.5.
+ #
+ # Note: The above information is taken from the "Application support"
+ # column in the chart not the "Processor support" since I believe
+ # that we care about what instruction sets an application can use
+ # not which processors the OS supports.
+ if arch == 'ppc':
+ return (major, minor) <= (10, 5)
+ if arch == 'ppc64':
+ return (major, minor) == (10, 5)
+ if arch == 'i386':
+ return (major, minor) >= (10, 4)
+ if arch == 'x86_64':
+ return (major, minor) >= (10, 5)
+ if arch in groups:
+ for garch in groups[arch]:
+ if _supports_arch(major, minor, garch):
+ return True
+ return False
+
+ groups = OrderedDict([
+ ("fat", ("i386", "ppc")),
+ ("intel", ("x86_64", "i386")),
+ ("fat64", ("x86_64", "ppc64")),
+ ("fat32", ("x86_64", "i386", "ppc")),
+ ]) # type: Dict[str, Tuple[str, ...]]
+
+ if _supports_arch(major, minor, machine):
+ arches.append(machine)
+
+ for garch in groups:
+ if machine in groups[garch] and _supports_arch(major, minor, garch):
+ arches.append(garch)
+
+ arches.append('universal')
+
+ return arches
+
+
+def get_all_minor_versions_as_strings(version_info):
+ # type: (Tuple[int, ...]) -> List[str]
+ versions = []
+ major = version_info[:-1]
+ # Support all previous minor Python versions.
+ for minor in range(version_info[-1], -1, -1):
+ versions.append(''.join(map(str, major + (minor,))))
+ return versions
+
+
+def get_supported(
+ versions=None, # type: Optional[List[str]]
+ noarch=False, # type: bool
+ platform=None, # type: Optional[str]
+ impl=None, # type: Optional[str]
+ abi=None # type: Optional[str]
+):
+ # type: (...) -> List[Pep425Tag]
+ """Return a list of supported tags for each version specified in
+ `versions`.
+
+ :param versions: a list of string versions, of the form ["33", "32"],
+ or None. The first version will be assumed to support our ABI.
+ :param platform: specify the exact platform you want valid
+ tags for, or None. If None, use the local system platform.
+ :param impl: specify the exact implementation you want valid
+ tags for, or None. If None, use the local interpreter impl.
+ :param abi: specify the exact abi you want valid
+ tags for, or None. If None, use the local interpreter abi.
+ """
+ supported = []
+
+ # Versions must be given with respect to the preference
+ if versions is None:
+ version_info = get_impl_version_info()
+ versions = get_all_minor_versions_as_strings(version_info)
+
+ impl = impl or get_abbr_impl()
+
+ abis = [] # type: List[str]
+
+ abi = abi or get_abi_tag()
+ if abi:
+ abis[0:0] = [abi]
+
+ abi3s = set()
+ for suffix in get_extension_suffixes():
+ if suffix.startswith('.abi'):
+ abi3s.add(suffix.split('.', 2)[1])
+
+ abis.extend(sorted(list(abi3s)))
+
+ abis.append('none')
+
+ if not noarch:
+ arch = platform or get_platform()
+ arch_prefix, arch_sep, arch_suffix = arch.partition('_')
+ if arch.startswith('macosx'):
+ # support macosx-10.6-intel on macosx-10.9-x86_64
+ match = _osx_arch_pat.match(arch)
+ if match:
+ name, major, minor, actual_arch = match.groups()
+ tpl = '{}_{}_%i_%s'.format(name, major)
+ arches = []
+ for m in reversed(range(int(minor) + 1)):
+ for a in get_darwin_arches(int(major), m, actual_arch):
+ arches.append(tpl % (m, a))
+ else:
+ # arch pattern didn't match (?!)
+ arches = [arch]
+ elif arch_prefix == 'manylinux2010':
+ # manylinux1 wheels run on most manylinux2010 systems with the
+ # exception of wheels depending on ncurses. PEP 571 states
+ # manylinux1 wheels should be considered manylinux2010 wheels:
+ # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
+ arches = [arch, 'manylinux1' + arch_sep + arch_suffix]
+ elif platform is None:
+ arches = []
+ if is_manylinux2010_compatible():
+ arches.append('manylinux2010' + arch_sep + arch_suffix)
+ if is_manylinux1_compatible():
+ arches.append('manylinux1' + arch_sep + arch_suffix)
+ arches.append(arch)
+ else:
+ arches = [arch]
+
+ # Current version, current API (built specifically for our Python):
+ for abi in abis:
+ for arch in arches:
+ supported.append(('%s%s' % (impl, versions[0]), abi, arch))
+
+ # abi3 modules compatible with older version of Python
+ for version in versions[1:]:
+ # abi3 was introduced in Python 3.2
+ if version in {'31', '30'}:
+ break
+ for abi in abi3s: # empty set if not Python 3
+ for arch in arches:
+ supported.append(("%s%s" % (impl, version), abi, arch))
+
+ # Has binaries, does not use the Python API:
+ for arch in arches:
+ supported.append(('py%s' % (versions[0][0]), 'none', arch))
+
+ # No abi / arch, but requires our implementation:
+ supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
+ # Tagged specifically as being cross-version compatible
+ # (with just the major version specified)
+ supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
+
+ # No abi / arch, generic Python
+ for i, version in enumerate(versions):
+ supported.append(('py%s' % (version,), 'none', 'any'))
+ if i == 0:
+ supported.append(('py%s' % (version[0]), 'none', 'any'))
+
+ return supported
+
+
+implementation_tag = get_impl_tag()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py
new file mode 100644
index 00000000..43efbed4
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/pyproject.py
@@ -0,0 +1,171 @@
+from __future__ import absolute_import
+
+import io
+import os
+import sys
+
+from pip._vendor import pytoml, six
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Tuple, Optional, List
+
+
def _is_list_of_str(obj):
    # type: (Any) -> bool
    """Return True if *obj* is a list whose elements are all strings."""
    if not isinstance(obj, list):
        return False
    return all(isinstance(item, six.string_types) for item in obj)
+
+
def make_pyproject_path(setup_py_dir):
    # type: (str) -> str
    """Return the path of the pyproject.toml beside a project's setup.py."""
    path = os.path.join(setup_py_dir, 'pyproject.toml')

    # Python2 __file__ should not be unicode, so return a bytes path there.
    if six.PY2 and isinstance(path, six.text_type):
        return path.encode(sys.getfilesystemencoding())

    return path
+
+
def load_pyproject_toml(
    use_pep517,  # type: Optional[bool]
    pyproject_toml,  # type: str
    setup_py,  # type: str
    req_name  # type: str
):
    # type: (...) -> Optional[Tuple[List[str], str, List[str]]]
    """Load the pyproject.toml file.

    Parameters:
        use_pep517 - Has the user requested PEP 517 processing? None
            means the user hasn't explicitly specified.
        pyproject_toml - Location of the project's pyproject.toml file
        setup_py - Location of the project's setup.py file
        req_name - The name of the requirement we're processing (for
            error reporting)

    Returns:
        None if we should use the legacy code path, otherwise a tuple
        (
            requirements from pyproject.toml,
            name of PEP 517 backend,
            requirements we should check are installed after setting
                up the build environment
        )

    Raises:
        InstallationError - if PEP 517 is disabled in a situation that
            requires it, or if the build-system table violates PEP 518.
    """
    # Which of the two files exist determines the code path below.
    has_pyproject = os.path.isfile(pyproject_toml)
    has_setup = os.path.isfile(setup_py)

    if has_pyproject:
        # pyproject.toml is read as UTF-8, per PEP 518.
        with io.open(pyproject_toml, encoding="utf-8") as f:
            pp_toml = pytoml.load(f)
        build_system = pp_toml.get("build-system")
    else:
        build_system = None

    # The following cases must use PEP 517
    # We check for use_pep517 being non-None and falsey because that means
    # the user explicitly requested --no-use-pep517.  The value 0 as
    # opposed to False can occur when the value is provided via an
    # environment variable or config file option (due to the quirk of
    # strtobool() returning an integer in pip's configuration code).
    if has_pyproject and not has_setup:
        # With no setup.py there is no legacy path to fall back to.
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project does not have a setup.py"
            )
        use_pep517 = True
    elif build_system and "build-backend" in build_system:
        # An explicit backend in pyproject.toml also forces PEP 517.
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project specifies a build backend of {} "
                "in pyproject.toml".format(
                    build_system["build-backend"]
                )
            )
        use_pep517 = True

    # If we haven't worked out whether to use PEP 517 yet,
    # and the user hasn't explicitly stated a preference,
    # we do so if the project has a pyproject.toml file.
    elif use_pep517 is None:
        use_pep517 = has_pyproject

    # At this point, we know whether we're going to use PEP 517.
    assert use_pep517 is not None

    # If we're using the legacy code path, there is nothing further
    # for us to do here.
    if not use_pep517:
        return None

    if build_system is None:
        # Either the user has a pyproject.toml with no build-system
        # section, or the user has no pyproject.toml, but has opted in
        # explicitly via --use-pep517.
        # In the absence of any explicit backend specification, we
        # assume the setuptools backend that most closely emulates the
        # traditional direct setup.py execution, and require wheel and
        # a version of setuptools that supports that backend.

        build_system = {
            "requires": ["setuptools>=40.8.0", "wheel"],
            "build-backend": "setuptools.build_meta:__legacy__",
        }

    # If we're using PEP 517, we have build system information (either
    # from pyproject.toml, or defaulted by the code above).
    # Note that at this point, we do not know if the user has actually
    # specified a backend, though.
    assert build_system is not None

    # Ensure that the build-system section in pyproject.toml conforms
    # to PEP 518.
    error_template = (
        "{package} has a pyproject.toml file that does not comply "
        "with PEP 518: {reason}"
    )

    # Specifying the build-system table but not the requires key is invalid
    if "requires" not in build_system:
        raise InstallationError(
            error_template.format(package=req_name, reason=(
                "it has a 'build-system' table but not "
                "'build-system.requires' which is mandatory in the table"
            ))
        )

    # Error out if requires is not a list of strings
    requires = build_system["requires"]
    if not _is_list_of_str(requires):
        raise InstallationError(error_template.format(
            package=req_name,
            reason="'build-system.requires' is not a list of strings.",
        ))

    backend = build_system.get("build-backend")
    check = []  # type: List[str]
    if backend is None:
        # If the user didn't specify a backend, we assume they want to use
        # the setuptools backend. But we can't be sure they have included
        # a version of setuptools which supplies the backend, or wheel
        # (which is needed by the backend) in their requirements. So we
        # make a note to check that those requirements are present once
        # we have set up the environment.
        # This is quite a lot of work to check for a very specific case. But
        # the problem is, that case is potentially quite common - projects that
        # adopted PEP 518 early for the ability to specify requirements to
        # execute setup.py, but never considered needing to mention the build
        # tools themselves. The original PEP 518 code had a similar check (but
        # implemented in a different way).
        backend = "setuptools.build_meta:__legacy__"
        check = ["setuptools>=40.8.0", "wheel"]

    return (requires, backend, check)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py
new file mode 100644
index 00000000..c39f63fa
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__init__.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import
+
+import logging
+
+from .req_install import InstallRequirement
+from .req_set import RequirementSet
+from .req_file import parse_requirements
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, List, Sequence
+
# Public API re-exported by the pip._internal.req package.
__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)
+
+
def install_given_reqs(
    to_install,  # type: List[InstallRequirement]
    install_options,  # type: List[str]
    global_options=(),  # type: Sequence[str]
    *args,  # type: Any
    **kwargs  # type: Any
):
    # type: (...) -> List[InstallRequirement]
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)

    :param to_install: the requirements to install, in order.
    :param install_options: per-requirement options forwarded to each
        requirement's ``install()`` call.
    :param global_options: global options forwarded likewise.
    Any extra positional/keyword arguments are passed straight through
    to ``InstallRequirement.install()``.
    :return: the same ``to_install`` list that was passed in.
    """

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            if requirement.conflicts_with:
                # An existing installation conflicts: uninstall it first,
                # keeping the removed files so they can be rolled back
                # (on failure) or committed (on success) below.
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except Exception:
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                # Install succeeded: make the earlier uninstall permanent.
                if should_commit:
                    uninstalled_pathset.commit()
                requirement.remove_temporary_source()

    return to_install
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..fee124c0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
new file mode 100644
index 00000000..7c20dade
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
new file mode 100644
index 00000000..c6146ec7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
new file mode 100644
index 00000000..f7b8a6ef
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
new file mode 100644
index 00000000..36a3106d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
new file mode 100644
index 00000000..eec20425
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
new file mode 100644
index 00000000..83068d48
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py
new file mode 100644
index 00000000..cd0ab504
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/constructors.py
@@ -0,0 +1,349 @@
+"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's size
+so, moving them and support code dedicated to them outside of that class
+helps creates for better understandability for the rest of the code.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+"""
+
+import logging
+import os
+import re
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.specifiers import Specifier
+from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
+
+from pip._internal.download import is_archive_file, is_url, url_to_path
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.index import PyPI, TestPyPI
+from pip._internal.models.link import Link
+from pip._internal.pyproject import make_pyproject_path
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.misc import is_installable_dir, path_to_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.vcs import vcs
+from pip._internal.wheel import Wheel
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Optional, Set, Tuple, Union,
+ )
+ from pip._internal.cache import WheelCache
+
+
# Public API of this module; other constructors are internal helpers.
__all__ = [
    "install_req_from_editable", "install_req_from_line",
    "parse_editable"
]

logger = logging.getLogger(__name__)
# Comparison operators understood by packaging's Specifier grammar (used
# below to hint when a bare '=' was probably meant as '==').
operators = Specifier._operators.keys()
+
+
+def _strip_extras(path):
+ # type: (str) -> Tuple[str, Optional[str]]
+ m = re.match(r'^(.+)(\[[^\]]+\])$', path)
+ extras = None
+ if m:
+ path_no_extras = m.group(1)
+ extras = m.group(2)
+ else:
+ path_no_extras = path
+
+ return path_no_extras, extras
+
+
def parse_editable(editable_req):
    # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
    """Parses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    """
    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        setup_py_path = os.path.join(url_no_extras, 'setup.py')
        if not os.path.exists(setup_py_path):
            msg = (
                'File "setup.py" not found. Directory cannot be installed '
                'in editable mode: {}'.format(os.path.abspath(url_no_extras))
            )
            pyproject_path = make_pyproject_path(url_no_extras)
            if os.path.isfile(pyproject_path):
                msg += (
                    '\n(A "pyproject.toml" file was found, but editable '
                    'mode currently requires a setup.py based build.)'
                )
            raise InstallationError(msg)

        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        extras_set = None  # type: Optional[Set[str]]
        if extras:
            extras_set = Requirement("placeholder" + extras.lower()).extras
        return package_name, url_no_extras, extras_set

    # Normalize bare "vcs:..." forms to "vcs+vcs:..." so the split below
    # always sees a '+'.
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        raise InstallationError(
            '{} is not a valid editable requirement. '
            'It should either be a path to a local project or a VCS URL '
            '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
        )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        backend_names = ', '.join(
            backend.name + '+URL' for backend in vcs.backends
        )
        error_message = 'For --editable=%s only ' % editable_req + \
            backend_names + ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
    return package_name, url, None
+
+
def deduce_helpful_msg(req):
    # type: (str) -> str
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    :return: a sentence (with a leading space) to append to an error
        message; empty-ish pieces are composed from what we can detect.
    """
    msg = ""
    if os.path.exists(req):
        msg = " It does exist."
        # Try to parse and check if it is a requirements file.
        try:
            with open(req, 'r') as fp:
                # parse first line only
                next(parse_requirements(fp.read()))
            msg += (
                " The argument you provided "
                "(%s) appears to be a"
                " requirements file. If that is the"
                " case, use the '-r' flag to install"
                " the packages specified within it." % (req,)
            )
        except RequirementParseError:
            # BUG FIX: the original message used a backslash line
            # continuation inside the string literal, so the logged text
            # contained a literal newline plus indentation whitespace.
            # Also use lazy %-style logging args instead of eager
            # formatting.
            logger.debug("Cannot parse '%s' as requirements file", req,
                         exc_info=True)
    else:
        msg += " File '%s' does not exist." % (req)
    return msg
+
+
+# ---- The actual constructors follow ----
+
+
def install_req_from_editable(
    editable_req,  # type: str
    comes_from=None,  # type: Optional[str]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    wheel_cache=None,  # type: Optional[WheelCache]
    constraint=False  # type: bool
):
    # type: (...) -> InstallRequirement
    """Create an InstallRequirement from an editable ("-e") requirement."""
    name, url, extras_override = parse_editable(editable_req)

    # A file: URL points at source code already present on disk.
    source_dir = url_to_path(url) if url.startswith('file:') else None

    req = None
    if name is not None:
        try:
            req = Requirement(name)
        except InvalidRequirement:
            raise InstallationError("Invalid requirement: '%s'" % name)

    return InstallRequirement(
        req, comes_from,
        source_dir=source_dir,
        editable=True,
        link=Link(url),
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        extras=extras_override or (),
    )
+
+
def install_req_from_line(
    name,  # type: str
    comes_from=None,  # type: Optional[Union[str, InstallRequirement]]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    wheel_cache=None,  # type: Optional[WheelCache]
    constraint=False,  # type: bool
    line_source=None,  # type: Optional[str]
):
    # type: (...) -> InstallRequirement
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.

    :param name: the requirement line (specifier, path, or URL).
    :param comes_from: origin description used in error/log output.
    :param line_source: An optional string describing where the line is from,
        for logging purposes in case of an error.
    :raises InstallationError: for an uninstallable directory or an
        unparseable requirement specifier.
    """
    # Split off an environment marker. For URLs the separator is '; '
    # (with a space) — presumably so a bare ';' inside the URL is not
    # mistaken for a marker separator; confirm against pip docs.
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
        if not markers_as_string:
            markers = None
        else:
            markers = Marker(markers_as_string)
    else:
        markers = None
    name = name.strip()
    req_as_string = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras_as_string = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras_as_string = _strip_extras(path)
        # Only treat it as a directory requirement when the user clearly
        # wrote a path (contains a separator or starts with '.').
        looks_like_dir = os.path.isdir(p) and (
            os.path.sep in name or
            (os.path.altsep is not None and os.path.altsep in name) or
            name.startswith('.')
        )
        if looks_like_dir:
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. Neither 'setup.py' "
                    "nor 'pyproject.toml' found." % name
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist',
                    name
                )
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req_as_string = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req_as_string = link.egg_fragment

    # a requirement specifier
    else:
        req_as_string = name

    if extras_as_string:
        extras = Requirement("placeholder" + extras_as_string.lower()).extras
    else:
        extras = ()
    if req_as_string is not None:
        try:
            req = Requirement(req_as_string)
        except InvalidRequirement:
            # Build the most helpful hint we can for the failed parse.
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif ('=' in req_as_string and
                  not any(op in req_as_string for op in operators)):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ''
            if line_source is None:
                source = ''
            else:
                source = ' (from {})'.format(line_source)
            msg = (
                'Invalid requirement: {!r}{}'.format(req_as_string, source)
            )
            if add_msg:
                msg += '\nHint: {}'.format(add_msg)
            raise InstallationError(msg)
    else:
        req = None

    return InstallRequirement(
        req, comes_from, link=link, markers=markers,
        use_pep517=use_pep517, isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        constraint=constraint,
        extras=extras,
    )
+
+
def install_req_from_req_string(
    req_string,  # type: str
    comes_from=None,  # type: Optional[InstallRequirement]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None  # type: Optional[bool]
):
    # type: (...) -> InstallRequirement
    """Create an InstallRequirement from a bare requirement-specifier string.

    :raises InstallationError: if the specifier cannot be parsed, or if a
        PyPI-hosted parent depends on a direct URL requirement.
    """
    try:
        req = Requirement(req_string)
    except InvalidRequirement:
        raise InstallationError("Invalid requirement: '%s'" % req_string)

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    parent_is_pypi_hosted = (
        comes_from and comes_from.link and
        comes_from.link.netloc in domains_not_allowed
    )
    if req.url and parent_is_pypi_hosted:
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            "%s depends on %s " % (comes_from.name, req)
        )

    return InstallRequirement(
        req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
        use_pep517=use_pep517
    )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py
new file mode 100644
index 00000000..5a9920fe
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_file.py
@@ -0,0 +1,399 @@
+"""
+Requirements file parsing
+"""
+
+from __future__ import absolute_import
+
+import optparse
+import os
+import re
+import shlex
+import sys
+
+from pip._vendor.six.moves import filterfalse
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.cli import cmdoptions
+from pip._internal.download import get_file_content
+from pip._internal.exceptions import RequirementsFileParseError
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.req.constructors import (
+ install_req_from_editable, install_req_from_line,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple,
+ )
+ from pip._internal.req import InstallRequirement
+ from pip._internal.cache import WheelCache
+ from pip._internal.index import PackageFinder
+ from pip._internal.download import PipSession
+
+ ReqFileLines = Iterator[Tuple[int, Text]]
+
__all__ = ['parse_requirements']

# URL-ish schemes; used to decide whether a nested requirements path is
# joined as a URL or as a filesystem path.
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# A '#' starts a comment only at line start or after whitespace.
COMMENT_RE = re.compile(r'(^|\s+)#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

# Options that may appear on their own line in a requirements file.
SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]  # type: List[Callable[..., optparse.Option]]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]  # type: List[Callable[..., optparse.Option]]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+
+
def parse_requirements(
    filename,  # type: str
    finder=None,  # type: Optional[PackageFinder]
    comes_from=None,  # type: Optional[str]
    options=None,  # type: Optional[optparse.Values]
    session=None,  # type: Optional[PipSession]
    constraint=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None  # type: Optional[bool]
):
    # type: (...) -> Iterator[InstallRequirement]
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    :param use_pep517: Value of the --use-pep517 option.
    """
    # session is effectively mandatory; give a clear error rather than
    # failing deep inside the download machinery.
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    for line_number, line in preprocess(content, options):
        for req in process_line(
                line, filename, line_number, finder, comes_from,
                options, session, wheel_cache,
                use_pep517=use_pep517, constraint=constraint):
            yield req
+
+
def preprocess(content, options):
    # type: (Text, Optional[optparse.Values]) -> ReqFileLines
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    numbered = enumerate(content.splitlines(), start=1)  # type: ReqFileLines
    # Pipeline: join continuations, drop comments, apply the skip regex,
    # then expand ${VAR} placeholders.
    return expand_env_variables(
        skip_regex(
            ignore_comments(join_lines(numbered)),
            options,
        )
    )
+
+
def process_line(
    line,  # type: Text
    filename,  # type: str
    line_number,  # type: int
    finder=None,  # type: Optional[PackageFinder]
    comes_from=None,  # type: Optional[str]
    options=None,  # type: Optional[optparse.Values]
    session=None,  # type: Optional[PipSession]
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None,  # type: Optional[bool]
    constraint=False,  # type: bool
):
    # type: (...) -> Iterator[InstallRequirement]
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    # Prior to 2.7.3, shlex cannot deal with unicode entries
    if sys.version_info < (2, 7, 3):
        # https://github.com/python/mypy/issues/1174
        options_str = options_str.encode('utf8')  # type: ignore
    # https://github.com/python/mypy/issues/1174
    opts, _ = parser.parse_args(
        shlex.split(options_str), defaults)  # type: ignore

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        line_source = 'line {} of {}'.format(line_number, filename)
        yield install_req_from_line(
            args_str,
            comes_from=line_comes_from,
            use_pep517=use_pep517,
            isolated=isolated,
            options=req_options,
            wheel_cache=wheel_cache,
            constraint=constraint,
            line_source=line_source,
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield install_req_from_editable(
            opts.editables[0], comes_from=line_comes_from,
            use_pep517=use_pep517,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parsed_reqs = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parsed_reqs:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        if opts.index_url:
            index_urls = [opts.index_url]
        if opts.no_index is True:
            index_urls = []
        if opts.extra_index_urls:
            index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            find_links.append(value)

        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
        )
        finder.search_scope = search_scope

        if opts.pre:
            finder.set_allow_all_prereleases()
        for host in opts.trusted_hosts or []:
            source = 'line {} of {}'.format(line_number, filename)
            finder.add_trusted_host(host, source=source)
+
+
def break_args_options(line):
    # type: (Text) -> Tuple[str, Text]
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :return: a ``(args, options)`` pair; args are the leading tokens up to
        the first one starting with '-', options are everything after.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # FIX: the original also tested startswith('--'), which is
        # redundant — every '--long' option already starts with '-'.
        if token.startswith('-'):
            break
        args.append(token)
        options.pop(0)
    return ' '.join(args), ' '.join(options)
+
+
def build_parser(line):
    # type: (Text) -> optparse.OptionParser
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        # type: (Any, str) -> NoReturn
        # add offending line
        msg = 'Invalid requirement: %s\n%s' % (line, msg)
        raise RequirementsFileParseError(msg)
    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser
+
+
def join_lines(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    first_line_number = None
    buffered = []  # type: List[Text]
    for line_number, line in lines_enum:
        is_comment = bool(COMMENT_RE.match(line))
        if line.endswith('\\') and not is_comment:
            # Continuation: buffer it (minus the trailing backslash).
            if not buffered:
                first_line_number = line_number
            buffered.append(line.strip('\\'))
            continue
        if is_comment:
            # this ensures comments are always matched later
            line = ' ' + line
        if buffered:
            buffered.append(line)
            yield first_line_number, ''.join(buffered)
            buffered = []
        else:
            yield line_number, line

    # last line contains \
    if buffered:
        yield first_line_number, ''.join(buffered)

    # TODO: handle space after '\'.
+
+
def ignore_comments(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """
    Strips comments and filter empty lines.
    """
    for line_number, raw_line in lines_enum:
        stripped = COMMENT_RE.sub('', raw_line).strip()
        if stripped:
            yield line_number, stripped
+
+
def skip_regex(lines_enum, options):
    # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    pattern_text = options.skip_requirements_regex if options else None
    if not pattern_text:
        return lines_enum
    pattern = re.compile(pattern_text)
    return filterfalse(lambda e: pattern.search(e[1]), lines_enum)
+
+
def expand_env_variables(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        expanded = line
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            # Placeholders for unset or empty variables are left as-is.
            if value:
                expanded = expanded.replace(env_var, value)

        yield line_number, expanded
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py
new file mode 100644
index 00000000..f5c93504
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_install.py
@@ -0,0 +1,1035 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import shutil
+import sys
+import sysconfig
+import zipfile
+from distutils.util import change_root
+
+from pip._vendor import pkg_resources, six
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal import wheel
+from pip._internal.build_env import NoOpBuildEnvironment
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.link import Link
+from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
+from pip._internal.req.req_uninstall import UninstallPathSet
+from pip._internal.utils.compat import native_str
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.marker_files import PIP_DELETE_MARKER_FILENAME
+from pip._internal.utils.misc import (
+ _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
+ display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
+ get_installed_version, redact_password_from_url, rmtree,
+)
+from pip._internal.utils.packaging import get_metadata
+from pip._internal.utils.setuptools_build import make_setuptools_shim_args
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+from pip._internal.utils.virtualenv import running_under_virtualenv
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, Mapping, Optional, Sequence, Union,
+ )
+ from pip._internal.build_env import BuildEnvironment
+ from pip._internal.cache import WheelCache
+ from pip._internal.index import PackageFinder
+ from pip._vendor.pkg_resources import Distribution
+ from pip._vendor.packaging.specifiers import SpecifierSet
+ from pip._vendor.packaging.markers import Marker
+
+
+logger = logging.getLogger(__name__)
+
+
+class InstallRequirement(object):
+ """
+ Represents something that may be installed later on, may have information
+ about where to fetch the relevant requirement and also contains logic for
+ installing the said requirement.
+ """
+
    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        source_dir=None,  # type: Optional[str]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        update=True,  # type: bool
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        options=None,  # type: Optional[Dict[str, Any]]
        wheel_cache=None,  # type: Optional[WheelCache]
        constraint=False,  # type: bool
        extras=()  # type: Iterable[str]
    ):
        # type: (...) -> None
        """Initialize requirement state.

        ``req`` may be None for unnamed requirements (a bare path or URL);
        the name is filled in later, once metadata has been generated.
        ``comes_from`` records what pulled this requirement in (a
        requirements-file path or a parent InstallRequirement) and is used
        in messages only.
        """
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is None:
            self.source_dir = None  # type: Optional[str]
        else:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        # original_link keeps the pre-wheel-cache link (see populate_link).
        self.link = self.original_link = link

        # Explicit extras win over those parsed from the requirement string.
        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        self._egg_info_path = None  # type: Optional[str]
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = TempDirectory(kind="req-build")
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None  # type: Optional[str]
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled_pathset = None
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # NOTE(review): appears to mark requirements specified directly by
        # the user rather than pulled in as dependencies — confirm callers.
        self.is_direct = False

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517
+
+ def __str__(self):
+ # type: () -> str
+ if self.req:
+ s = str(self.req)
+ if self.link:
+ s += ' from %s' % redact_password_from_url(self.link.url)
+ elif self.link:
+ s = redact_password_from_url(self.link.url)
+ else:
+ s = '<InstallRequirement>'
+ if self.satisfied_by is not None:
+ s += ' in %s' % display_path(self.satisfied_by.location)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from # type: Optional[str]
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += ' (from %s)' % comes_from
+ return s
+
+ def __repr__(self):
+ # type: () -> str
+ return '<%s object: %s editable=%r>' % (
+ self.__class__.__name__, str(self), self.editable)
+
+ def format_debug(self):
+ # type: () -> str
+ """An un-tested helper for getting state, for debugging.
+ """
+ attributes = vars(self)
+ names = sorted(attributes)
+
+ state = (
+ "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
+ )
+ return '<{name} object: {{{state}}}>'.format(
+ name=self.__class__.__name__,
+ state=", ".join(state),
+ )
+
    def populate_link(self, finder, upgrade, require_hashes):
        # type: (PackageFinder, bool, bool) -> None
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        if self._wheel_cache is not None and not require_hashes:
            old_link = self.link
            # Swap in a locally cached wheel for the index link when one
            # exists; self.original_link still records the index link.
            self.link = self._wheel_cache.get(self.link, self.name)
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)
+
+ # Things that are valid for all kinds of requirements?
+ @property
+ def name(self):
+ # type: () -> Optional[str]
+ if self.req is None:
+ return None
+ return native_str(pkg_resources.safe_name(self.req.name))
+
    @property
    def specifier(self):
        # type: () -> SpecifierSet
        # Version specifier of the parsed requirement. Requires self.req to
        # be set; unnamed requirements would raise AttributeError here.
        return self.req.specifier
+
+ @property
+ def is_pinned(self):
+ # type: () -> bool
+ """Return whether I am pinned to an exact version.
+
+ For example, some-package==1.2 is pinned; some-package>1.2 is not.
+ """
+ specifiers = self.specifier
+ return (len(specifiers) == 1 and
+ next(iter(specifiers)).operator in {'==', '==='})
+
    @property
    def installed_version(self):
        # type: () -> Optional[str]
        # Version string of the currently installed distribution with this
        # name, or None when nothing with this name is installed.
        return get_installed_version(self.name)
+
+ def match_markers(self, extras_requested=None):
+ # type: (Optional[Iterable[str]]) -> bool
+ if not extras_requested:
+ # Provide an extra to safely evaluate the markers
+ # without matching any extra
+ extras_requested = ('',)
+ if self.markers is not None:
+ return any(
+ self.markers.evaluate({'extra': extra})
+ for extra in extras_requested)
+ else:
+ return True
+
+ @property
+ def has_hash_options(self):
+ # type: () -> bool
+ """Return whether any known-good hashes are specified as options.
+
+ These activate --require-hashes mode; hashes specified as part of a
+ URL do not.
+
+ """
+ return bool(self.options.get('hashes', {}))
+
    def hashes(self, trust_internet=True):
        # type: (bool) -> Hashes
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        # Copy so the appended URL hash does not mutate self.options.
        good_hashes = self.options.get('hashes', {}).copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)
+
+ def from_path(self):
+ # type: () -> Optional[str]
+ """Format a nice indicator to show where this "comes from"
+ """
+ if self.req is None:
+ return None
+ s = str(self.req)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += '->' + comes_from
+ return s
+
    def build_location(self, build_dir):
        # type: (str) -> str
        """Return the directory this requirement should be built in.

        Uses a fresh temporary directory while the requirement's name is
        unknown (remembering ``build_dir`` as the ideal location for
        _correct_build_location); otherwise a subdirectory of ``build_dir``
        named after the requirement.
        """
        assert build_dir is not None
        if self._temp_build_dir.path is not None:
            return self._temp_build_dir.path
        if self.req is None:
            # for requirement via a path to a directory: the name of the
            # package is not available yet so we create a temp directory
            # Once run_egg_info will have run, we'll be able
            # to fix it via _correct_build_location
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir.create()
            self._ideal_build_dir = build_dir

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            _make_build_dir(build_dir)
        return os.path.join(build_dir, name)
+
+ def _correct_build_location(self):
+ # type: () -> None
+ """Move self._temp_build_dir to self._ideal_build_dir/self.req.name
+
+ For some requirements (e.g. a path to a directory), the name of the
+ package is not available until we run egg_info, so the build_location
+ will return a temporary directory and store the _ideal_build_dir.
+
+ This is only called by self.run_egg_info to fix the temporary build
+ directory.
+ """
+ if self.source_dir is not None:
+ return
+ assert self.req is not None
+ assert self._temp_build_dir.path
+ assert (self._ideal_build_dir is not None and
+ self._ideal_build_dir.path) # type: ignore
+ old_location = self._temp_build_dir.path
+ self._temp_build_dir.path = None
+
+ new_location = self.build_location(self._ideal_build_dir)
+ if os.path.exists(new_location):
+ raise InstallationError(
+ 'A package already exists in %s; please remove it to continue'
+ % display_path(new_location))
+ logger.debug(
+ 'Moving package %s from %s to new location %s',
+ self, display_path(old_location), display_path(new_location),
+ )
+ shutil.move(old_location, new_location)
+ self._temp_build_dir.path = new_location
+ self._ideal_build_dir = None
+ self.source_dir = os.path.normpath(os.path.abspath(new_location))
+ self._egg_info_path = None
+
+ # Correct the metadata directory, if it exists
+ if self.metadata_directory:
+ old_meta = self.metadata_directory
+ rel = os.path.relpath(old_meta, start=old_location)
+ new_meta = os.path.join(new_location, rel)
+ new_meta = os.path.normpath(os.path.abspath(new_meta))
+ self.metadata_directory = new_meta
+
    def remove_temporary_source(self):
        # type: () -> None
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        # Only delete source trees pip itself created (they carry the
        # delete-marker file); user-provided directories are left alone.
        if self.source_dir and os.path.exists(
                os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
            logger.debug('Removing source in %s', self.source_dir)
            rmtree(self.source_dir)
        self.source_dir = None
        self._temp_build_dir.cleanup()
        self.build_env.cleanup()
+
    def check_if_exists(self, use_user_site):
        # type: (bool) -> bool
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.

        :return: True when a matching or conflicting installation exists.
        """
        if self.req is None:
            return False
        try:
            # get_distribution() will resolve the entire list of requirements
            # anyway, and we've already determined that we need the requirement
            # in question, so strip the marker so that we don't try to
            # evaluate it.
            no_marker = Requirement(str(self.req))
            no_marker.marker = None
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
            if self.editable and self.satisfied_by:
                self.conflicts_with = self.satisfied_by
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            return True
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            # Something with this name is installed but at the wrong version.
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.conflicts_with = existing_dist
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.conflicts_with = existing_dist
        return True
+
+ # Things valid for wheels
+ @property
+ def is_wheel(self):
+ # type: () -> bool
+ if not self.link:
+ return False
+ return self.link.is_wheel
+
    def move_wheel_files(
        self,
        wheeldir,  # type: str
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        """Install the unpacked wheel at ``wheeldir``.

        Thin delegation to pip._internal.wheel.move_wheel_files with this
        requirement's name, parsed requirement and isolation flag.
        """
        wheel.move_wheel_files(
            self.name, self.req, wheeldir,
            user=use_user_site,
            home=home,
            root=root,
            prefix=prefix,
            pycompile=pycompile,
            isolated=self.isolated,
            warn_script_location=warn_script_location,
        )
+
+ # Things valid for sdists
+ @property
+ def setup_py_dir(self):
+ # type: () -> str
+ return os.path.join(
+ self.source_dir,
+ self.link and self.link.subdirectory_fragment or '')
+
    @property
    def setup_py_path(self):
        # type: () -> str
        # Absolute path of the project's setup.py; requires an unpacked
        # source tree.
        assert self.source_dir, "No source dir for %s" % self

        setup_py = os.path.join(self.setup_py_dir, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py
+
    @property
    def pyproject_toml_path(self):
        # type: () -> str
        # Path where pyproject.toml would live for this source tree (the
        # file itself may not exist).
        assert self.source_dir, "No source dir for %s" % self

        return make_pyproject_path(self.setup_py_dir)
+
    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        # None means "use the legacy setup.py path".
        self.use_pep517 = (pyproject_toml_data is not None)

        if not self.use_pep517:
            return

        requires, backend, check = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend)

        # Use a custom function to call subprocesses
        # spin_message is set by callers (e.g. prepare_pep517_metadata)
        # right before invoking a backend hook, and reset afterwards.
        self.spin_message = ""

        def runner(
            cmd,  # type: List[str]
            cwd=None,  # type: Optional[str]
            extra_environ=None  # type: Optional[Mapping[str, Any]]
        ):
            # type: (...) -> None
            # Wrap each backend subprocess in a progress spinner.
            with open_spinner(self.spin_message) as spinner:
                call_subprocess(
                    cmd,
                    cwd=cwd,
                    extra_environ=extra_environ,
                    spinner=spinner
                )
            self.spin_message = ""

        self.pep517_backend._subprocess_runner = runner
+
    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.

        For previously unnamed requirements, fills in self.req from the
        generated metadata and relocates the build directory accordingly.
        """
        assert self.source_dir

        with indent_log():
            if self.use_pep517:
                self.prepare_pep517_metadata()
            else:
                self.run_egg_info()

        if not self.req:
            # Pin to the exact generated version; "===" is needed for
            # non-PEP-440 (legacy) version strings.
            if isinstance(parse_version(self.metadata["Version"]), Version):
                op = "=="
            else:
                op = "==="
            self.req = Requirement(
                "".join([
                    self.metadata["Name"],
                    op,
                    self.metadata["Version"],
                ])
            )
            self._correct_build_location()
        else:
            metadata_name = canonicalize_name(self.metadata["Name"])
            if canonicalize_name(self.req.name) != metadata_name:
                logger.warning(
                    'Generating metadata for package %s '
                    'produced metadata for project name %s. Fix your '
                    '#egg=%s fragments.',
                    self.name, metadata_name, self.name
                )
                # Trust the generated metadata over the #egg= fragment.
                self.req = Requirement(metadata_name)
+
    def prepare_pep517_metadata(self):
        # type: () -> None
        """Generate metadata via the PEP 517 backend hook.

        Stores the resulting .dist-info location in self.metadata_directory.
        """
        assert self.pep517_backend is not None

        metadata_dir = os.path.join(
            self.setup_py_dir,
            'pip-wheel-metadata'
        )
        ensure_dir(metadata_dir)

        with self.build_env:
            # Note that Pep517HookCaller implements a fallback for
            # prepare_metadata_for_build_wheel, so we don't have to
            # consider the possibility that this hook doesn't exist.
            backend = self.pep517_backend
            self.spin_message = "Preparing wheel metadata"
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )

        self.metadata_directory = os.path.join(metadata_dir, distinfo_dir)
+
    def run_egg_info(self):
        # type: () -> None
        """Generate metadata the legacy way by running setup.py egg_info."""
        if self.name:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package %s',
                self.setup_py_path, self.name,
            )
        else:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package from %s',
                self.setup_py_path, self.link,
            )
        base_cmd = make_setuptools_shim_args(self.setup_py_path)
        if self.isolated:
            base_cmd += ["--no-user-cfg"]
        egg_info_cmd = base_cmd + ['egg_info']
        # We can't put the .egg-info files at the root, because then the
        # source code will be mistaken for an installed egg, causing
        # problems
        if self.editable:
            egg_base_option = []  # type: List[str]
        else:
            egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
            ensure_dir(egg_info_dir)
            egg_base_option = ['--egg-base', 'pip-egg-info']
        with self.build_env:
            call_subprocess(
                egg_info_cmd + egg_base_option,
                cwd=self.setup_py_dir,
                command_desc='python setup.py egg_info')
+
    @property
    def egg_info_path(self):
        # type: () -> str
        """Locate (and cache) the generated .egg-info directory.

        Non-editable builds look only in pip-egg-info/; editable installs
        search the whole source tree, skipping VCS dirs, virtualenv-looking
        dirs and test directories.
        """
        if self._egg_info_path is None:
            if self.editable:
                base = self.source_dir
            else:
                base = os.path.join(self.setup_py_dir, 'pip-egg-info')
            filenames = os.listdir(base)
            if self.editable:
                filenames = []
                for root, dirs, files in os.walk(base):
                    for dir in vcs.dirnames:
                        if dir in dirs:
                            dirs.remove(dir)
                    # Iterate over a copy of ``dirs``, since mutating
                    # a list while iterating over it can cause trouble.
                    # (See https://github.com/pypa/pip/pull/462.)
                    for dir in list(dirs):
                        # Don't search in anything that looks like a virtualenv
                        # environment
                        if (
                                os.path.lexists(
                                    os.path.join(root, dir, 'bin', 'python')
                                ) or
                                os.path.exists(
                                    os.path.join(
                                        root, dir, 'Scripts', 'Python.exe'
                                    )
                                )):
                            dirs.remove(dir)
                        # Also don't search through tests
                        elif dir == 'test' or dir == 'tests':
                            dirs.remove(dir)
                    filenames.extend([os.path.join(root, dir)
                                      for dir in dirs])
            filenames = [f for f in filenames if f.endswith('.egg-info')]

            if not filenames:
                raise InstallationError(
                    "Files/directories not found in %s" % base
                )
            # if we have more than one match, we pick the toplevel one. This
            # can easily be the case if there is a dist folder which contains
            # an extracted tarball for testing purposes.
            if len(filenames) > 1:
                filenames.sort(
                    key=lambda x: x.count(os.path.sep) +
                    (os.path.altsep and x.count(os.path.altsep) or 0)
                )
            self._egg_info_path = os.path.join(base, filenames[0])
        return self._egg_info_path
+
    @property
    def metadata(self):
        # type: () -> Any
        # Lazily load and cache the distribution metadata on first access.
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata
+
    def get_dist(self):
        # type: () -> Distribution
        """Return a pkg_resources.Distribution for this requirement"""
        # PEP 517 builds produce a .dist-info dir; legacy egg_info produces
        # a .egg-info dir — pick the matching Distribution class.
        if self.metadata_directory:
            dist_dir = self.metadata_directory
            dist_cls = pkg_resources.DistInfoDistribution
        else:
            dist_dir = self.egg_info_path.rstrip(os.path.sep)
            # https://github.com/python/mypy/issues/1174
            dist_cls = pkg_resources.Distribution  # type: ignore

        # dist_dir_name can be of the form "<project>.dist-info" or
        # e.g. "<project>.egg-info".
        base_dir, dist_dir_name = os.path.split(dist_dir)
        dist_name = os.path.splitext(dist_dir_name)[0]
        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

        return dist_cls(
            base_dir,
            project_name=dist_name,
            metadata=metadata,
        )
+
    def assert_source_matches_version(self):
        # type: () -> None
        """Warn when the unpacked source's version does not satisfy the
        requested specifier (it does not abort the install)."""
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )
+
+ # For both source distributions and editables
+ def ensure_has_source_dir(self, parent_dir):
+ # type: (str) -> str
+ """Ensure that a source_dir is set.
+
+ This will create a temporary build dir if the name of the requirement
+ isn't known yet.
+
+ :param parent_dir: The ideal pip parent_dir for the source_dir.
+ Generally src_dir for editables and build_dir for sdists.
+ :return: self.source_dir
+ """
+ if self.source_dir is None:
+ self.source_dir = self.build_location(parent_dir)
+ return self.source_dir
+
+ # For editable installations
    def install_editable(
        self,
        install_options,  # type: List[str]
        global_options=(),  # type: Sequence[str]
        prefix=None  # type: Optional[str]
    ):
        # type: (...) -> None
        """Install this requirement in editable mode via
        ``setup.py develop --no-deps``."""
        logger.info('Running setup.py develop for %s', self.name)

        if self.isolated:
            global_options = list(global_options) + ["--no-user-cfg"]

        if prefix:
            prefix_param = ['--prefix={}'.format(prefix)]
            install_options = list(install_options) + prefix_param

        with indent_log():
            # FIXME: should we do --install-headers here too?
            with self.build_env:
                call_subprocess(
                    make_setuptools_shim_args(self.setup_py_path) +
                    list(global_options) +
                    ['develop', '--no-deps'] +
                    list(install_options),

                    cwd=self.setup_py_dir,
                )

        self.install_succeeded = True
+
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        """Refresh an editable checkout from its VCS URL.

        :param obtain: update in place via the VCS backend's ``obtain``;
            otherwise ``export`` a clean copy.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        # Editable VCS URLs look like "git+https://...".
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        if not self.update:
            return
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            url = self.link.url
            if obtain:
                vcs_backend.obtain(self.source_dir, url=url)
            else:
                vcs_backend.export(self.source_dir, url=url)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))
+
+ # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False,
                  use_user_site=False):
        # type: (bool, bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        if not self.check_if_exists(use_user_site):
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        # check_if_exists() populated exactly one of these.
        dist = self.satisfied_by or self.conflicts_with

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset
+
+ def _clean_zip_name(self, name, prefix): # only used by archive.
+ # type: (str, str) -> str
+ assert name.startswith(prefix + os.path.sep), (
+ "name %r doesn't start with prefix %r" % (name, prefix)
+ )
+ name = name[len(prefix) + 1:]
+ name = name.replace(os.path.sep, '/')
+ return name
+
+ def _get_archive_name(self, path, parentdir, rootdir):
+ # type: (str, str, str) -> str
+ path = os.path.join(parentdir, path)
+ name = self._clean_zip_name(path, rootdir)
+ return self.name + '/' + name
+
+ # TODO: Investigate if this should be kept in InstallRequirement
+ # Seems to be used only when VCS + downloads
    # TODO: Investigate if this should be kept in InstallRequirement
    # Seems to be used only when VCS + downloads
    def archive(self, build_dir):
        # type: (str) -> None
        """Zip the source tree into ``build_dir`` as <name>-<version>.zip.

        Prompts interactively when the archive already exists:
        (i)gnore, (w)ipe, (b)ackup or (a)bort.
        """
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
                display_path(archive_path), ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)
        if create_archive:
            zip = zipfile.ZipFile(
                archive_path, 'w', zipfile.ZIP_DEFLATED,
                allowZip64=True
            )
            dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                # pip's own metadata directory is not part of the source.
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(dirname,
                                                         parentdir=dirpath,
                                                         rootdir=dir)
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    file_arcname = self._get_archive_name(filename,
                                                          parentdir=dirpath,
                                                          rootdir=dir)
                    filename = os.path.join(dirpath, filename)
                    zip.write(filename, file_arcname)
            zip.close()
            logger.info('Saved %s', display_path(archive_path))
+
    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        """Install this requirement.

        Dispatches to editable install, wheel install, or a legacy
        ``setup.py install`` run whose --record output is rewritten into
        installed-files.txt next to the .egg-info directory.
        """
        global_options = global_options if global_options is not None else []
        if self.editable:
            self.install_editable(
                install_options, global_options, prefix=prefix,
            )
            return
        if self.is_wheel:
            version = wheel.wheel_version(self.source_dir)
            wheel.check_compatibility(version, self.name)

            self.move_wheel_files(
                self.source_dir, root=root, prefix=prefix, home=home,
                warn_script_location=warn_script_location,
                use_user_site=use_user_site, pycompile=pycompile,
            )
            self.install_succeeded = True
            return

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + \
            self.options.get('global_options', [])
        install_options = list(install_options) + \
            self.options.get('install_options', [])

        if self.isolated:
            # https://github.com/python/mypy/issues/1174
            global_options = global_options + ["--no-user-cfg"]  # type: ignore

        with TempDirectory(kind="record") as temp_dir:
            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
            install_args = self.get_install_args(
                global_options, record_filename, root, prefix, pycompile,
            )
            msg = 'Running setup.py install for %s' % (self.name,)
            with open_spinner(msg) as spinner:
                with indent_log():
                    with self.build_env:
                        call_subprocess(
                            install_args + install_options,
                            cwd=self.setup_py_dir,
                            spinner=spinner,
                        )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                return
            self.install_succeeded = True

            def prepend_root(path):
                # type: (str) -> str
                # Paths in the record are relative to the --root, if given.
                if root is None or not os.path.isabs(path):
                    return path
                else:
                    return change_root(root, path)

            # First pass: find the installed .egg-info directory.
            with open(record_filename) as f:
                for line in f:
                    directory = os.path.dirname(line)
                    if directory.endswith('.egg-info'):
                        egg_info_dir = prepend_root(directory)
                        break
                else:
                    logger.warning(
                        'Could not find .egg-info directory in install record'
                        ' for %s',
                        self,
                    )
                    # FIXME: put the record somewhere
                    # FIXME: should this be an error?
                    return
            # Second pass: rewrite each recorded path relative to the
            # .egg-info dir for installed-files.txt.
            new_lines = []
            with open(record_filename) as f:
                for line in f:
                    filename = line.strip()
                    if os.path.isdir(filename):
                        filename += os.path.sep
                    new_lines.append(
                        os.path.relpath(prepend_root(filename), egg_info_dir)
                    )
            new_lines.sort()
            ensure_dir(egg_info_dir)
            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
            with open(inst_files_path, 'w') as f:
                f.write('\n'.join(new_lines) + '\n')
+
    def get_install_args(
        self,
        global_options,  # type: Sequence[str]
        record_filename,  # type: str
        root,  # type: Optional[str]
        prefix,  # type: Optional[str]
        pycompile  # type: bool
    ):
        # type: (...) -> List[str]
        """Build the full ``setup.py install`` command line (without the
        trailing install_options, which the caller appends)."""
        install_args = make_setuptools_shim_args(self.setup_py_path,
                                                 unbuffered_output=True)
        install_args += list(global_options) + \
            ['install', '--record', record_filename]
        install_args += ['--single-version-externally-managed']

        if root is not None:
            install_args += ['--root', root]
        if prefix is not None:
            install_args += ['--prefix', prefix]

        if pycompile:
            install_args += ["--compile"]
        else:
            install_args += ["--no-compile"]

        if running_under_virtualenv():
            # Keep headers inside the virtualenv rather than sys.prefix
            # of the base interpreter.
            py_ver_str = 'python' + sysconfig.get_python_version()
            install_args += ['--install-headers',
                             os.path.join(sys.prefix, 'include', 'site',
                                          py_ver_str, self.name)]

        return install_args
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py
new file mode 100644
index 00000000..d1966a4a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_set.py
@@ -0,0 +1,193 @@
+from __future__ import absolute_import
+
+import logging
+from collections import OrderedDict
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel import Wheel
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, Iterable, List, Optional, Tuple
+ from pip._internal.req.req_install import InstallRequirement
+
+
+logger = logging.getLogger(__name__)
+
+
class RequirementSet(object):
    """An ordered collection of InstallRequirements being resolved/installed.

    Requirements are stored in an OrderedDict keyed by project name, with a
    lower-case alias table for case-insensitive lookup.
    """

    def __init__(self, require_hashes=False, check_supported_wheels=True):
        # type: (bool, bool) -> None
        """Create a RequirementSet.

        :param require_hashes: whether every requirement must carry a hash.
        :param check_supported_wheels: if True, reject wheels whose tags do
            not match the running interpreter/platform.
        """

        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]  # noqa: E501
        self.require_hashes = require_hashes
        self.check_supported_wheels = check_supported_wheels

        # Mapping of alias: real_name
        self.requirement_aliases = {}  # type: Dict[str, str]
        # URL/path requirements with no project name yet; rescanned later.
        self.unnamed_requirements = []  # type: List[InstallRequirement]
        self.successfully_downloaded = []  # type: List[InstallRequirement]
        # Requirements whose temporary sources get deleted in cleanup_files().
        self.reqs_to_cleanup = []  # type: List[InstallRequirement]

    def __str__(self):
        # type: () -> str
        """Space-separated list of the user-supplied (top-level) reqs only."""
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        # type: () -> str
        """Debug representation listing every requirement in the set."""
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(
        self,
        install_req,  # type: InstallRequirement
        parent_req_name=None,  # type: Optional[str]
        extras_requested=None  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]  # noqa: E501
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: a two-tuple: first, additional requirements to scan — either
            [] if the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added; second, the
            requirement object actually tracked in the set (or None when the
            requirement was ignored or is still unnamed).
        """
        name = install_req.name

        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                name, install_req.markers,
            )
            return [], None

        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if self.check_supported_wheels and not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None

        try:
            existing_req = self.get_requirement(name)
        except KeyError:
            existing_req = None

        # Two *user-supplied* (direct) reqs for the same name+extras but with
        # different specifiers cannot both be honoured.
        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: %s (already in %s, name=%r)"
                % (install_req, existing_req, name)
            )

        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.requirements[name] = install_req
            # FIXME: what about other normalizations? E.g., _ vs. -?
            if name.lower() != name:
                self.requirement_aliases[name.lower()] = name
            # We'll want to rescan this requirement later
            return [install_req], install_req

        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req

        # From here on: install_req is a real requirement and existing_req is
        # only a constraint — the constraint may need upgrading to a real req.
        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            self.reqs_to_cleanup.append(install_req)
            raise InstallationError(
                "Could not satisfy constraints for '%s': "
                "installation from path or url cannot be "
                "constrained to a version" % name,
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req

    def has_requirement(self, project_name):
        # type: (str) -> bool
        """Return True if a non-constraint requirement with this name exists.

        The lookup is case-insensitive via the alias table; pure constraints
        do not count as requirements here.
        """
        name = project_name.lower()
        # NOTE: 'and' binds tighter than 'or' — this checks the direct entry
        # first, then falls back to the lower-case alias table.
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    def get_requirement(self, project_name):
        # type: (str) -> InstallRequirement
        """Look up a requirement by exact name, then case-insensitively.

        :raises KeyError: when no requirement or alias matches.
        """
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        # type: () -> None
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py
new file mode 100644
index 00000000..e36a3f6b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py
@@ -0,0 +1,96 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import hashlib
+import logging
+import os
+
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from types import TracebackType
+ from typing import Iterator, Optional, Set, Type
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.models.link import Link
+
+logger = logging.getLogger(__name__)
+
+
class RequirementTracker(object):
    """Tracks requirements whose builds are in progress.

    A marker file (named after the hash of the requirement's link) is kept in
    a shared directory for each in-flight build so that concurrent/recursive
    pip invocations can detect and refuse circular or duplicate builds. The
    directory is shared with child pip processes via the ``PIP_REQ_TRACKER``
    environment variable.
    """

    def __init__(self):
        # type: () -> None
        # Re-use an externally provided tracker directory (e.g. from a parent
        # pip process) when PIP_REQ_TRACKER is set; otherwise create our own
        # temp dir and publish it through the environment for children.
        self._root = os.environ.get('PIP_REQ_TRACKER')
        if self._root is None:
            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
            self._temp_dir.create()
            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
            logger.debug('Created requirements tracker %r', self._root)
        else:
            self._temp_dir = None
            logger.debug('Re-using requirements tracker %r', self._root)
        self._entries = set()  # type: Set[InstallRequirement]

    def __enter__(self):
        # type: () -> RequirementTracker
        return self

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        self.cleanup()

    def _entry_path(self, link):
        # type: (Link) -> str
        """Return the marker-file path for *link* inside the tracker dir."""
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req):
        # type: (InstallRequirement) -> None
        """Mark *req* as being built.

        :raises LookupError: if a build for the same link is already in
            progress (its marker file exists).
        """
        link = req.link
        info = str(req)
        entry_path = self._entry_path(link)
        try:
            with open(entry_path) as fp:
                # Error, there's already a build in progress.
                raise LookupError('%s is already being built: %s'
                                  % (link, fp.read()))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            assert req not in self._entries
            with open(entry_path, 'w') as fp:
                fp.write(info)
            self._entries.add(req)
            logger.debug('Added %s to build tracker %r', req, self._root)

    def remove(self, req):
        # type: (InstallRequirement) -> None
        """Remove *req* from the set of in-progress builds."""
        link = req.link
        self._entries.remove(req)
        os.unlink(self._entry_path(link))
        logger.debug('Removed %s from build tracker %r', req, self._root)

    def cleanup(self):
        # type: () -> None
        """Remove all tracked entries, plus the tracker dir if we created it."""
        for req in set(self._entries):
            self.remove(req)
        remove = self._temp_dir is not None
        if remove:
            self._temp_dir.cleanup()
        logger.debug('%s build tracker %r',
                     'Removed' if remove else 'Cleaned',
                     self._root)

    @contextlib.contextmanager
    def track(self, req):
        # type: (InstallRequirement) -> Iterator[None]
        """Context manager tracking *req* for the duration of its build.

        BUGFIX: the entry is now removed even when the build raises; the
        original fell through on exception, leaving a stale marker file and
        a stale ``_entries`` member until ``cleanup()``, so a retried build
        of the same requirement tripped the assert in ``add()``.
        """
        self.add(req)
        try:
            yield
        finally:
            self.remove(req)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py
new file mode 100644
index 00000000..733301ce
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py
@@ -0,0 +1,633 @@
+from __future__ import absolute_import
+
+import csv
+import functools
+import logging
+import os
+import sys
+import sysconfig
+
+from pip._vendor import pkg_resources
+
+from pip._internal.exceptions import UninstallationError
+from pip._internal.locations import bin_py, bin_user
+from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
+ normalize_path, renames, rmtree,
+)
+from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
+ )
+ from pip._vendor.pkg_resources import Distribution
+
+logger = logging.getLogger(__name__)
+
+
def _script_names(dist, script_name, is_gui):
    # type: (Distribution, str, bool) -> List[str]
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.

    On Windows the launcher .exe, its manifest, and the -script stub
    (``.pyw`` for GUI scripts, ``.py`` otherwise) are included as well.
    Returns the list of file names.
    """
    bin_dir = bin_user if dist_in_usersite(dist) else bin_py
    exe_name = os.path.join(bin_dir, script_name)
    names = [exe_name]
    if WINDOWS:
        names.append(exe_name + '.exe')
        names.append(exe_name + '.exe.manifest')
        stub_suffix = '-script.pyw' if is_gui else '-script.py'
        names.append(exe_name + stub_suffix)
    return names
+
+
+def _unique(fn):
+ # type: (Callable) -> Callable[..., Iterator[Any]]
+ @functools.wraps(fn)
+ def unique(*args, **kw):
+ # type: (Any, Any) -> Iterator[Any]
+ seen = set() # type: Set[Any]
+ for item in fn(*args, **kw):
+ if item not in seen:
+ seen.add(item)
+ yield item
+ return unique
+
+
@_unique
def uninstallation_paths(dist):
    # type: (Distribution) -> Iterator[str]
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc and .pyo in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .py[co].
    """
    reader = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in reader:
        path = os.path.join(dist.location, row[0])
        yield path
        if not path.endswith('.py'):
            continue
        dn, fn = os.path.split(path)
        stem = os.path.join(dn, fn[:-3])
        yield stem + '.pyc'
        yield stem + '.pyo'
+
+
def compact(paths):
    # type: (Iterable[str]) -> Set[str]
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    kept = set()  # type: Set[str]
    # Shortest first, so ancestors are admitted before their descendants.
    for candidate in sorted(paths, key=len):
        covered = False
        for prefix in kept:
            stripped = prefix.rstrip("*")
            if (candidate.startswith(stripped) and
                    candidate[len(stripped.rstrip(sep))] == sep):
                covered = True
                break
        if not covered:
            kept.add(candidate)
    return kept
+
+
def compress_for_rename(paths):
    # type: (Iterable[str]) -> Set[str]
    """Returns a set containing the paths that need to be renamed.

    This set may include directories when the original sequence of paths
    included every file on disk; such directories are returned with a
    trailing separator as a "wildcard" covering their whole tree.
    """
    case_map = {os.path.normcase(p): p for p in paths}
    remaining = set(case_map)
    roots = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
    wildcards = set()  # type: Set[str]

    def norm_join(*parts):
        # type: (*str) -> str
        return os.path.normcase(os.path.join(*parts))

    for root in roots:
        norm_root = os.path.normcase(root)
        if any(norm_root.startswith(w) for w in wildcards):
            # This directory has already been handled.
            continue

        found_files = set()  # type: Set[str]
        found_subdirs = set()  # type: Set[str]
        for dirpath, dirnames, filenames in os.walk(root):
            found_subdirs.update(norm_join(root, dirpath, d)
                                 for d in dirnames)
            found_files.update(norm_join(root, dirpath, f)
                               for f in filenames)

        # If every file on disk under this root is slated for removal,
        # collapse them all into a single wildcard for the directory.
        if not (found_files - remaining):
            remaining.difference_update(found_files)
            wildcards.add(root + os.sep)

    return {case_map[p] for p in remaining} | wildcards
+
+
def compress_for_output_listing(paths):
    # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
    """Returns a tuple of 2 sets of which paths to display to user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all its contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """
    deleted_paths = set(paths)
    skipped = set()  # type: Set[str]

    # Partition into package directories and individual files.
    package_dirs = set()  # type: Set[str]
    plain_files = set()  # type: Set[str]
    for path in deleted_paths:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            package_dirs.add(os.path.dirname(path))
        plain_files.add(path)

    # probably this one https://github.com/python/mypy/issues/390
    normcased = {os.path.normcase(p) for p in plain_files}  # type: ignore

    package_dirs = compact(package_dirs)

    # This walks the tree using os.walk to not miss extra folders
    # that might get added.
    for folder in package_dirs:
        for dirpath, _, filenames in os.walk(folder):
            for fname in filenames:
                if fname.endswith(".pyc"):
                    continue
                full = os.path.join(dirpath, fname)
                if (os.path.isfile(full) and
                        os.path.normcase(full) not in normcased):
                    # We are skipping this file. Add it to the set.
                    skipped.add(full)

    removed = plain_files | {
        os.path.join(folder, "*") for folder in package_dirs
    }

    return removed, skipped
+
+
class StashedUninstallPathSet(object):
    """A set of file rename operations to stash files while
    tentatively uninstalling them.

    Files/directories are moved into temporary "stash" directories;
    commit() deletes the stashes for good, while rollback() moves
    everything back into place.
    """
    def __init__(self):
        # type: () -> None
        # Mapping from source file root to [Adjacent]TempDirectory
        # for files under that directory.
        self._save_dirs = {}  # type: Dict[str, TempDirectory]
        # (old path, new path) tuples for each move that may need
        # to be undone.
        self._moves = []  # type: List[Tuple[str, str]]

    def _get_directory_stash(self, path):
        # type: (str) -> str
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            save_dir = AdjacentTempDirectory(path)  # type: TempDirectory
            save_dir.create()
        except OSError:
            # Adjacent stash failed (e.g. permissions); fall back to a
            # regular temp directory.
            save_dir = TempDirectory(kind="uninstall")
            save_dir.create()
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path

    def _get_file_stash(self, path):
        # type: (str) -> str
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        # Walk upwards looking for an already-registered stash root.
        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind='uninstall')
            save_dir.create()
            self._save_dirs[head] = save_dir

        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path

    def stash(self, path):
        # type: (str) -> str
        """Stashes the directory or file and returns its new location.
        """
        if os.path.isdir(path):
            new_path = self._get_directory_stash(path)
        else:
            new_path = self._get_file_stash(path)

        self._moves.append((path, new_path))
        if os.path.isdir(path) and os.path.isdir(new_path):
            # If we're moving a directory, we need to
            # remove the destination first or else it will be
            # moved to inside the existing directory.
            # We just created new_path ourselves, so it will
            # be removable.
            os.rmdir(new_path)
        renames(path, new_path)
        return new_path

    def commit(self):
        # type: () -> None
        """Commits the uninstall by removing stashed files."""
        for _, save_dir in self._save_dirs.items():
            save_dir.cleanup()
        self._moves = []
        self._save_dirs = {}

    def rollback(self):
        # type: () -> None
        """Undoes the uninstall by moving stashed files back."""
        for p in self._moves:
            # BUGFIX: use the module-level ``logger`` (was ``logging.info``,
            # which goes to the root logger and bypasses pip's logging
            # configuration and indentation).
            logger.info("Moving to %s\n from %s", *p)

        for new_path, path in self._moves:
            try:
                logger.debug('Replacing %s from %s', new_path, path)
                if os.path.isfile(new_path):
                    os.unlink(new_path)
                elif os.path.isdir(new_path):
                    rmtree(new_path)
                renames(path, new_path)
            except OSError as ex:
                # Best-effort: report and keep restoring the remaining files.
                logger.error("Failed to restore %s", new_path)
                logger.debug("Exception: %s", ex)

        self.commit()

    @property
    def can_rollback(self):
        # type: () -> bool
        # True while there are stashed moves that have not been committed.
        return bool(self._moves)
+
+
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # type: (Distribution) -> None
        # Paths confirmed to exist and be local (safe to remove).
        self.paths = set()  # type: Set[str]
        # Paths we refuse to touch (outside the local environment).
        self._refuse = set()  # type: Set[str]
        # .pth files needing entries removed, keyed by pth file path.
        self.pth = {}  # type: Dict[str, UninstallPthEntries]
        self.dist = dist
        # Stash, so the removal can be rolled back until commit().
        self._moved_paths = StashedUninstallPathSet()

    def _permitted(self, path):
        # type: (str) -> bool
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        # type: (str) -> None
        """Register *path* for removal if it exists and is permitted."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        # type: (str, str) -> None
        """Register removal of *entry* from the given .pth file."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def remove(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> None
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True).

        Files are stashed (not deleted) so rollback() remains possible
        until commit() is called.
        """

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                moved = self._moved_paths

                for_rename = compress_for_rename(self.paths)

                for path in sorted(compact(for_rename)):
                    moved.stash(path)
                    logger.debug('Removing file or directory %s', path)

                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        # type: (bool) -> bool
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            # type: (str, Iterable[str]) -> None
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = set(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)
        if verbose:
            _display('Will actually move:', compress_for_rename(self.paths))

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        # type: () -> None
        """Rollback the changes previously made by remove()."""
        if not self._moved_paths.can_rollback:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        self._moved_paths.rollback()
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        # type: () -> None
        """Remove temporary save dir: rollback will no longer be possible."""
        self._moved_paths.commit()

    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> UninstallPathSet
        """Build the UninstallPathSet for an installed distribution,
        dispatching on its installation format (egg-info, dist-info,
        .egg, develop egg-link, or distutils record).
        """
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
+
+
class UninstallPthEntries(object):
    """Tracks entries to delete from one ``.pth`` file, with rollback."""

    def __init__(self, pth_file):
        # type: (str) -> None
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()  # type: Set[str]
        # Raw bytes of the file before remove(), kept for rollback().
        self._saved_lines = None  # type: Optional[List[bytes]]

    def add(self, entry):
        # type: (str) -> None
        """Register one entry for removal from the .pth file."""
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        # type: () -> None
        """Rewrite the .pth file without the registered entries."""
        logger.debug('Removing pth entries from %s:', self.file)
        # windows uses '\r\n' with py3k, but uses '\n' with py2.x
        with open(self.file, 'rb') as fh:
            lines = fh.readlines()
        self._saved_lines = lines
        endline = '\r\n' if any(b'\r\n' in line for line in lines) else '\n'
        terminator = endline.encode("utf-8")
        # handle missing trailing newline
        if lines and not lines[-1].endswith(terminator):
            lines[-1] = lines[-1] + terminator
        for entry in self.entries:
            logger.debug('Removing entry: %s', entry)
            try:
                lines.remove(entry.encode("utf-8") + terminator)
            except ValueError:
                # Entry not present — nothing to strip.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        # type: () -> bool
        """Restore the .pth file saved by remove(); returns success."""
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__init__.py
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..da751af6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
new file mode 100644
index 00000000..6a487507
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
new file mode 100644
index 00000000..056eb515
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
new file mode 100644
index 00000000..5d1469b3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
new file mode 100644
index 00000000..b960d19a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
new file mode 100644
index 00000000..5ed92bc5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
new file mode 100644
index 00000000..0670c31d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
new file mode 100644
index 00000000..bd7daf9d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
new file mode 100644
index 00000000..2b827a00
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc
new file mode 100644
index 00000000..9b90b6d8
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
new file mode 100644
index 00000000..4d3b0d92
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pyc
new file mode 100644
index 00000000..4f0a69f7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/models.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
new file mode 100644
index 00000000..aaa7ead3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
new file mode 100644
index 00000000..321ef73d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
new file mode 100644
index 00000000..4362f053
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
new file mode 100644
index 00000000..ef1701df
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
new file mode 100644
index 00000000..6f329e61
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
new file mode 100644
index 00000000..90684aa7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc
new file mode 100644
index 00000000..69b60a11
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 00000000..fb261110
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,268 @@
+"""
+This code was taken from https://github.com/ActiveState/appdirs and modified
+to suit our purposes.
+"""
+from __future__ import absolute_import
+
+import os
+import sys
+
+from pip._vendor.six import PY2, text_type
+
+from pip._internal.utils.compat import WINDOWS, expanduser
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+
def user_cache_dir(appname):
    # type: (str) -> str
    r"""Return the full path of the per-user cache dir for this application.

    "appname" is the name of application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       $XDG_CACHE_HOME/<AppName>, or ~/.cache/<AppName>
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory (the non-roaming app data
    dir, same as `user_data_dir` returns by default).  Apps conventionally
    nest their cache *under* that dir, so "Cache" is appended here.
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))

        # On Python 2, return bytes on Windows just like on the other
        # platforms.  See _win_path_to_bytes for the rationale.
        if PY2 and isinstance(base, text_type):
            base = _win_path_to_bytes(base)

        return os.path.join(base, appname, "Cache")

    if sys.platform == "darwin":
        return os.path.join(expanduser("~/Library/Caches"), appname)

    # Other Unix: follow the XDG spec.
    base = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
    return os.path.join(base, appname)
+
+
def user_data_dir(appname, roaming=False):
    # type: (str, bool) -> str
    r"""Return full path to the user-specific data dir for this application.

    "appname" is the name of application.
    "roaming" (boolean, default False) can be set True to use the Windows
        roaming appdata directory, so that on a Windows network set up for
        roaming profiles this user data is sync'd on login.  See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
                                if it exists, else ~/.config/<AppName>
        Unix:                   ~/.local/share/<AppName>, or in
                                $XDG_DATA_HOME if defined
        Win 7 (not roaming):    C:\Users\<username>\AppData\Local\<AppName>
        Win 7 (roaming):        C:\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        return os.path.join(os.path.normpath(_get_win_folder(const)), appname)

    if sys.platform == "darwin":
        # Prefer the conventional macOS location when it is already a
        # directory, otherwise fall back to ~/.config.
        mac_dir = os.path.join(
            expanduser('~/Library/Application Support/'), appname,
        )
        if os.path.isdir(mac_dir):
            return mac_dir
        return os.path.join(expanduser('~/.config/'), appname)

    return os.path.join(
        os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
        appname,
    )
+
+
def user_config_dir(appname, roaming=True):
    # type: (str, bool) -> str
    """Return full path to the user-specific config dir for this application.

    "appname" is the name of application.
    "roaming" (boolean, default True) can be set False to not use the
        Windows roaming appdata directory; see user_data_dir and
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user config directories are:
        macOS:  same as user_data_dir
        Unix:   $XDG_CONFIG_HOME/<AppName>, or ~/.config/<AppName>
        Win *:  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if WINDOWS:
        return user_data_dir(appname, roaming=roaming)
    if sys.platform == "darwin":
        return user_data_dir(appname)
    base = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
    return os.path.join(base, appname)
+
+
+# for the discussion regarding site_config_dirs locations
+# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):
    # type: (str) -> List[str]
    r"""Return a list of potential user-shared config dirs for this application.

    "appname" is the name of application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc and, for each entry in $XDG_CONFIG_DIRS,
                    $XDG_CONFIG_DIRS[i]/<AppName>/
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    if WINDOWS:
        common = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        return [os.path.join(common, appname)]
    if sys.platform == 'darwin':
        return [os.path.join('/Library/Application Support', appname)]

    # Other Unix: every entry of $XDG_CONFIG_DIRS (default /etc/xdg) ...
    xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
    if xdg_config_dirs:
        dirs = [
            os.path.join(expanduser(entry), appname)
            for entry in xdg_config_dirs.split(os.pathsep)
        ]
    else:
        dirs = []

    # ... plus /etc directly as well.
    dirs.append('/etc')
    return dirs
+
+
+# -- Windows support functions --
+
def _get_win_folder_from_registry(csidl_name):
    # type: (str) -> str
    """
    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    # Python 2 module name; on Python 3 this module is called "winreg".
    # NOTE(review): this path is only taken when ctypes is unavailable —
    # see the _get_win_folder binding below.
    import _winreg

    # Map the CSIDL constant name to the value name used under the
    # "Shell Folders" registry key.
    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    # QueryValueEx returns (value, type); only the value is needed.
    directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
    return directory
+
+
def _get_win_folder_with_ctypes(csidl_name):
    # type: (str) -> str
    """Resolve a CSIDL_* special folder via shell32.SHGetFolderPathW.

    Relies on the module-level ``import ctypes`` performed when WINDOWS
    is true (see the _get_win_folder binding below).
    """
    # Numeric CSIDL constants from the Windows shell API.
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value
+
+
# Pick the folder-resolution strategy once at import time: prefer the
# ctypes-based SHGetFolderPathW call, falling back to the registry when
# ctypes is unavailable.
if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry
+
+
+def _win_path_to_bytes(path):
+ """Encode Windows paths to bytes. Only used on Python 2.
+
+ Motivation is to be consistent with other operating systems where paths
+ are also returned as bytes. This avoids problems mixing bytes and Unicode
+ elsewhere in the codebase. For more details and discussion see
+ <https://github.com/pypa/pip/issues/3463>.
+
+ If encoding using ASCII and MBCS fails, return the original Unicode path.
+ """
+ for encoding in ('ASCII', 'MBCS'):
+ try:
+ return path.encode(encoding)
+ except (UnicodeEncodeError, LookupError):
+ pass
+ return path
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py
new file mode 100644
index 00000000..ec3995c2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/compat.py
@@ -0,0 +1,293 @@
+"""Stuff that differs in different Python versions and platform
+distributions."""
+from __future__ import absolute_import, division
+
+import codecs
+import locale
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor.six import text_type
+from pip._vendor.urllib3.util import IS_PYOPENSSL
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Text, Tuple, Union
+
+try:
+ import _ssl # noqa
+except ImportError:
+ ssl = None
+else:
+ # This additional assignment was needed to prevent a mypy error.
+ ssl = _ssl
+
+try:
+ import ipaddress
+except ImportError:
+ try:
+ from pip._vendor import ipaddress # type: ignore
+ except ImportError:
+ import ipaddr as ipaddress # type: ignore
+ ipaddress.ip_address = ipaddress.IPAddress # type: ignore
+ ipaddress.ip_network = ipaddress.IPNetwork # type: ignore
+
+
+__all__ = [
+ "ipaddress", "uses_pycache", "console_to_str", "native_str",
+ "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
+ "get_extension_suffixes",
+]
+
+
+logger = logging.getLogger(__name__)
+
+HAS_TLS = (ssl is not None) or IS_PYOPENSSL
+
+if sys.version_info >= (3, 4):
+ uses_pycache = True
+ from importlib.util import cache_from_source
+else:
+ import imp
+
+ try:
+ cache_from_source = imp.cache_from_source # type: ignore
+ except AttributeError:
+ # does not use __pycache__
+ cache_from_source = None
+
+ uses_pycache = cache_from_source is not None
+
+
+if sys.version_info >= (3, 5):
+ backslashreplace_decode = "backslashreplace"
+else:
+ # In version 3.4 and older, backslashreplace exists
+ # but does not support use for decoding.
+ # We implement our own replace handler for this
+ # situation, so that we can consistently use
+ # backslash replacement for all versions.
+ def backslashreplace_decode_fn(err):
+ raw_bytes = (err.object[i] for i in range(err.start, err.end))
+ if sys.version_info[0] == 2:
+ # Python 2 gave us characters - convert to numeric bytes
+ raw_bytes = (ord(b) for b in raw_bytes)
+ return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
+ codecs.register_error(
+ "backslashreplace_decode",
+ backslashreplace_decode_fn,
+ )
+ backslashreplace_decode = "backslashreplace_decode"
+
+
def str_to_display(data, desc=None):
    # type: (Union[bytes, Text], Optional[str]) -> Text
    """
    For display or logging purposes, convert a bytes object (or text) to
    text (e.g. unicode in Python 2) safe for output.

    :param desc: An optional phrase describing the input data, for use in
        the log message if a warning is logged. Defaults to "Bytes object".

    This function should never error out and so can take a best effort
    approach. It is okay to be lossy if needed since the return value is
    just for display.

    We assume the data is in the locale preferred encoding. If it won't
    decode properly, we warn the user but decode as best we can.

    We also ensure that the output can be safely written to standard output
    without encoding errors.
    """
    if isinstance(data, text_type):
        return data

    # data is a bytes object (str in Python 2).  Guess its encoding: the
    # locale's preferred encoding, unless that is missing or plain ASCII,
    # in which case assume UTF-8.
    encoding = locale.getpreferredencoding()
    if not encoding or codecs.lookup(encoding).name == "ascii":
        encoding = "utf-8"

    # Decode, warning the user and substituting escapes on failure.
    try:
        decoded = data.decode(encoding)
    except UnicodeDecodeError:
        msg_format = '{} does not appear to be encoded as %s'.format(
            desc if desc is not None else 'Bytes object'
        )
        logger.warning(msg_format, encoding)
        decoded = data.decode(encoding, errors=backslashreplace_decode)

    # Make sure the result survives being written to the output stream:
    # round-trip it through stderr's encoding (if one can be determined)
    # with backslash replacement of unencodable characters.  stderr is
    # used because it is less likely to be redirected; the double getattr
    # guards against sys.__stderr__ being absent or lacking an "encoding"
    # attribute in unusual (unsupported) embedding situations.
    stderr = getattr(sys, "__stderr__", None)
    output_encoding = getattr(stderr, "encoding", None)
    if output_encoding:
        roundtripped = decoded.encode(
            output_encoding, errors="backslashreplace"
        )
        decoded = roundtripped.decode(output_encoding)

    return decoded
+
+
def console_to_str(data):
    # type: (bytes) -> Text
    """Return a string, safe for output, of subprocess output.

    Thin wrapper over str_to_display with a descriptive label for the
    warning that is logged when the bytes do not decode cleanly.
    """
    return str_to_display(data, desc='Subprocess output')
+
+
if sys.version_info >= (3,):
    def native_str(s, replace=False):
        # type: (str, bool) -> str
        """Coerce *s* to the native str type, decoding bytes as UTF-8.

        With replace=True, undecodable bytes become U+FFFD instead of
        raising UnicodeDecodeError.
        """
        if isinstance(s, bytes):
            errors = 'replace' if replace else 'strict'
            return s.decode('utf-8', errors)
        return s

else:
    def native_str(s, replace=False):
        # type: (str, bool) -> str
        """Coerce *s* to the native str type, encoding unicode as UTF-8.

        *replace* is ignored: encoding unicode to UTF-8 cannot fail.
        """
        if isinstance(s, text_type):
            return s.encode('utf-8')
        return s
+
+
def get_path_uid(path):
    # type: (str) -> int
    """
    Return path's uid.

    Does not follow symlinks:
    https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        # O_NOFOLLOW makes os.open itself fail on a symlink, closing the
        # check-vs-use race.
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        try:
            return os.fstat(fd).st_uid
        finally:
            # Fix: close the descriptor even if os.fstat raises; the
            # original leaked fd on that path.
            os.close(fd)
    # AIX and Jython lack O_NOFOLLOW.
    # WARNING: time-of-check vulnerability, but best we can do w/o NOFOLLOW
    if os.path.islink(path):
        # raise OSError for parity with os.O_NOFOLLOW above
        raise OSError(
            "%s is a symlink; Will not return uid for symlinks" % path
        )
    # older versions of Jython don't have `os.fstat`
    return os.stat(path).st_uid
+
+
if sys.version_info >= (3, 4):
    from importlib.machinery import EXTENSION_SUFFIXES

    def get_extension_suffixes():
        # Python 3.4+: importlib exposes the list of C-extension
        # filename suffixes directly.
        return EXTENSION_SUFFIXES
else:
    from imp import get_suffixes

    def get_extension_suffixes():
        # Older Pythons: imp.get_suffixes() yields (suffix, mode, type)
        # triples; keep only the suffix strings.
        return [suffix[0] for suffix in get_suffixes()]
+
+
def expanduser(path):
    # type: (str) -> str
    """
    Expand ~ and ~user constructions.

    Includes a workaround for https://bugs.python.org/issue14768: with a
    HOME of "/", os.path.expanduser yields a result starting with "//",
    which this strips back to a single slash.
    """
    expanded = os.path.expanduser(path)
    needs_fixup = path.startswith('~/') and expanded.startswith('//')
    return expanded[1:] if needs_fixup else expanded
+
+
+# packages in the stdlib that may have installation metadata, but should not be
+# considered 'installed'. this theoretically could be determined based on
+# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
+# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
+# make this ineffective, so hard-coding
+stdlib_pkgs = {"python", "wsgiref", "argparse"}
+
+
+# windows detection, covers cpython and ironpython
+WINDOWS = (sys.platform.startswith("win") or
+ (sys.platform == 'cli' and os.name == 'nt'))
+
+
def samefile(file1, file2):
    # type: (str, str) -> bool
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if hasattr(os.path, 'samefile'):
        return os.path.samefile(file1, file2)

    # Fallback: compare case-normalised absolute paths.
    def canonical(name):
        return os.path.normcase(os.path.abspath(name))

    return canonical(file1) == canonical(file2)
+
+
if hasattr(shutil, 'get_terminal_size'):
    def get_terminal_size():
        # type: () -> Tuple[int, int]
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        # Python 3.3+: defer to the stdlib implementation.
        return tuple(shutil.get_terminal_size())  # type: ignore
else:
    def get_terminal_size():
        # type: () -> Tuple[int, int]
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        def ioctl_GWINSZ(fd):
            # Ask the kernel for the window size of the tty behind fd;
            # returns the unpacked (rows, cols) or None on failure.
            try:
                import fcntl
                import termios
                import struct
                cr = struct.unpack_from(
                    'hh',
                    fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
                )
            except Exception:
                return None
            if cr == (0, 0):
                return None
            return cr
        # Probe stdin, stdout, stderr in turn, then the controlling tty.
        cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
        if not cr:
            try:
                fd = os.open(os.ctermid(), os.O_RDONLY)
                cr = ioctl_GWINSZ(fd)
                os.close(fd)
            except Exception:
                pass
        if not cr:
            # Last resort: environment variables with classic defaults.
            cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
        # cr is (rows, cols); callers expect (width, height).
        return int(cr[1]), int(cr[0])
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 00000000..b9359bdd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,100 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+from __future__ import absolute_import
+
+import logging
+import warnings
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Optional
+
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
class PipDeprecationWarning(Warning):
    """Warning category for all deprecation warnings emitted by pip."""
    pass
+
+
+_original_showwarning = None # type: Any
+
+
+# Warnings <-> Logging Integration
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+ if file is not None:
+ if _original_showwarning is not None:
+ _original_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+ elif issubclass(category, PipDeprecationWarning):
+ # We use a specially named logger which will handle all of the
+ # deprecation messages for pip.
+ logger = logging.getLogger("pip._internal.deprecations")
+ logger.warning(message)
+ else:
+ _original_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+
+
def install_warning_logger():
    # type: () -> None
    """Route PipDeprecationWarning through the logging system.

    Enables "default" filtering for PipDeprecationWarning and installs
    _showwarning, saving the previous hook exactly once so repeated
    calls do not clobber it.
    """
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    global _original_showwarning
    if _original_showwarning is None:
        _original_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
+
+
def deprecated(reason, replacement, gone_in, issue=None):
    # type: (str, Optional[str], Optional[str], Optional[int]) -> None
    """Helper to deprecate existing functionality.

    reason:
        Textual reason shown to the user about why this functionality has
        been deprecated.
    replacement:
        Textual suggestion shown to the user about what alternative
        functionality they can use.
    gone_in:
        The version of pip does this functionality should get removed in.
        Raises errors if pip's current version is greater than or equal to
        this.
    issue:
        Issue number on the tracker that would serve as a useful place for
        users to find related discussion and provide feedback.

    Always pass replacement, gone_in and issue as keyword arguments for clarity
    at the call site.
    """
    # Build the message eagerly so it is logged exactly as if someone had
    # typed the whole thing out.
    sentences = []
    if reason is not None:
        sentences.append((DEPRECATION_MSG_PREFIX + "{}").format(reason))
    if gone_in is not None:
        sentences.append(
            "pip {} will remove support for this functionality.".format(gone_in)
        )
    if replacement is not None:
        sentences.append("A possible replacement is {}.".format(replacement))
    if issue is not None:
        sentences.append(
            (
                "You can find discussion regarding this at "
                "https://github.com/pypa/pip/issues/{}."
            ).format(issue)
        )
    message = " ".join(sentences)

    # Raise as an error if the removal version has already been reached.
    if gone_in is not None and parse(current_version) >= parse(gone_in):
        raise PipDeprecationWarning(message)

    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py
new file mode 100644
index 00000000..30139f2e
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/encoding.py
@@ -0,0 +1,39 @@
+import codecs
+import locale
+import re
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Tuple, Text
+
# Known byte-order marks, longest-prefix-wins order as checked below.
BOMS = [
    (codecs.BOM_UTF8, 'utf-8'),
    (codecs.BOM_UTF16, 'utf-16'),
    (codecs.BOM_UTF16_BE, 'utf-16-be'),
    (codecs.BOM_UTF16_LE, 'utf-16-le'),
    (codecs.BOM_UTF32, 'utf-32'),
    (codecs.BOM_UTF32_BE, 'utf-32-be'),
    (codecs.BOM_UTF32_LE, 'utf-32-le'),
]  # type: List[Tuple[bytes, Text]]

# PEP 263 style declaration, e.g. "# -*- coding: utf-8 -*-".
ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')


def auto_decode(data):
    # type: (bytes) -> Text
    """Check a bytes string for a BOM to correctly detect the encoding

    Fallback to locale.getpreferredencoding(False) like open() on Python3"""
    # A BOM wins outright and is stripped from the result.
    for bom, bom_encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(bom_encoding)
    # Otherwise honour a PEP 263 coding comment in the first two lines.
    for line in data.split(b'\n')[:2]:
        if not line.startswith(b'#'):
            continue
        declared = ENCODING_RE.search(line)
        if declared:
            return data.decode(declared.groups()[0].decode('ascii'))
    fallback = locale.getpreferredencoding(False) or sys.getdefaultencoding()
    return data.decode(fallback)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py
new file mode 100644
index 00000000..1e6b0338
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py
@@ -0,0 +1,30 @@
+import os
+import os.path
+
+from pip._internal.utils.compat import get_path_uid
+
+
def check_path_owner(path):
    # type: (str) -> bool
    """Return True if the current user can be treated as the owner of
    *path* (or of its nearest existing ancestor directory).

    Walks upward from *path* to the first component that exists, then
    checks writability for ordinary users, or actual uid-0 ownership
    when running as root (to behave sensibly under "sudo" without -H).
    """
    # Without geteuid() there is no effective uid to compare against, so
    # just assume we own the directory.
    if not hasattr(os, "geteuid"):
        return True

    previous = None
    while path != previous:
        if not os.path.lexists(path):
            # Component missing: step up to the parent and retry.
            previous, path = path, os.path.dirname(path)
            continue
        if os.geteuid() != 0:
            # Ordinary user: writability is the ownership proxy.
            return os.access(path, os.W_OK)
        # Root: writability is meaningless, require uid-0 ownership.
        try:
            return get_path_uid(path) == 0
        except OSError:
            return False
    return False  # assume we don't own the path
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py
new file mode 100644
index 00000000..aa77d9b6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/glibc.py
@@ -0,0 +1,120 @@
+from __future__ import absolute_import
+
+import os
+import re
+import warnings
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+
+
+def glibc_version_string():
+ # type: () -> Optional[str]
+ "Returns glibc version string, or None if not using glibc."
+ return glibc_version_string_confstr() or glibc_version_string_ctypes()
+
+
+def glibc_version_string_confstr():
+ # type: () -> Optional[str]
+ "Primary implementation of glibc_version_string using os.confstr."
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module:
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+ try:
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
+ _, version = os.confstr("CS_GNU_LIBC_VERSION").split()
+ except (AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def glibc_version_string_ctypes():
+ # type: () -> Optional[str]
+ "Fallback implementation of glibc_version_string using ctypes."
+
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ process_namespace = ctypes.CDLL(None)
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+# Separated out from have_compatible_glibc for easier unit testing
+def check_glibc_version(version_str, required_major, minimum_minor):
+ # type: (str, int, int) -> bool
+ # Parse string and check against requested version.
+ #
+ # We use a regexp instead of str.split because we want to discard any
+ # random junk that might come after the minor version -- this might happen
+ # in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ # uses version strings like "2.20-2014.11"). See gh-3588.
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn("Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str, RuntimeWarning)
+ return False
+ return (int(m.group("major")) == required_major and
+ int(m.group("minor")) >= minimum_minor)
+
+
+def have_compatible_glibc(required_major, minimum_minor):
+ # type: (int, int) -> bool
+ version_str = glibc_version_string()
+ if version_str is None:
+ return False
+ return check_glibc_version(version_str, required_major, minimum_minor)
+
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.7')
+# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.9')
+#
+# But the truth is:
+#
+# ~$ ldd --version
+# ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver():
+ # type: () -> Tuple[str, str]
+ """Try to determine the glibc version
+
+ Returns a tuple of strings (lib, version) which default to empty strings
+ in case the lookup fails.
+ """
+ glibc_version = glibc_version_string()
+ if glibc_version is None:
+ return ("", "")
+ else:
+ return ("glibc", glibc_version)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py
new file mode 100644
index 00000000..e8aabe1a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/hashes.py
@@ -0,0 +1,128 @@
+from __future__ import absolute_import
+
+import hashlib
+
+from pip._vendor.six import iteritems, iterkeys, itervalues
+
+from pip._internal.exceptions import (
+ HashMismatch, HashMissing, InstallationError,
+)
+from pip._internal.utils.misc import read_chunks
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Dict, List, BinaryIO, NoReturn, Iterator
+ )
+ from pip._vendor.six import PY3
+ if PY3:
+ from hashlib import _Hash
+ else:
+ from hashlib import _hash as _Hash
+
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = 'sha256'
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ['sha256', 'sha384', 'sha512']
+
+
+class Hashes(object):
+ """A wrapper that builds multiple hashes at once and checks them against
+ known-good values
+
+ """
+ def __init__(self, hashes=None):
+ # type: (Dict[str, List[str]]) -> None
+ """
+ :param hashes: A dict of algorithm names pointing to lists of allowed
+ hex digests
+ """
+ self._allowed = {} if hashes is None else hashes
+
+ @property
+ def digest_count(self):
+ # type: () -> int
+ return sum(len(digests) for digests in self._allowed.values())
+
+ def is_hash_allowed(
+ self,
+ hash_name, # type: str
+ hex_digest, # type: str
+ ):
+ """Return whether the given hex digest is allowed."""
+ return hex_digest in self._allowed.get(hash_name, [])
+
+ def check_against_chunks(self, chunks):
+ # type: (Iterator[bytes]) -> None
+ """Check good hashes against ones built from iterable of chunks of
+ data.
+
+ Raise HashMismatch if none match.
+
+ """
+ gots = {}
+ for hash_name in iterkeys(self._allowed):
+ try:
+ gots[hash_name] = hashlib.new(hash_name)
+ except (ValueError, TypeError):
+ raise InstallationError('Unknown hash name: %s' % hash_name)
+
+ for chunk in chunks:
+ for hash in itervalues(gots):
+ hash.update(chunk)
+
+ for hash_name, got in iteritems(gots):
+ if got.hexdigest() in self._allowed[hash_name]:
+ return
+ self._raise(gots)
+
+ def _raise(self, gots):
+ # type: (Dict[str, _Hash]) -> NoReturn
+ raise HashMismatch(self._allowed, gots)
+
+ def check_against_file(self, file):
+ # type: (BinaryIO) -> None
+ """Check good hashes against a file-like object
+
+ Raise HashMismatch if none match.
+
+ """
+ return self.check_against_chunks(read_chunks(file))
+
+ def check_against_path(self, path):
+ # type: (str) -> None
+ with open(path, 'rb') as file:
+ return self.check_against_file(file)
+
+ def __nonzero__(self):
+ # type: () -> bool
+ """Return whether I know any known-good hashes."""
+ return bool(self._allowed)
+
+ def __bool__(self):
+ # type: () -> bool
+ return self.__nonzero__()
+
+
+class MissingHashes(Hashes):
+ """A workalike for Hashes used when we're missing a hash for a requirement
+
+ It computes the actual hash of the requirement and raises a HashMissing
+ exception showing it to the user.
+
+ """
+ def __init__(self):
+ # type: () -> None
+ """Don't offer the ``hashes`` kwarg."""
+ # Pass our favorite hash in to generate a "gotten hash". With the
+ # empty list, it will never match, so an error will always raise.
+ super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
+
+ def _raise(self, gots):
+ # type: (Dict[str, _Hash]) -> NoReturn
+ raise HashMissing(gots[FAVORITE_HASH].hexdigest())
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py
new file mode 100644
index 00000000..3fbec712
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/logging.py
@@ -0,0 +1,394 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import logging
+import logging.handlers
+import os
+import sys
+from logging import Filter
+
+from pip._vendor.six import PY2
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
+from pip._internal.utils.misc import ensure_dir, subprocess_logger
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading # type: ignore
+
+
+try:
+ # Use "import as" and set colorama in the else clause to avoid mypy
+ # errors and get the following correct revealed type for colorama:
+ # `Union[_importlib_modulespec.ModuleType, None]`
+ # Otherwise, we get an error like the following in the except block:
+ # > Incompatible types in assignment (expression has type "None",
+ # variable has type Module)
+ # TODO: eliminate the need to use "import as" once mypy addresses some
+ # of its issues with conditional imports. Here is an umbrella issue:
+ # https://github.com/python/mypy/issues/1297
+ from pip._vendor import colorama as _colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+else:
+ # Import Fore explicitly rather than accessing below as colorama.Fore
+ # to avoid the following error running mypy:
+ # > Module has no attribute "Fore"
+ # TODO: eliminate the need to import Fore once mypy addresses some of its
+ # issues with conditional imports. This particular case could be an
+ # instance of the following issue (but also see the umbrella issue above):
+ # https://github.com/python/mypy/issues/3500
+ from pip._vendor.colorama import Fore
+
+ colorama = _colorama
+
+
+_log_state = threading.local()
+_log_state.indentation = 0
+
+
+class BrokenStdoutLoggingError(Exception):
+ """
+ Raised if BrokenPipeError occurs for the stdout stream while logging.
+ """
+ pass
+
+
+# BrokenPipeError does not exist in Python 2 and, in addition, manifests
+# differently in Windows and non-Windows.
+if WINDOWS:
+ # In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
+ # https://bugs.python.org/issue19612
+ # https://bugs.python.org/issue30418
+ if PY2:
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return (exc_class is IOError and
+ exc.errno in (errno.EINVAL, errno.EPIPE))
+ else:
+ # In Windows, a broken pipe IOError became OSError in Python 3.
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return ((exc_class is BrokenPipeError) or # noqa: F821
+ (exc_class is OSError and
+ exc.errno in (errno.EINVAL, errno.EPIPE)))
+elif PY2:
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return (exc_class is IOError and exc.errno == errno.EPIPE)
+else:
+ # Then we are in the non-Windows Python 3 case.
+ def _is_broken_pipe_error(exc_class, exc):
+ """
+ Return whether an exception is a broken pipe error.
+
+ Args:
+ exc_class: an exception class.
+ exc: an exception instance.
+ """
+ return (exc_class is BrokenPipeError) # noqa: F821
+
+
+@contextlib.contextmanager
+def indent_log(num=2):
+ """
+ A context manager which will cause the log output to be indented for any
+ log messages emitted inside it.
+ """
+ _log_state.indentation += num
+ try:
+ yield
+ finally:
+ _log_state.indentation -= num
+
+
+def get_indentation():
+ return getattr(_log_state, 'indentation', 0)
+
+
+class IndentingFormatter(logging.Formatter):
+
+ def __init__(self, *args, **kwargs):
+ """
+ A logging.Formatter that obeys the indent_log() context manager.
+
+ :param add_timestamp: A bool indicating output lines should be prefixed
+ with their record's timestamp.
+ """
+ self.add_timestamp = kwargs.pop("add_timestamp", False)
+ super(IndentingFormatter, self).__init__(*args, **kwargs)
+
+ def get_message_start(self, formatted, levelno):
+ """
+ Return the start of the formatted log message (not counting the
+ prefix to add to each line).
+ """
+ if levelno < logging.WARNING:
+ return ''
+ if formatted.startswith(DEPRECATION_MSG_PREFIX):
+ # Then the message already has a prefix. We don't want it to
+ # look like "WARNING: DEPRECATION: ...."
+ return ''
+ if levelno < logging.ERROR:
+ return 'WARNING: '
+
+ return 'ERROR: '
+
+ def format(self, record):
+ """
+ Calls the standard formatter, but will indent all of the log message
+ lines by our current indentation level.
+ """
+ formatted = super(IndentingFormatter, self).format(record)
+ message_start = self.get_message_start(formatted, record.levelno)
+ formatted = message_start + formatted
+
+ prefix = ''
+ if self.add_timestamp:
+ # TODO: Use Formatter.default_time_format after dropping PY2.
+ t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S")
+ prefix = '%s,%03d ' % (t, record.msecs)
+ prefix += " " * get_indentation()
+ formatted = "".join([
+ prefix + line
+ for line in formatted.splitlines(True)
+ ])
+ return formatted
+
+
+def _color_wrap(*colors):
+ def wrapped(inp):
+ return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
+ return wrapped
+
+
+class ColorizedStreamHandler(logging.StreamHandler):
+
+ # Don't build up a list of colors if we don't have colorama
+ if colorama:
+ COLORS = [
+ # This needs to be in order from highest logging level to lowest.
+ (logging.ERROR, _color_wrap(Fore.RED)),
+ (logging.WARNING, _color_wrap(Fore.YELLOW)),
+ ]
+ else:
+ COLORS = []
+
+ def __init__(self, stream=None, no_color=None):
+ logging.StreamHandler.__init__(self, stream)
+ self._no_color = no_color
+
+ if WINDOWS and colorama:
+ self.stream = colorama.AnsiToWin32(self.stream)
+
+ def _using_stdout(self):
+ """
+ Return whether the handler is using sys.stdout.
+ """
+ if WINDOWS and colorama:
+ # Then self.stream is an AnsiToWin32 object.
+ return self.stream.wrapped is sys.stdout
+
+ return self.stream is sys.stdout
+
+ def should_color(self):
+ # Don't colorize things if we do not have colorama or if told not to
+ if not colorama or self._no_color:
+ return False
+
+ real_stream = (
+ self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
+ else self.stream.wrapped
+ )
+
+ # If the stream is a tty we should color it
+ if hasattr(real_stream, "isatty") and real_stream.isatty():
+ return True
+
+ # If we have an ANSI term we should color it
+ if os.environ.get("TERM") == "ANSI":
+ return True
+
+ # If anything else we should not color it
+ return False
+
+ def format(self, record):
+ msg = logging.StreamHandler.format(self, record)
+
+ if self.should_color():
+ for level, color in self.COLORS:
+ if record.levelno >= level:
+ msg = color(msg)
+ break
+
+ return msg
+
+ # The logging module says handleError() can be customized.
+ def handleError(self, record):
+ exc_class, exc = sys.exc_info()[:2]
+ # If a broken pipe occurred while calling write() or flush() on the
+ # stdout stream in logging's Handler.emit(), then raise our special
+ # exception so we can handle it in main() instead of logging the
+ # broken pipe error and continuing.
+ if (exc_class and self._using_stdout() and
+ _is_broken_pipe_error(exc_class, exc)):
+ raise BrokenStdoutLoggingError()
+
+ return super(ColorizedStreamHandler, self).handleError(record)
+
+
+class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
+
+ def _open(self):
+ ensure_dir(os.path.dirname(self.baseFilename))
+ return logging.handlers.RotatingFileHandler._open(self)
+
+
+class MaxLevelFilter(Filter):
+
+ def __init__(self, level):
+ self.level = level
+
+ def filter(self, record):
+ return record.levelno < self.level
+
+
+class ExcludeLoggerFilter(Filter):
+
+ """
+ A logging Filter that excludes records from a logger (or its children).
+ """
+
+ def filter(self, record):
+ # The base Filter class allows only records from a logger (or its
+ # children).
+ return not super(ExcludeLoggerFilter, self).filter(record)
+
+
+def setup_logging(verbosity, no_color, user_log_file):
+ """Configures and sets up all of the logging
+
+ Returns the requested logging level, as its integer value.
+ """
+
+ # Determine the level to be logging at.
+ if verbosity >= 1:
+ level = "DEBUG"
+ elif verbosity == -1:
+ level = "WARNING"
+ elif verbosity == -2:
+ level = "ERROR"
+ elif verbosity <= -3:
+ level = "CRITICAL"
+ else:
+ level = "INFO"
+
+ level_number = getattr(logging, level)
+
+ # The "root" logger should match the "console" level *unless* we also need
+ # to log to a user log file.
+ include_user_log = user_log_file is not None
+ if include_user_log:
+ additional_log_file = user_log_file
+ root_level = "DEBUG"
+ else:
+ additional_log_file = "/dev/null"
+ root_level = level
+
+ # Disable any logging besides WARNING unless we have DEBUG level logging
+ # enabled for vendored libraries.
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
+
+ # Shorthands for clarity
+ log_streams = {
+ "stdout": "ext://sys.stdout",
+ "stderr": "ext://sys.stderr",
+ }
+ handler_classes = {
+ "stream": "pip._internal.utils.logging.ColorizedStreamHandler",
+ "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
+ }
+ handlers = ["console", "console_errors", "console_subprocess"] + (
+ ["user_log"] if include_user_log else []
+ )
+
+ logging.config.dictConfig({
+ "version": 1,
+ "disable_existing_loggers": False,
+ "filters": {
+ "exclude_warnings": {
+ "()": "pip._internal.utils.logging.MaxLevelFilter",
+ "level": logging.WARNING,
+ },
+ "restrict_to_subprocess": {
+ "()": "logging.Filter",
+ "name": subprocess_logger.name,
+ },
+ "exclude_subprocess": {
+ "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
+ "name": subprocess_logger.name,
+ },
+ },
+ "formatters": {
+ "indent": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ },
+ "indent_with_timestamp": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ "add_timestamp": True,
+ },
+ },
+ "handlers": {
+ "console": {
+ "level": level,
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stdout"],
+ "filters": ["exclude_subprocess", "exclude_warnings"],
+ "formatter": "indent",
+ },
+ "console_errors": {
+ "level": "WARNING",
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stderr"],
+ "filters": ["exclude_subprocess"],
+ "formatter": "indent",
+ },
+ # A handler responsible for logging to the console messages
+ # from the "subprocessor" logger.
+ "console_subprocess": {
+ "level": level,
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stderr"],
+ "filters": ["restrict_to_subprocess"],
+ "formatter": "indent",
+ },
+ "user_log": {
+ "level": "DEBUG",
+ "class": handler_classes["file"],
+ "filename": additional_log_file,
+ "delay": True,
+ "formatter": "indent_with_timestamp",
+ },
+ },
+ "root": {
+ "level": root_level,
+ "handlers": handlers,
+ },
+ "loggers": {
+ "pip._vendor": {
+ "level": vendored_log_level
+ }
+ },
+ })
+
+ return level_number
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py
new file mode 100644
index 00000000..cb0c8ebc
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py
@@ -0,0 +1,20 @@
+import os.path
+
+DELETE_MARKER_MESSAGE = '''\
+This file is placed here by pip to indicate the source was put
+here by pip.
+
+Once this package is successfully installed this source code will be
+deleted (unless you remove this file).
+'''
+PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
+
+
+def write_delete_marker_file(directory):
+ # type: (str) -> None
+ """
+ Write the pip delete marker file into this directory.
+ """
+ filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
+ with open(filepath, 'w') as marker_fp:
+ marker_fp.write(DELETE_MARKER_MESSAGE)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py
new file mode 100644
index 00000000..abb95979
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/misc.py
@@ -0,0 +1,1204 @@
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import getpass
+import io
+# we have a submodule named 'logging' which would shadow this if we used the
+# regular name:
+import logging as std_logging
+import os
+import posixpath
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tarfile
+import zipfile
+from collections import deque
+
+from pip._vendor import pkg_resources
+# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import.
+from pip._vendor.retrying import retry # type: ignore
+from pip._vendor.six import PY2, text_type
+from pip._vendor.six.moves import input, shlex_quote
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
+
+from pip import __version__
+from pip._internal.exceptions import CommandError, InstallationError
+from pip._internal.locations import site_packages, user_site
+from pip._internal.utils.compat import (
+ WINDOWS, console_to_str, expanduser, stdlib_pkgs, str_to_display,
+)
+from pip._internal.utils.marker_files import write_delete_marker_file
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import (
+ running_under_virtualenv, virtualenv_no_global,
+)
+
+if PY2:
+ from io import BytesIO as StringIO
+else:
+ from io import StringIO
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, AnyStr, Container, Iterable, List, Mapping, Match, Optional, Text,
+ Union,
+ )
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.models.link import Link
+ from pip._internal.utils.ui import SpinnerInterface
+
+try:
+ from typing import cast, Tuple
+ VersionInfo = Tuple[int, int, int]
+except ImportError:
+ # typing's cast() isn't supported in code comments, so we need to
+ # define a dummy, no-op version.
+ def cast(typ, val):
+ return val
+ VersionInfo = None
+
+
+__all__ = ['rmtree', 'display_path', 'backup_dir',
+ 'ask', 'splitext',
+ 'format_size', 'is_installable_dir',
+ 'is_svn_page', 'file_contents',
+ 'split_leading_dir', 'has_leading_dir',
+ 'normalize_path',
+ 'renames', 'get_prog',
+ 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
+ 'captured_stdout', 'ensure_dir',
+ 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'WHEEL_EXTENSION',
+ 'get_installed_version', 'remove_auth_from_url']
+
+
+logger = std_logging.getLogger(__name__)
+subprocess_logger = std_logging.getLogger('pip.subprocessor')
+
+LOG_DIVIDER = '----------------------------------------'
+
+WHEEL_EXTENSION = '.whl'
+BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
+XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
+ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION)
+TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
+ARCHIVE_EXTENSIONS = (
+ ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
+SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
+
+try:
+ import bz2 # noqa
+ SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+ logger.debug('bz2 module is not available')
+
+try:
+ # Only for Python 3.3+
+ import lzma # noqa
+ SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+ logger.debug('lzma module is not available')
+
+
+def get_pip_version():
+ # type: () -> str
+ pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
+ pip_pkg_dir = os.path.abspath(pip_pkg_dir)
+
+ return (
+ 'pip {} from {} (python {})'.format(
+ __version__, pip_pkg_dir, sys.version[:3],
+ )
+ )
+
+
+def normalize_version_info(py_version_info):
+ # type: (Tuple[int, ...]) -> Tuple[int, int, int]
+ """
+ Convert a tuple of ints representing a Python version to one of length
+ three.
+
+ :param py_version_info: a tuple of ints representing a Python version,
+ or None to specify no version. The tuple can have any length.
+
+ :return: a tuple of length three if `py_version_info` is non-None.
+ Otherwise, return `py_version_info` unchanged (i.e. None).
+ """
+ if len(py_version_info) < 3:
+ py_version_info += (3 - len(py_version_info)) * (0,)
+ elif len(py_version_info) > 3:
+ py_version_info = py_version_info[:3]
+
+ return cast(VersionInfo, py_version_info)
+
+
+def ensure_dir(path):
+ # type: (AnyStr) -> None
+ """os.path.makedirs without EEXIST."""
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+
+def get_prog():
+ # type: () -> str
+ try:
+ prog = os.path.basename(sys.argv[0])
+ if prog in ('__main__.py', '-c'):
+ return "%s -m pip" % sys.executable
+ else:
+ return prog
+ except (AttributeError, TypeError, IndexError):
+ pass
+ return 'pip'
+
+
+# Retry every half second for up to 3 seconds
+@retry(stop_max_delay=3000, wait_fixed=500)
+def rmtree(dir, ignore_errors=False):
+ # type: (str, bool) -> None
+ shutil.rmtree(dir, ignore_errors=ignore_errors,
+ onerror=rmtree_errorhandler)
+
+
+def rmtree_errorhandler(func, path, exc_info):
+ """On Windows, the files in .svn are read-only, so when rmtree() tries to
+ remove them, an exception is thrown. We catch that here, remove the
+ read-only attribute, and hopefully continue without problems."""
+ # if file type currently read only
+ if os.stat(path).st_mode & stat.S_IREAD:
+ # convert to read/write
+ os.chmod(path, stat.S_IWRITE)
+ # use the original function to repeat the operation
+ func(path)
+ return
+ else:
+ raise
+
+
+def path_to_display(path):
+ # type: (Optional[Union[str, Text]]) -> Optional[Text]
+ """
+ Convert a bytes (or text) path to text (unicode in Python 2) for display
+ and logging purposes.
+
+ This function should never error out. Also, this function is mainly needed
+ for Python 2 since in Python 3 str paths are already text.
+ """
+ if path is None:
+ return None
+ if isinstance(path, text_type):
+ return path
+ # Otherwise, path is a bytes object (str in Python 2).
+ try:
+ display_path = path.decode(sys.getfilesystemencoding(), 'strict')
+ except UnicodeDecodeError:
+ # Include the full bytes to make troubleshooting easier, even though
+ # it may not be very human readable.
+ if PY2:
+ # Convert the bytes to a readable str representation using
+ # repr(), and then convert the str to unicode.
+ # Also, we add the prefix "b" to the repr() return value both
+ # to make the Python 2 output look like the Python 3 output, and
+ # to signal to the user that this is a bytes representation.
+ display_path = str_to_display('b{!r}'.format(path))
+ else:
+ # Silence the "F821 undefined name 'ascii'" flake8 error since
+ # in Python 3 ascii() is a built-in.
+ display_path = ascii(path) # noqa: F821
+
+ return display_path
+
+
+def display_path(path):
+ # type: (Union[str, Text]) -> str
+ """Gives the display value for a given path, making it relative to cwd
+ if possible."""
+ path = os.path.normcase(os.path.abspath(path))
+ if sys.version_info[0] == 2:
+ path = path.decode(sys.getfilesystemencoding(), 'replace')
+ path = path.encode(sys.getdefaultencoding(), 'replace')
+ if path.startswith(os.getcwd() + os.path.sep):
+ path = '.' + path[len(os.getcwd()):]
+ return path
+
+
+def backup_dir(dir, ext='.bak'):
+ # type: (str, str) -> str
+ """Figure out the name of a directory to back up the given dir to
+ (adding .bak, .bak2, etc)"""
+ n = 1
+ extension = ext
+ while os.path.exists(dir + extension):
+ n += 1
+ extension = ext + str(n)
+ return dir + extension
+
+
+def ask_path_exists(message, options):
+ # type: (str, Iterable[str]) -> str
+ for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
+ if action in options:
+ return action
+ return ask(message, options)
+
+
+def _check_no_input(message):
+ # type: (str) -> None
+ """Raise an error if no input is allowed."""
+ if os.environ.get('PIP_NO_INPUT'):
+ raise Exception(
+ 'No input was expected ($PIP_NO_INPUT set); question: %s' %
+ message
+ )
+
+
+def ask(message, options):
+ # type: (str, Iterable[str]) -> str
+ """Ask the message interactively, with the given possible responses"""
+ while 1:
+ _check_no_input(message)
+ response = input(message)
+ response = response.strip().lower()
+ if response not in options:
+ print(
+ 'Your response (%r) was not one of the expected responses: '
+ '%s' % (response, ', '.join(options))
+ )
+ else:
+ return response
+
+
+def ask_input(message):
+ # type: (str) -> str
+ """Ask for input interactively."""
+ _check_no_input(message)
+ return input(message)
+
+
+def ask_password(message):
+ # type: (str) -> str
+ """Ask for a password interactively."""
+ _check_no_input(message)
+ return getpass.getpass(message)
+
+
+def format_size(bytes):
+ # type: (float) -> str
+ if bytes > 1000 * 1000:
+ return '%.1fMB' % (bytes / 1000.0 / 1000)
+ elif bytes > 10 * 1000:
+ return '%ikB' % (bytes / 1000)
+ elif bytes > 1000:
+ return '%.1fkB' % (bytes / 1000.0)
+ else:
+ return '%ibytes' % bytes
+
+
+def is_installable_dir(path):
+ # type: (str) -> bool
+ """Is path is a directory containing setup.py or pyproject.toml?
+ """
+ if not os.path.isdir(path):
+ return False
+ setup_py = os.path.join(path, 'setup.py')
+ if os.path.isfile(setup_py):
+ return True
+ pyproject_toml = os.path.join(path, 'pyproject.toml')
+ if os.path.isfile(pyproject_toml):
+ return True
+ return False
+
+
+def is_svn_page(html):
+ # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]]
+ """
+ Returns true if the page appears to be the index page of an svn repository
+ """
+ return (re.search(r'<title>[^<]*Revision \d+:', html) and
+ re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
+
+
+def file_contents(filename):
+ # type: (str) -> Text
+ with open(filename, 'rb') as fp:
+ return fp.read().decode('utf-8')
+
+
+def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
+ """Yield pieces of data from a file-like object until EOF."""
+ while True:
+ chunk = file.read(size)
+ if not chunk:
+ break
+ yield chunk
+
+
+def split_leading_dir(path):
+ # type: (Union[str, Text]) -> List[Union[str, Text]]
+ path = path.lstrip('/').lstrip('\\')
+ if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
+ '\\' not in path):
+ return path.split('/', 1)
+ elif '\\' in path:
+ return path.split('\\', 1)
+ else:
+ return [path, '']
+
+
+def has_leading_dir(paths):
+ # type: (Iterable[Union[str, Text]]) -> bool
+ """Returns true if all the paths have the same leading path name
+ (i.e., everything is in one subdirectory in an archive)"""
+ common_prefix = None
+ for path in paths:
+ prefix, rest = split_leading_dir(path)
+ if not prefix:
+ return False
+ elif common_prefix is None:
+ common_prefix = prefix
+ elif prefix != common_prefix:
+ return False
+ return True
+
+
+def normalize_path(path, resolve_symlinks=True):
+ # type: (str, bool) -> str
+ """
+ Convert a path to its canonical, case-normalized, absolute version.
+
+ """
+ path = expanduser(path)
+ if resolve_symlinks:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
+ return os.path.normcase(path)
+
+
def splitext(path):
    # type: (str) -> Tuple[str, str]
    """Split ``path`` into (base, extension) like os.path.splitext, but
    treat ``.tar.*`` double extensions as a single extension."""
    root, extension = posixpath.splitext(path)
    if root.lower().endswith('.tar'):
        # Fold the '.tar' back into the extension.
        extension = root[-4:] + extension
        root = root[:-4]
    return root, extension
+
+
def renames(old, new):
    # type: (str, str) -> None
    """Move ``old`` to ``new`` like os.renames(): create any missing
    parents of ``new`` and prune now-empty parents of ``old`` — but use
    shutil.move so the rename also works across devices."""
    new_head, new_tail = os.path.split(new)
    if new_head and new_tail and not os.path.exists(new_head):
        os.makedirs(new_head)

    shutil.move(old, new)

    old_head, old_tail = os.path.split(old)
    if old_head and old_tail:
        try:
            # Removes empty ancestors only; raises once it reaches a
            # non-empty directory, which we deliberately ignore.
            os.removedirs(old_head)
        except OSError:
            pass
+
+
def is_local(path):
    # type: (str) -> bool
    """
    Report whether ``path`` lives under sys.prefix.

    Outside a virtualenv every path counts as "local"; inside one, only
    paths under the virtualenv's prefix do.
    """
    if not running_under_virtualenv():
        return True
    prefix = normalize_path(sys.prefix)
    return normalize_path(path).startswith(prefix)
+
+
def dist_is_local(dist):
    # type: (Distribution) -> bool
    """
    True when ``dist`` is installed locally, i.e. within the current
    virtualenv; trivially True when not running in a virtualenv.
    """
    location = dist_location(dist)
    return is_local(location)
+
+
def dist_in_usersite(dist):
    # type: (Distribution) -> bool
    """
    Whether ``dist`` is installed under the per-user site directory.
    """
    location = normalize_path(dist_location(dist))
    return location.startswith(normalize_path(user_site))
+
+
def dist_in_site_packages(dist):
    # type: (Distribution) -> bool
    """
    Whether ``dist`` is installed in the site-packages directory
    reported by sysconfig.get_python_lib().
    """
    location = normalize_path(dist_location(dist))
    prefix = normalize_path(site_packages)
    return location.startswith(prefix)
+
+
def dist_is_editable(dist):
    # type: (Distribution) -> bool
    """
    True when a ``<project>.egg-link`` file for ``dist`` exists on any
    sys.path entry, i.e. the distribution is a develop/editable install.
    """
    link_name = dist.project_name + '.egg-link'
    return any(
        os.path.isfile(os.path.join(entry, link_name))
        for entry in sys.path
    )
+
+
def get_installed_distributions(
    local_only=True,  # type: bool
    skip=stdlib_pkgs,  # type: Container[str]
    include_editables=True,  # type: bool
    editables_only=False,  # type: bool
    user_only=False,  # type: bool
    paths=None  # type: Optional[List[str]]
):
    # type: (...) -> List[Distribution]
    """
    Return a list of installed Distribution objects.

    ``local_only`` (default True) restricts results to installations
    local to the current virtualenv, when in one.

    ``skip`` is an iterable of lower-case project names to ignore;
    defaults to stdlib_pkgs.

    ``include_editables=False`` drops editable installs, while
    ``editables_only=True`` keeps only editable installs.

    ``user_only=True`` keeps only installations in the user site
    directory.

    ``paths``, when given, limits the search to those locations instead
    of the active environment.
    """
    if paths:
        working_set = pkg_resources.WorkingSet(paths)
    else:
        working_set = pkg_resources.working_set

    def _accept_all(unused):
        return True

    local_test = dist_is_local if local_only else _accept_all

    if include_editables:
        editable_test = _accept_all
    else:
        def editable_test(d):
            return not dist_is_editable(d)

    editables_only_test = dist_is_editable if editables_only else _accept_all
    user_test = dist_in_usersite if user_only else _accept_all

    results = []
    # pkg_resources is vendored, so mypy cannot find its typeshed stub.
    for dist in working_set:  # type: ignore
        if dist.key in skip:
            continue
        if not local_test(dist):
            continue
        if not editable_test(dist):
            continue
        if not editables_only_test(dist):
            continue
        if user_test(dist):
            results.append(dist)
    return results
+
+
def egg_link_path(dist):
    # type: (Distribution) -> Optional[str]
    """
    Return the path of the ``.egg-link`` file for ``dist``, or None.

    Search order depends on the environment:
      1) no virtualenv: site.USER_SITE, then site-packages
      2) virtualenv without global access: site-packages only
      3) virtualenv with global access: site-packages, then
         site.USER_SITE (the global location is never searched)

    For cases 1 and 3 an egg-link could exist in both locations; the
    first hit wins.
    """
    if running_under_virtualenv():
        candidates = [site_packages]
        if not virtualenv_no_global() and user_site:
            candidates.append(user_site)
    else:
        candidates = [user_site] if user_site else []
        candidates.append(site_packages)

    link_name = dist.project_name + '.egg-link'
    for candidate in candidates:
        link = os.path.join(candidate, link_name)
        if os.path.isfile(link):
            return link
    return None
+
+
def dist_location(dist):
    # type: (Distribution) -> str
    """
    Return where ``dist`` is installed from site-packages' point of
    view. Normally this is ``dist.location``; for develop (editable)
    installs — where dist.location points at the source checkout — it
    is the egg-link file's path instead.
    """
    link = egg_link_path(dist)
    return link if link else dist.location
+
+
def current_umask():
    """Return the process umask. os.umask can only read by writing, so
    set a throwaway value and immediately restore the original."""
    previous = os.umask(0)
    os.umask(previous)
    return previous
+
+
def unzip_file(filename, location, flatten=True):
    # type: (str, str, bool) -> None
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp, allowZip64=True)
        # If the archive unpacks into a single top-level directory and
        # flattening is requested, strip that directory off every member.
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                ensure_dir(fn)
            else:
                ensure_dir(dir)
                # Don't use read() to avoid allocating an arbitrarily large
                # chunk of memory for the file's content
                fp = zip.open(name)
                try:
                    with open(fn, 'wb') as destfp:
                        shutil.copyfileobj(fp, destfp)
                finally:
                    fp.close()
                # The high 16 bits of external_attr carry the Unix mode
                # for archives created on Unix; 0 when unavailable.
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x) no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
    finally:
        zipfp.close()
+
+
def untar_file(filename, location):
    # type: (str, str) -> None
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Pick the tarfile mode from the filename; fall back to transparent
    # compression detection ('r:*') with a warning.
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # Strip a single common top-level directory, if there is one.
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
        ])
        for member in tar.getmembers():
            fn = member.name
            if leading:
                # https://github.com/python/mypy/issues/1174
                fn = split_leading_dir(fn)[1]  # type: ignore
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # Uses tarfile's private extraction helper to create
                    # the symlink in place.
                    # https://github.com/python/typeshed/issues/2673
                    tar._extract_member(member, path)  # type: ignore
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                # https://github.com/python/typeshed/issues/2673
                tar.utime(member, path)  # type: ignore
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()
+
+
def unpack_file(
    filename,  # type: str
    location,  # type: str
    content_type,  # type: Optional[str]
    link  # type: Optional[Link]
):
    # type: (...) -> None
    """
    Unpack the archive at ``filename`` into the directory ``location``.

    The format is chosen from ``content_type``, the file extension, or
    content sniffing (zipfile/tarfile detection). Subversion index pages
    served as HTML are checked out via ``svn``; ``link`` supplies the
    URL in that case. Raises InstallationError when no format can be
    determined.
    """
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip' or
            filename.lower().endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        unzip_file(
            filename,
            location,
            # Wheels must keep their internal directory layout intact.
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            filename.lower().endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # An svn repository index page rather than an archive: check the
        # repository out instead of unpacking.
        from pip._internal.vcs.subversion import Subversion
        url = 'svn+' + link.url
        Subversion().unpack(location, url=url)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        # NOTE(review): this message interpolates `location` (the
        # destination dir); `filename` looks like the intended value —
        # confirm before changing, callers/tests may match on it.
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )
+
+
def format_command_args(args):
    # type: (List[str]) -> str
    """Return ``args`` joined into a single shell-quoted display string."""
    quoted = [shlex_quote(arg) for arg in args]
    return ' '.join(quoted)
+
+
def make_subprocess_output_error(
    cmd_args,  # type: List[str]
    cwd,  # type: Optional[str]
    lines,  # type: List[Text]
    exit_status,  # type: int
):
    # type: (...) -> Text
    """
    Build the error message used to log a failed subprocess along with
    its captured output.

    :param lines: The captured output, one newline-terminated string
        per line.
    """
    # Render the command, then coerce it and cwd to text (unicode under
    # Python 2) so both are safe to interpolate into the unicode format
    # string below even when they contain non-ascii bytes.
    command_display = str_to_display(
        format_command_args(cmd_args), desc='command bytes',
    )
    cwd_display = path_to_display(cwd)

    # Every element of `lines` ends with a newline, so the joined output
    # already terminates with one.
    output = ''.join(lines)
    # The template is a unicode literal so formatting non-ascii
    # arguments (e.g. `output`) cannot raise UnicodeEncodeError on
    # Python 2.
    template = (
        u'Command errored out with exit status {exit_status}:\n'
        ' command: {command_display}\n'
        ' cwd: {cwd_display}\n'
        'Complete output ({line_count} lines):\n{output}{divider}'
    )
    return template.format(
        exit_status=exit_status,
        command_display=command_display,
        cwd_display=cwd_display,
        line_count=len(lines),
        output=output,
        divider=LOG_DIVIDER,
    )
+
+
def call_subprocess(
    cmd,  # type: List[str]
    show_stdout=False,  # type: bool
    cwd=None,  # type: Optional[str]
    on_returncode='raise',  # type: str
    extra_ok_returncodes=None,  # type: Optional[Iterable[int]]
    command_desc=None,  # type: Optional[str]
    extra_environ=None,  # type: Optional[Mapping[str, Any]]
    unset_environ=None,  # type: Optional[Iterable[str]]
    spinner=None  # type: Optional[SpinnerInterface]
):
    # type: (...) -> Text
    """
    Run ``cmd``, log its combined output line by line, and return that
    output as one string.

    Args:
        show_stdout: if true, use INFO to log the subprocess's stderr and
            stdout streams. Otherwise, use DEBUG. Defaults to False.
        on_returncode: one of 'raise' (raise InstallationError on
            failure), 'warn' (log a warning), or 'ignore'.
        extra_ok_returncodes: an iterable of integer return codes that are
            acceptable, in addition to 0. Defaults to None, which means [].
        extra_environ: mapping merged over os.environ for the child.
        unset_environ: an iterable of environment variable names to unset
            prior to calling subprocess.Popen().
        spinner: optional progress spinner, advanced per output line.
    """
    if extra_ok_returncodes is None:
        extra_ok_returncodes = []
    if unset_environ is None:
        unset_environ = []
    # Most places in pip use show_stdout=False. What this means is--
    #
    # - We connect the child's output (combined stderr and stdout) to a
    #   single pipe, which we read.
    # - We log this output to stderr at DEBUG level as it is received.
    # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
    #   requested), then we show a spinner so the user can still see the
    #   subprocess is in progress.
    # - If the subprocess exits with an error, we log the output to stderr
    #   at ERROR level if it hasn't already been displayed to the console
    #   (e.g. if --verbose logging wasn't enabled).  This way we don't log
    #   the output to the console twice.
    #
    # If show_stdout=True, then the above is still done, but with DEBUG
    # replaced by INFO.
    if show_stdout:
        # Then log the subprocess output at INFO level.
        log_subprocess = subprocess_logger.info
        used_level = std_logging.INFO
    else:
        # Then log the subprocess output using DEBUG.  This also ensures
        # it will be logged to the log file (aka user_log), if enabled.
        log_subprocess = subprocess_logger.debug
        used_level = std_logging.DEBUG

    # Whether the subprocess will be visible in the console.
    showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level

    # Only use the spinner if we're not showing the subprocess output
    # and we have a spinner.
    use_spinner = not showing_subprocess and spinner is not None

    if command_desc is None:
        command_desc = format_command_args(cmd)

    log_subprocess("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
            stdout=subprocess.PIPE, cwd=cwd, env=env,
        )
        # No input is ever sent to the child; close stdin immediately so
        # a child that reads from it sees EOF instead of blocking.
        proc.stdin.close()
    except Exception as exc:
        subprocess_logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    all_output = []
    # Stream the child's output line by line until EOF.
    while True:
        # The "line" value is a unicode string in Python 2.
        line = console_to_str(proc.stdout.readline())
        if not line:
            break
        line = line.rstrip()
        all_output.append(line + '\n')

        # Show the line immediately.
        log_subprocess(line)
        # Update the spinner.
        if use_spinner:
            spinner.spin()
    try:
        proc.wait()
    finally:
        if proc.stdout:
            proc.stdout.close()
    proc_had_error = (
        proc.returncode and proc.returncode not in extra_ok_returncodes
    )
    if use_spinner:
        if proc_had_error:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc_had_error:
        if on_returncode == 'raise':
            if not showing_subprocess:
                # Then the subprocess streams haven't been logged to the
                # console yet.
                msg = make_subprocess_output_error(
                    cmd_args=cmd,
                    cwd=cwd,
                    lines=all_output,
                    exit_status=proc.returncode,
                )
                subprocess_logger.error(msg)
            exc_msg = (
                'Command errored out with exit status {}: {} '
                'Check the logs for full command output.'
            ).format(proc.returncode, command_desc)
            raise InstallationError(exc_msg)
        elif on_returncode == 'warn':
            subprocess_logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    return ''.join(all_output)
+
+
def _make_build_dir(build_dir):
    # Create the build directory (raising if it already exists) and drop
    # pip's delete-marker file in it so the directory is later
    # recognized as pip-owned and safe to clean up.
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)
+
+
class FakeFile(object):
    """Wrap an iterable of lines in a minimal file-like object
    (readline plus iteration) so ConfigParser can consume it."""

    def __init__(self, lines):
        self._gen = iter(lines)

    def readline(self):
        try:
            try:
                return next(self._gen)
            except NameError:
                # Python < 2.6 has no next() builtin.
                return self._gen.next()
        except StopIteration:
            # File-object convention: empty string signals EOF.
            return ''

    def __iter__(self):
        return self._gen
+
+
class StreamWrapper(StringIO):
    """An in-memory stand-in for a standard stream that remembers the
    stream it replaced (used by captured_output())."""

    # The replaced stream; set per-instance by from_stream().
    orig_stream = None

    @classmethod
    def from_stream(cls, orig_stream):
        """Return a new wrapper remembering ``orig_stream``.

        Fixed to store ``orig_stream`` on the instance: the previous
        class-attribute storage meant two live wrappers (e.g. capturing
        stdout and stderr simultaneously) clobbered each other's
        original stream.
        """
        ret = cls()
        ret.orig_stream = orig_stream
        return ret

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        return self.orig_stream.encoding
+
+
@contextlib.contextmanager
def captured_output(stream_name):
    """Context manager replacing ``sys.<stream_name>`` with a
    StreamWrapper; yields the wrapper and restores the real stream on
    exit.

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    original = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(original))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, original)
+
+
def captured_stdout():
    """Capture sys.stdout for the duration of a ``with`` block:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output('stdout')
+
+
def captured_stderr():
    """Capture sys.stderr; see captured_stdout() for usage."""
    return captured_output('stderr')
+
+
class cached_property(object):
    """Non-data descriptor that computes a value on first access and
    stores it on the instance, shadowing itself thereafter; deleting
    the instance attribute re-arms the property.

    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class itself: hand back the descriptor.
            return self
        # Compute once and cache under the function's name; since this
        # is a non-data descriptor (no __set__), the instance attribute
        # wins on all later lookups.
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
+
+
def get_installed_version(dist_name, working_set=None):
    """Return the installed version of ``dist_name`` (or None),
    bypassing the pkg_resources cache."""
    # Build a requirement we can look up in a working set.
    req = pkg_resources.Requirement.parse(dist_name)

    if working_set is None:
        # A freshly constructed WorkingSet avoids pkg_resources'
        # cached view of the environment.
        working_set = pkg_resources.WorkingSet()

    dist = working_set.find(req)
    return dist.version if dist else None
+
+
def consume(iterator):
    """Exhaust ``iterator`` at C speed, discarding every item."""
    deque(iterator, maxlen=0)
+
+
+# Simulates an enum
def enum(*sequential, **named):
    """Build an enum-like class: positional names get values 0..n-1,
    keyword names keep their given values, and ``reverse_mapping``
    maps values back to names."""
    values = dict(zip(sequential, range(len(sequential))))
    values.update(named)
    values['reverse_mapping'] = {v: k for k, v in values.items()}
    return type('Enum', (), values)
+
+
def path_to_url(path):
    # type: (Union[str, Text]) -> str
    """
    Turn a filesystem path into an absolute ``file:`` URL with its
    path components percent-quoted.
    """
    absolute = os.path.normpath(os.path.abspath(path))
    return urllib_parse.urljoin(
        'file:', urllib_request.pathname2url(absolute),
    )
+
+
def split_auth_from_netloc(netloc):
    """
    Strip any ``user:password@`` prefix off ``netloc``.

    Returns: (netloc, (username, password)) — credential parts are
    unquoted strings, or None when absent.
    """
    if '@' not in netloc:
        return netloc, (None, None)

    # rsplit mirrors urllib.parse.urlsplit()'s handling when several
    # '@' characters appear (checkable via urlsplit().password).
    auth, netloc = netloc.rsplit('@', 1)
    if ':' in auth:
        # A left split likewise matches urlsplit() when several ':'
        # characters are present.
        user, pw = auth.split(':', 1)
    else:
        user, pw = auth, None

    user_pass = tuple(
        part if part is None else urllib_unquote(part)
        for part in (user, pw)
    )
    return netloc, user_pass
+
+
def redact_netloc(netloc):
    # type: (str) -> str
    """
    Replace the password in a netloc with "****", if it exists.

    For example, "user:pass@example.com" returns "user:****@example.com".
    """
    stripped, (user, password) = split_auth_from_netloc(netloc)
    if user is None:
        return stripped
    masked = '' if password is None else ':****'
    return '{user}{password}@{netloc}'.format(
        user=urllib_parse.quote(user),
        password=masked,
        netloc=stripped,
    )
+
+
+def _transform_url(url, transform_netloc):
+ """Transform and replace netloc in a url.
+
+ transform_netloc is a function taking the netloc and returning a
+ tuple. The first element of this tuple is the new netloc. The
+ entire tuple is returned.
+
+ Returns a tuple containing the transformed url as item 0 and the
+ original tuple returned by transform_netloc as item 1.
+ """
+ purl = urllib_parse.urlsplit(url)
+ netloc_tuple = transform_netloc(purl.netloc)
+ # stripped url
+ url_pieces = (
+ purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment
+ )
+ surl = urllib_parse.urlunsplit(url_pieces)
+ return surl, netloc_tuple
+
+
def _get_netloc(netloc):
    # Adapter for _transform_url(): returns split_auth_from_netloc()'s
    # (netloc, (user, password)) tuple, whose first element is the
    # auth-stripped netloc.
    return split_auth_from_netloc(netloc)
+
+
def _redact_netloc(netloc):
    # Adapter for _transform_url(): wrap the redacted netloc in a
    # 1-tuple, since _transform_url() expects a tuple result.
    return (redact_netloc(netloc),)
+
+
def split_auth_netloc_from_url(url):
    # type: (str) -> Tuple[str, str, Tuple[str, str]]
    """
    Split ``url`` into its auth-free form, its bare netloc, and the
    credentials.

    Returns: (url_without_auth, netloc, (username, password))
    """
    stripped_url, (netloc, auth) = _transform_url(url, _get_netloc)
    return stripped_url, netloc, auth
+
+
def remove_auth_from_url(url):
    # type: (str) -> str
    """Return a copy of ``url`` with any 'username:password@' removed.

    (Subversion receives credentials through command-line flags and
    does not recognize them embedded in the URL.)
    """
    stripped_url, _ = _transform_url(url, _get_netloc)
    return stripped_url
+
+
def redact_password_from_url(url):
    # type: (str) -> str
    """Return ``url`` with its password (if any) replaced by '****'."""
    redacted, _ = _transform_url(url, _redact_netloc)
    return redacted
+
+
def protect_pip_from_modification_on_windows(modifying_pip):
    """Guard against pip modifying itself through a pip.exe wrapper.

    On Windows, any operation modifying pip should be run as
    ``python -m pip ...``; when invoked through one of the pip*.exe
    wrappers instead, raise CommandError showing that exact command.
    """
    pip_names = [
        "pip.exe",
        "pip{}.exe".format(sys.version_info[0]),
        "pip{}.{}.exe".format(*sys.version_info[:2])
    ]

    # See https://github.com/pypa/pip/issues/1299 for more discussion
    should_show_use_python_msg = (
        modifying_pip and
        WINDOWS and
        os.path.basename(sys.argv[0]) in pip_names
    )

    if should_show_use_python_msg:
        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
        raise CommandError(
            'To modify pip, please run the following command:\n{}'
            .format(" ".join(new_command))
        )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py
new file mode 100644
index 00000000..fccaf5dd
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/models.py
@@ -0,0 +1,40 @@
+"""Utilities for defining models
+"""
+
+import operator
+
+
class KeyBasedCompareMixin(object):
    """Mixin delegating hashing and all rich comparisons to a single
    comparison key supplied at construction time.

    Comparisons only succeed against instances of ``defining_class``;
    any other operand yields NotImplemented so the interpreter can try
    the reflected operation.
    """

    def __init__(self, key, defining_class):
        self._compare_key = key
        self._defining_class = defining_class

    def _compare(self, other, method):
        # Refuse foreign types; NotImplemented lets Python fall back to
        # the other operand's comparison (or default identity equality).
        if not isinstance(other, self._defining_class):
            return NotImplemented
        return method(self._compare_key, other._compare_key)

    def __hash__(self):
        return hash(self._compare_key)

    def __eq__(self, other):
        return self._compare(other, operator.__eq__)

    def __ne__(self, other):
        return self._compare(other, operator.__ne__)

    def __lt__(self, other):
        return self._compare(other, operator.__lt__)

    def __le__(self, other):
        return self._compare(other, operator.__le__)

    def __gt__(self, other):
        return self._compare(other, operator.__gt__)

    def __ge__(self, other):
        return self._compare(other, operator.__ge__)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py
new file mode 100644
index 00000000..2b10aeff
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/outdated.py
@@ -0,0 +1,178 @@
+from __future__ import absolute_import
+
+import datetime
+import json
+import logging
+import os.path
+import sys
+
+from pip._vendor import lockfile, pkg_resources
+from pip._vendor.packaging import version as packaging_version
+
+from pip._internal.cli.cmdoptions import make_search_scope
+from pip._internal.index import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.misc import ensure_dir, get_installed_version
+from pip._internal.utils.packaging import get_installer
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ import optparse
+ from typing import Any, Dict
+ from pip._internal.download import PipSession
+
+
+SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
class SelfCheckState(object):
    """Persistence for pip's periodic self-version check.

    State lives in ``<cache_dir>/selfcheck.json``, keyed by sys.prefix
    so multiple environments can share one cache file.
    """

    def __init__(self, cache_dir):
        # type: (str) -> None
        """Load any previously saved state for this sys.prefix."""
        self.state = {}  # type: Dict[str, Any]
        self.statefile_path = None

        # Try to load the existing state
        if cache_dir:
            self.statefile_path = os.path.join(cache_dir, "selfcheck.json")
            try:
                with open(self.statefile_path) as statefile:
                    self.state = json.load(statefile)[sys.prefix]
            except (IOError, ValueError, KeyError):
                # Explicitly suppressing exceptions, since we don't want to
                # error out if the cache file is invalid.
                pass

    def save(self, pypi_version, current_time):
        # type: (str, datetime.datetime) -> None
        """Record the latest-known PyPI version and the check time."""
        # If we do not have a path to cache in, don't bother saving.
        if not self.statefile_path:
            return

        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file, re-reading it under
        # the lock so concurrent pip processes don't lose each other's
        # entries for other prefixes.
        with lockfile.LockFile(self.statefile_path):
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))
+
+
def was_installed_by_pip(pkg):
    # type: (str) -> bool
    """Report whether ``pkg`` was installed by pip itself.

    Used to suppress the upgrade hint when pip actually came from a
    system package manager, such as dnf on Fedora.
    """
    try:
        dist = pkg_resources.get_distribution(pkg)
    except pkg_resources.DistributionNotFound:
        return False
    return get_installer(dist) == "pip"
+
+
def pip_version_check(session, options):
    # type: (PipSession, optparse.Values) -> None
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.

    Any failure is logged at DEBUG and swallowed — the self-check must
    never break the command the user actually ran.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = SelfCheckState(cache_dir=options.cache_dir)

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            # Reuse the cached answer when the last check is under a
            # week old.
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Lets use PackageFinder to see what the latest pip version is
            search_scope = make_search_scope(options, suppress_no_index=True)

            # Pass allow_yanked=False so we don't suggest upgrading to a
            # yanked version.
            selection_prefs = SelectionPreferences(
                allow_yanked=False,
                allow_all_prereleases=False,  # Explicitly set to False
            )

            finder = PackageFinder.create(
                search_scope=search_scope,
                selection_prefs=selection_prefs,
                trusted_hosts=options.trusted_hosts,
                session=session,
            )
            candidate = finder.find_candidates("pip").get_best()
            if candidate is None:
                return
            pypi_version = str(candidate.version)

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Only nag when the remote release is a genuinely different base
        # version and this pip was installed by pip (not a system
        # package manager).
        local_version_is_older = (
            pip_version < remote_version and
            pip_version.base_version != remote_version.base_version and
            was_installed_by_pip('pip')
        )

        # Determine if our pypi_version is older
        if not local_version_is_older:
            return

        # Advise "python -m pip" on Windows to avoid issues
        # with overwriting pip.exe.
        if WINDOWS:
            pip_cmd = "python -m pip"
        else:
            pip_cmd = "pip"
        logger.warning(
            "You are using pip version %s, however version %s is "
            "available.\nYou should consider upgrading via the "
            "'%s install --upgrade pip' command.",
            pip_version, pypi_version, pip_cmd
        )
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py
new file mode 100644
index 00000000..68aa86ed
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/packaging.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+
+import logging
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging import specifiers, version
+
+from pip._internal.exceptions import NoneMetadataError
+from pip._internal.utils.misc import display_path
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+ from email.message import Message
+ from pip._vendor.pkg_resources import Distribution
+
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(requires_python, version_info):
+ # type: (Optional[str], Tuple[int, ...]) -> bool
+ """
+ Check if the given Python version matches a "Requires-Python" specifier.
+
+ :param version_info: A 3-tuple of ints representing a Python
+ major-minor-micro version to check (e.g. `sys.version_info[:3]`).
+
+ :return: `True` if the given Python version satisfies the requirement.
+ Otherwise, return `False`.
+
+ :raises InvalidSpecifier: If `requires_python` has an invalid format.
+ """
+ if requires_python is None:
+ # The package provides no information
+ return True
+ requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+ python_version = version.parse('.'.join(map(str, version_info)))
+ return python_version in requires_python_specifier
+
+
+def get_metadata(dist):
+ # type: (Distribution) -> Message
+ """
+ :raises NoneMetadataError: if the distribution reports `has_metadata()`
+ True but `get_metadata()` returns None.
+ """
+ metadata_name = 'METADATA'
+ if (isinstance(dist, pkg_resources.DistInfoDistribution) and
+ dist.has_metadata(metadata_name)):
+ metadata = dist.get_metadata(metadata_name)
+ elif dist.has_metadata('PKG-INFO'):
+ metadata_name = 'PKG-INFO'
+ metadata = dist.get_metadata(metadata_name)
+ else:
+ logger.warning("No metadata found in %s", display_path(dist.location))
+ metadata = ''
+
+ if metadata is None:
+ raise NoneMetadataError(dist, metadata_name)
+
+ feed_parser = FeedParser()
+ # The following line errors out with a "NoneType" TypeError if
+ # passed metadata=None.
+ feed_parser.feed(metadata)
+ return feed_parser.close()
+
+
+def get_requires_python(dist):
+ # type: (pkg_resources.Distribution) -> Optional[str]
+ """
+ Return the "Requires-Python" metadata for a distribution, or None
+ if not present.
+ """
+ pkg_info_dict = get_metadata(dist)
+ requires_python = pkg_info_dict.get('Requires-Python')
+
+ if requires_python is not None:
+ # Convert to a str to satisfy the type checker, since requires_python
+ # can be a Header object.
+ requires_python = str(requires_python)
+
+ return requires_python
+
+
+def get_installer(dist):
+ # type: (Distribution) -> str
+ if dist.has_metadata('INSTALLER'):
+ for line in dist.get_metadata_lines('INSTALLER'):
+ if line.strip():
+ return line.strip()
+ return ''
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py
new file mode 100644
index 00000000..58956072
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py
@@ -0,0 +1,36 @@
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+# Shim to wrap setup.py invocation with setuptools
+#
+# We set sys.argv[0] to the path to the underlying setup.py file so
+# setuptools / distutils don't take the path to the setup.py to be "-c" when
+# invoking via the shim. This avoids e.g. the following manifest_maker
+# warning: "warning: manifest_maker: standard file '-c' not found".
+_SETUPTOOLS_SHIM = (
+ "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};"
+ "f=getattr(tokenize, 'open', open)(__file__);"
+ "code=f.read().replace('\\r\\n', '\\n');"
+ "f.close();"
+ "exec(compile(code, __file__, 'exec'))"
+)
+
+
+def make_setuptools_shim_args(setup_py_path, unbuffered_output=False):
+ # type: (str, bool) -> List[str]
+ """
+ Get setuptools command arguments with shim wrapped setup file invocation.
+
+ :param setup_py_path: The path to setup.py to be wrapped.
+ :param unbuffered_output: If True, adds the unbuffered switch to the
+ argument list.
+ """
+ args = [sys.executable]
+ if unbuffered_output:
+ args.append('-u')
+ args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)])
+ return args
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py
new file mode 100644
index 00000000..2c81ad55
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py
@@ -0,0 +1,155 @@
+from __future__ import absolute_import
+
+import errno
+import itertools
+import logging
+import os.path
+import tempfile
+
+from pip._internal.utils.misc import rmtree
+
+logger = logging.getLogger(__name__)
+
+
+class TempDirectory(object):
+ """Helper class that owns and cleans up a temporary directory.
+
+ This class can be used as a context manager or as an OO representation of a
+ temporary directory.
+
+ Attributes:
+ path
+ Location to the created temporary directory or None
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ Methods:
+ create()
+ Creates a temporary directory and stores its path in the path
+ attribute.
+ cleanup()
+ Deletes the temporary directory and sets path attribute to None
+
+ When used as a context manager, a temporary directory is created on
+ entering the context and, if the delete attribute is True, on exiting the
+ context the created directory is deleted.
+ """
+
+ def __init__(self, path=None, delete=None, kind="temp"):
+ super(TempDirectory, self).__init__()
+
+ if path is None and delete is None:
+ # If we were not given an explicit directory, and we were not given
+ # an explicit delete option, then we'll default to deleting.
+ delete = True
+
+ self.path = path
+ self.delete = delete
+ self.kind = kind
+
+ def __repr__(self):
+ return "<{} {!r}>".format(self.__class__.__name__, self.path)
+
+ def __enter__(self):
+ self.create()
+ return self
+
+ def __exit__(self, exc, value, tb):
+ if self.delete:
+ self.cleanup()
+
+ def create(self):
+ """Create a temporary directory and store its path in self.path
+ """
+ if self.path is not None:
+ logger.debug(
+ "Skipped creation of temporary directory: {}".format(self.path)
+ )
+ return
+ # We realpath here because some systems have their default tmpdir
+ # symlinked to another directory. This tends to confuse build
+ # scripts, so we canonicalize the path by traversing potential
+ # symlinks here.
+ self.path = os.path.realpath(
+ tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
+ )
+ logger.debug("Created temporary directory: {}".format(self.path))
+
+ def cleanup(self):
+ """Remove the temporary directory created and reset state
+ """
+ if self.path is not None and os.path.exists(self.path):
+ rmtree(self.path)
+ self.path = None
+
+
+class AdjacentTempDirectory(TempDirectory):
+ """Helper class that creates a temporary directory adjacent to a real one.
+
+ Attributes:
+ original
+ The original directory to create a temp directory for.
+ path
+ After calling create() or entering, contains the full
+ path to the temporary directory.
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ """
+ # The characters that may be used to name the temp directory
+ # We always prepend a ~ and then rotate through these until
+ # a usable name is found.
+ # pkg_resources raises a different error for .dist-info folder
+ # with leading '-' and invalid metadata
+ LEADING_CHARS = "-~.=%0123456789"
+
+ def __init__(self, original, delete=None):
+ super(AdjacentTempDirectory, self).__init__(delete=delete)
+ self.original = original.rstrip('/\\')
+
+ @classmethod
+ def _generate_names(cls, name):
+ """Generates a series of temporary names.
+
+ The algorithm replaces the leading characters in the name
+ with ones that are valid filesystem characters, but are not
+ valid package names (for both Python and pip definitions of
+ package).
+ """
+ for i in range(1, len(name)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i - 1):
+ new_name = '~' + ''.join(candidate) + name[i:]
+ if new_name != name:
+ yield new_name
+
+ # If we make it this far, we will have to make a longer name
+ for i in range(len(cls.LEADING_CHARS)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i):
+ new_name = '~' + ''.join(candidate) + name
+ if new_name != name:
+ yield new_name
+
+ def create(self):
+ root, name = os.path.split(self.original)
+ for candidate in self._generate_names(name):
+ path = os.path.join(root, candidate)
+ try:
+ os.mkdir(path)
+ except OSError as ex:
+ # Continue if the name exists already
+ if ex.errno != errno.EEXIST:
+ raise
+ else:
+ self.path = os.path.realpath(path)
+ break
+
+ if not self.path:
+ # Final fallback on the default behavior.
+ self.path = os.path.realpath(
+ tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
+ )
+ logger.debug("Created temporary directory: {}".format(self.path))
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py
new file mode 100644
index 00000000..10170ce2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/typing.py
@@ -0,0 +1,29 @@
+"""For neatly implementing static typing in pip.
+
+`mypy` - the static type analysis tool we use - uses the `typing` module, which
+provides core functionality fundamental to mypy's functioning.
+
+Generally, `typing` would be imported at runtime and used in that fashion -
+it acts as a no-op at runtime and does not have any run-time overhead by
+design.
+
+As it turns out, `typing` is not vendorable - it uses separate sources for
+Python 2/Python 3. Thus, this codebase can not expect it to be present.
+To work around this, mypy allows the typing import to be behind a False-y
+optional to prevent it from running at runtime and type-comments can be used
+to remove the need for the types to be accessible directly during runtime.
+
+This module provides the False-y guard in a nicely named fashion so that a
+curious maintainer can reach here to read this.
+
+In pip, all static-typing related imports should be guarded as follows:
+
+ from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+ if MYPY_CHECK_RUNNING:
+ from typing import ...
+
+Ref: https://github.com/python/mypy/issues/3216
+"""
+
+MYPY_CHECK_RUNNING = False
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py
new file mode 100644
index 00000000..46390f4a
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/ui.py
@@ -0,0 +1,424 @@
+from __future__ import absolute_import, division
+
+import contextlib
+import itertools
+import logging
+import sys
+import time
+from signal import SIGINT, default_int_handler, signal
+
+from pip._vendor import six
+from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
+from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
+from pip._vendor.progress.spinner import Spinner
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import get_indentation
+from pip._internal.utils.misc import format_size
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Iterator, IO
+
+try:
+ from pip._vendor import colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+
+logger = logging.getLogger(__name__)
+
+
+def _select_progress_class(preferred, fallback):
+ encoding = getattr(preferred.file, "encoding", None)
+
+ # If we don't know what encoding this file is in, then we'll just assume
+ # that it doesn't support unicode and use the ASCII bar.
+ if not encoding:
+ return fallback
+
+ # Collect all of the possible characters we want to use with the preferred
+ # bar.
+ characters = [
+ getattr(preferred, "empty_fill", six.text_type()),
+ getattr(preferred, "fill", six.text_type()),
+ ]
+ characters += list(getattr(preferred, "phases", []))
+
+ # Try to decode the characters we're using for the bar using the encoding
+ # of the given file, if this works then we'll assume that we can use the
+ # fancier bar and if not we'll fall back to the plaintext bar.
+ try:
+ six.text_type().join(characters).encode(encoding)
+ except UnicodeEncodeError:
+ return fallback
+ else:
+ return preferred
+
+
+_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
+
+
+class InterruptibleMixin(object):
+ """
+ Helper to ensure that self.finish() gets called on keyboard interrupt.
+
+ This allows downloads to be interrupted without leaving temporary state
+ (like hidden cursors) behind.
+
+ This class is similar to the progress library's existing SigIntMixin
+ helper, but as of version 1.2, that helper has the following problems:
+
+ 1. It calls sys.exit().
+ 2. It discards the existing SIGINT handler completely.
+ 3. It leaves its own handler in place even after an uninterrupted finish,
+ which will have unexpected delayed effects if the user triggers an
+ unrelated keyboard interrupt some time after a progress-displaying
+ download has already completed, for example.
+ """
+
+ def __init__(self, *args, **kwargs):
+ """
+ Save the original SIGINT handler for later.
+ """
+ super(InterruptibleMixin, self).__init__(*args, **kwargs)
+
+ self.original_handler = signal(SIGINT, self.handle_sigint)
+
+ # If signal() returns None, the previous handler was not installed from
+ # Python, and we cannot restore it. This probably should not happen,
+ # but if it does, we must restore something sensible instead, at least.
+ # The least bad option should be Python's default SIGINT handler, which
+ # just raises KeyboardInterrupt.
+ if self.original_handler is None:
+ self.original_handler = default_int_handler
+
+ def finish(self):
+ """
+ Restore the original SIGINT handler after finishing.
+
+ This should happen regardless of whether the progress display finishes
+ normally, or gets interrupted.
+ """
+ super(InterruptibleMixin, self).finish()
+ signal(SIGINT, self.original_handler)
+
+ def handle_sigint(self, signum, frame):
+ """
+ Call self.finish() before delegating to the original SIGINT handler.
+
+ This handler should only be in place while the progress display is
+ active.
+ """
+ self.finish()
+ self.original_handler(signum, frame)
+
+
+class SilentBar(Bar):
+
+ def update(self):
+ pass
+
+
+class BlueEmojiBar(IncrementalBar):
+
+ suffix = "%(percent)d%%"
+ bar_prefix = " "
+ bar_suffix = " "
+ phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
+
+
+class DownloadProgressMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ super(DownloadProgressMixin, self).__init__(*args, **kwargs)
+ self.message = (" " * (get_indentation() + 2)) + self.message
+
+ @property
+ def downloaded(self):
+ return format_size(self.index)
+
+ @property
+ def download_speed(self):
+ # Avoid zero division errors...
+ if self.avg == 0.0:
+ return "..."
+ return format_size(1 / self.avg) + "/s"
+
+ @property
+ def pretty_eta(self):
+ if self.eta:
+ return "eta %s" % self.eta_td
+ return ""
+
+ def iter(self, it, n=1):
+ for x in it:
+ yield x
+ self.next(n)
+ self.finish()
+
+
+class WindowsMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ # The Windows terminal does not support the hide/show cursor ANSI codes
+ # even with colorama. So we'll ensure that hide_cursor is False on
+ # Windows.
+ # This call needs to go before the super() call, so that hide_cursor
+ # is set in time. The base progress bar class writes the "hide cursor"
+ # code to the terminal in its init, so if we don't set this soon
+ # enough, we get a "hide" with no corresponding "show"...
+ if WINDOWS and self.hide_cursor:
+ self.hide_cursor = False
+
+ super(WindowsMixin, self).__init__(*args, **kwargs)
+
+ # Check if we are running on Windows and we have the colorama module,
+ # if we do then wrap our file with it.
+ if WINDOWS and colorama:
+ self.file = colorama.AnsiToWin32(self.file)
+ # The progress code expects to be able to call self.file.isatty()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.isatty = lambda: self.file.wrapped.isatty()
+ # The progress code expects to be able to call self.file.flush()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.flush = lambda: self.file.wrapped.flush()
+
+
+class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin):
+
+ file = sys.stdout
+ message = "%(percent)d%%"
+ suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
+
+# NOTE: The "type: ignore" comments on the following classes are there to
+# work around https://github.com/python/typing/issues/241
+
+
+class DefaultDownloadProgressBar(BaseDownloadProgressBar,
+ _BaseBar):
+ pass
+
+
+class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
+ pass
+
+
+class DownloadBar(BaseDownloadProgressBar, # type: ignore
+ Bar):
+ pass
+
+
+class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
+ FillingCirclesBar):
+ pass
+
+
+class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
+ BlueEmojiBar):
+ pass
+
+
+class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin, Spinner):
+
+ file = sys.stdout
+ suffix = "%(downloaded)s %(download_speed)s"
+
+ def next_phase(self):
+ if not hasattr(self, "_phaser"):
+ self._phaser = itertools.cycle(self.phases)
+ return next(self._phaser)
+
+ def update(self):
+ message = self.message % self
+ phase = self.next_phase()
+ suffix = self.suffix % self
+ line = ''.join([
+ message,
+ " " if message else "",
+ phase,
+ " " if suffix else "",
+ suffix,
+ ])
+
+ self.writeln(line)
+
+
+BAR_TYPES = {
+ "off": (DownloadSilentBar, DownloadSilentBar),
+ "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
+ "ascii": (DownloadBar, DownloadProgressSpinner),
+ "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
+ "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
+}
+
+
+def DownloadProgressProvider(progress_bar, max=None):
+ if max is None or max == 0:
+ return BAR_TYPES[progress_bar][1]().iter
+ else:
+ return BAR_TYPES[progress_bar][0](max=max).iter
+
+
+################################################################
+# Generic "something is happening" spinners
+#
+# We don't even try using progress.spinner.Spinner here because it's actually
+# simpler to reimplement from scratch than to coerce their code into doing
+# what we need.
+################################################################
+
+@contextlib.contextmanager
+def hidden_cursor(file):
+ # type: (IO) -> Iterator[None]
+ # The Windows terminal does not support the hide/show cursor ANSI codes,
+ # even via colorama. So don't even try.
+ if WINDOWS:
+ yield
+ # We don't want to clutter the output with control characters if we're
+ # writing to a file, or if the user is running with --quiet.
+ # See https://github.com/pypa/pip/issues/3418
+ elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
+ yield
+ else:
+ file.write(HIDE_CURSOR)
+ try:
+ yield
+ finally:
+ file.write(SHOW_CURSOR)
+
+
+class RateLimiter(object):
+ def __init__(self, min_update_interval_seconds):
+ # type: (float) -> None
+ self._min_update_interval_seconds = min_update_interval_seconds
+ self._last_update = 0 # type: float
+
+ def ready(self):
+ # type: () -> bool
+ now = time.time()
+ delta = now - self._last_update
+ return delta >= self._min_update_interval_seconds
+
+ def reset(self):
+ # type: () -> None
+ self._last_update = time.time()
+
+
+class SpinnerInterface(object):
+ def spin(self):
+ # type: () -> None
+ raise NotImplementedError()
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ raise NotImplementedError()
+
+
+class InteractiveSpinner(SpinnerInterface):
+ def __init__(self, message, file=None, spin_chars="-\\|/",
+ # Empirically, 8 updates/second looks nice
+ min_update_interval_seconds=0.125):
+ self._message = message
+ if file is None:
+ file = sys.stdout
+ self._file = file
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._finished = False
+
+ self._spin_cycle = itertools.cycle(spin_chars)
+
+ self._file.write(" " * get_indentation() + self._message + " ... ")
+ self._width = 0
+
+ def _write(self, status):
+ assert not self._finished
+ # Erase what we wrote before by backspacing to the beginning, writing
+ # spaces to overwrite the old text, and then backspacing again
+ backup = "\b" * self._width
+ self._file.write(backup + " " * self._width + backup)
+ # Now we have a blank slate to add our status
+ self._file.write(status)
+ self._width = len(status)
+ self._file.flush()
+ self._rate_limiter.reset()
+
+ def spin(self):
+ # type: () -> None
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._write(next(self._spin_cycle))
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ if self._finished:
+ return
+ self._write(final_status)
+ self._file.write("\n")
+ self._file.flush()
+ self._finished = True
+
+
+# Used for dumb terminals, non-interactive installs (no tty), etc.
+# We still print updates occasionally (once every 60 seconds by default) to
+# act as a keep-alive for systems like Travis-CI that take lack-of-output as
+# an indication that a task has frozen.
+class NonInteractiveSpinner(SpinnerInterface):
+ def __init__(self, message, min_update_interval_seconds=60):
+ # type: (str, float) -> None
+ self._message = message
+ self._finished = False
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._update("started")
+
+ def _update(self, status):
+ assert not self._finished
+ self._rate_limiter.reset()
+ logger.info("%s: %s", self._message, status)
+
+ def spin(self):
+ # type: () -> None
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._update("still running...")
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ if self._finished:
+ return
+ self._update("finished with status '%s'" % (final_status,))
+ self._finished = True
+
+
+@contextlib.contextmanager
+def open_spinner(message):
+ # type: (str) -> Iterator[SpinnerInterface]
+ # Interactive spinner goes directly to sys.stdout rather than being routed
+ # through the logging system, but it acts like it has level INFO,
+ # i.e. it's only displayed if we're at level INFO or better.
+ # Non-interactive spinner goes through the logging system, so it is always
+ # in sync with logging configuration.
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
+ spinner = InteractiveSpinner(message) # type: SpinnerInterface
+ else:
+ spinner = NonInteractiveSpinner(message)
+ try:
+ with hidden_cursor(sys.stdout):
+ yield spinner
+ except KeyboardInterrupt:
+ spinner.finish("canceled")
+ raise
+ except Exception:
+ spinner.finish("error")
+ raise
+ else:
+ spinner.finish("done")
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py
new file mode 100644
index 00000000..380db1c3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py
@@ -0,0 +1,34 @@
+import os.path
+import site
+import sys
+
+
+def running_under_virtualenv():
+ # type: () -> bool
+ """
+ Return True if we're running inside a virtualenv, False otherwise.
+
+ """
+ if hasattr(sys, 'real_prefix'):
+ # pypa/virtualenv case
+ return True
+ elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
+ # PEP 405 venv
+ return True
+
+ return False
+
+
+def virtualenv_no_global():
+ # type: () -> bool
+ """
+ Return True if in a venv and no system site packages.
+ """
+ # this mirrors the logic in virtualenv.py for locating the
+ # no-global-site-packages.txt file
+ site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
+ no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
+ if running_under_virtualenv() and os.path.isfile(no_global_file):
+ return True
+ else:
+ return False
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py
new file mode 100644
index 00000000..cb573ab6
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py
@@ -0,0 +1,12 @@
+# Expose a limited set of classes and functions so callers outside of
+# the vcs package don't need to import deeper than `pip._internal.vcs`.
+# (The test directory and imports protected by MYPY_CHECK_RUNNING may
+# still need to import from a vcs sub-package.)
+from pip._internal.vcs.versioncontrol import ( # noqa: F401
+ RemoteNotFoundError, make_vcs_requirement_url, vcs,
+)
+# Import all vcs modules to register each VCS in the VcsSupport object.
+import pip._internal.vcs.bazaar
+import pip._internal.vcs.git
+import pip._internal.vcs.mercurial
+import pip._internal.vcs.subversion # noqa: F401
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 00000000..a4eba3d7
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc
new file mode 100644
index 00000000..126f552c
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pyc
new file mode 100644
index 00000000..e331d430
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/git.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc
new file mode 100644
index 00000000..898fd9a2
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc
new file mode 100644
index 00000000..abf6452d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc
new file mode 100644
index 00000000..d9a06ebf
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc
Binary files differ
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py
new file mode 100644
index 00000000..4f1e114b
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.utils.misc import display_path, path_to_url, rmtree
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+logger = logging.getLogger(__name__)
+
+
+class Bazaar(VersionControl):
+ name = 'bzr'
+ dirname = '.bzr'
+ repo_name = 'branch'
+ schemes = (
+ 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
+ 'bzr+lp',
+ )
+
+ def __init__(self, *args, **kwargs):
+ super(Bazaar, self).__init__(*args, **kwargs)
+ # This is only needed for python <2.7.5
+ # Register lp but do not expose as a scheme to support bzr+lp.
+ if getattr(urllib_parse, 'uses_fragment', None):
+ urllib_parse.uses_fragment.extend(['lp'])
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return ['-r', rev]
+
+ def export(self, location, url):
+ """
+ Export the Bazaar repository at the url to the destination location
+ """
+ # Remove the location to make sure Bazaar can export it correctly
+ if os.path.exists(location):
+ rmtree(location)
+
+ url, rev_options = self.get_url_rev_options(url)
+ self.run_command(
+ ['export', location, url] + rev_options.to_args(),
+ show_stdout=False,
+ )
+
+ def fetch_new(self, dest, url, rev_options):
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Checking out %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
+ self.run_command(cmd_args)
+
+ def switch(self, dest, url, rev_options):
+ self.run_command(['switch', url], cwd=dest)
+
+ def update(self, dest, url, rev_options):
+ cmd_args = ['pull', '-q'] + rev_options.to_args()
+ self.run_command(cmd_args, cwd=dest)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ # hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
+ url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url)
+ if url.startswith('ssh://'):
+ url = 'bzr+' + url
+ return url, rev, user_pass
+
+ @classmethod
+ def get_remote_url(cls, location):
+ urls = cls.run_command(['info'], show_stdout=False, cwd=location)
+ for line in urls.splitlines():
+ line = line.strip()
+ for x in ('checkout of branch: ',
+ 'parent branch: '):
+ if line.startswith(x):
+ repo = line.split(x)[1]
+ if cls._is_local_repository(repo):
+ return path_to_url(repo)
+ return repo
+ return None
+
+ @classmethod
+ def get_revision(cls, location):
+ revision = cls.run_command(
+ ['revno'], show_stdout=False, cwd=location,
+ )
+ return revision.splitlines()[-1]
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+
+vcs.register(Bazaar)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py
new file mode 100644
index 00000000..3445c1b3
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/git.py
@@ -0,0 +1,358 @@
+from __future__ import absolute_import
+
+import logging
+import os.path
+import re
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.compat import samefile
+from pip._internal.utils.misc import display_path, redact_password_from_url
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.vcs.versioncontrol import (
+ RemoteNotFoundError, VersionControl, vcs,
+)
+
+urlsplit = urllib_parse.urlsplit
+urlunsplit = urllib_parse.urlunsplit
+
+
+logger = logging.getLogger(__name__)
+
+
HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$')


def looks_like_hash(sha):
    """Return True if ``sha`` has the form of a full 40-char git SHA-1."""
    return HASH_REGEX.match(sha) is not None
+
+
class Git(VersionControl):
    """Support for the Git VCS ('git+' URL schemes)."""

    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = (
        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
    )
    # Prevent the user's environment variables from interfering with pip:
    # https://github.com/pypa/pip/issues/1130
    unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
    default_arg_rev = 'HEAD'

    @staticmethod
    def get_base_rev_args(rev):
        """Return the command-line arguments selecting revision ``rev``."""
        return [rev]

    def get_git_version(self):
        """Return the installed git client version as a parsed version."""
        VERSION_PFX = 'git version '
        version = self.run_command(['version'], show_stdout=False)
        if version.startswith(VERSION_PFX):
            version = version[len(VERSION_PFX):].split()[0]
        else:
            version = ''
        # get first 3 positions of the git version because
        # on windows it is x.y.z.windows.t, and this parses as
        # LegacyVersion which always smaller than a Version.
        version = '.'.join(version.split('.')[:3])
        return parse_version(version)

    @classmethod
    def get_current_branch(cls, location):
        """
        Return the current branch, or None if HEAD isn't at a branch
        (e.g. detached HEAD).
        """
        # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
        # HEAD rather than a symbolic ref. In addition, the -q causes the
        # command to exit with status code 1 instead of 128 in this case
        # and to suppress the message to stderr.
        args = ['symbolic-ref', '-q', 'HEAD']
        output = cls.run_command(
            args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
        )
        ref = output.strip()

        if ref.startswith('refs/heads/'):
            return ref[len('refs/heads/'):]

        return None

    def export(self, location, url):
        """Export the Git repository at the url to the destination location"""
        if not location.endswith('/'):
            location = location + '/'

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path, url=url)
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False, cwd=temp_dir.path
            )

    @classmethod
    def get_revision_sha(cls, dest, rev):
        """
        Return (sha_or_none, is_branch), where sha_or_none is a commit hash
        if the revision names a remote branch or tag, otherwise None.

        Args:
            dest: the repository directory.
            rev: the revision name.
        """
        # Pass rev to pre-filter the list.
        output = cls.run_command(['show-ref', rev], cwd=dest,
                                 show_stdout=False, on_returncode='ignore')
        refs = {}
        for line in output.strip().splitlines():
            try:
                sha, ref = line.split()
            except ValueError:
                # Include the offending line to simplify troubleshooting if
                # this error ever occurs.
                raise ValueError('unexpected show-ref line: {!r}'.format(line))

            refs[ref] = sha

        branch_ref = 'refs/remotes/origin/{}'.format(rev)
        tag_ref = 'refs/tags/{}'.format(rev)

        sha = refs.get(branch_ref)
        if sha is not None:
            return (sha, True)

        sha = refs.get(tag_ref)

        return (sha, False)

    @classmethod
    def resolve_revision(cls, dest, url, rev_options):
        """
        Resolve a revision to a new RevOptions object with the SHA1 of the
        branch, tag, or ref if found.

        Args:
            rev_options: a RevOptions object.
        """
        rev = rev_options.arg_rev
        sha, is_branch = cls.get_revision_sha(dest, rev)

        if sha is not None:
            rev_options = rev_options.make_new(sha)
            # NOTE: RevOptions is documented as immutable; branch_name is
            # attached ad hoc here and read back via getattr() in
            # fetch_new().
            rev_options.branch_name = rev if is_branch else None

            return rev_options

        # Do not show a warning for the common case of something that has
        # the form of a Git commit hash.
        if not looks_like_hash(rev):
            logger.warning(
                "Did not find branch or tag '%s', assuming revision or ref.",
                rev,
            )

        if not rev.startswith('refs/'):
            return rev_options

        # If it looks like a ref, we have to fetch it explicitly.
        cls.run_command(
            ['fetch', '-q', url] + rev_options.to_args(),
            cwd=dest,
        )
        # Change the revision to the SHA of the ref we fetched
        sha = cls.get_revision(dest, rev='FETCH_HEAD')
        rev_options = rev_options.make_new(sha)

        return rev_options

    @classmethod
    def is_commit_id_equal(cls, dest, name):
        """
        Return whether the current commit hash equals the given name.

        Args:
            dest: the repository directory.
            name: a string name.
        """
        if not name:
            # Then avoid an unnecessary subprocess call.
            return False

        return cls.get_revision(dest) == name

    def fetch_new(self, dest, url, rev_options):
        """Clone ``url`` into ``dest`` and check out the wanted revision."""
        rev_display = rev_options.to_display()
        logger.info(
            'Cloning %s%s to %s', redact_password_from_url(url),
            rev_display, display_path(dest),
        )
        self.run_command(['clone', '-q', url, dest])

        if rev_options.rev:
            # Then a specific revision was requested.
            rev_options = self.resolve_revision(dest, url, rev_options)
            branch_name = getattr(rev_options, 'branch_name', None)
            if branch_name is None:
                # Only do a checkout if the current commit id doesn't match
                # the requested revision.
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    cmd_args = ['checkout', '-q'] + rev_options.to_args()
                    self.run_command(cmd_args, cwd=dest)
            elif self.get_current_branch(dest) != branch_name:
                # Then a specific branch was requested, and that branch
                # is not yet checked out.
                track_branch = 'origin/{}'.format(branch_name)
                cmd_args = [
                    'checkout', '-b', branch_name, '--track', track_branch,
                ]
                self.run_command(cmd_args, cwd=dest)

        #: repo may contain submodules
        self.update_submodules(dest)

    def switch(self, dest, url, rev_options):
        """Retarget origin at ``url`` and check out the wanted revision."""
        self.run_command(['config', 'remote.origin.url', url], cwd=dest)
        cmd_args = ['checkout', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest, url, rev_options):
        """Fetch from the default remote and hard-reset to the revision."""
        # First fetch changes from the default remote
        if self.get_git_version() >= parse_version('1.9.0'):
            # fetch tags in addition to everything else
            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
        else:
            self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        rev_options = self.resolve_revision(dest, url, rev_options)
        cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    @classmethod
    def get_remote_url(cls, location):
        """
        Return URL of the first remote encountered.

        Raises RemoteNotFoundError if the repository does not have a remote
        url configured.
        """
        # We need to pass 1 for extra_ok_returncodes since the command
        # exits with return code 1 if there are no matching lines.
        stdout = cls.run_command(
            ['config', '--get-regexp', r'remote\..*\.url'],
            extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
        )
        remotes = stdout.splitlines()
        try:
            found_remote = remotes[0]
        except IndexError:
            raise RemoteNotFoundError

        # Prefer 'origin' over an arbitrary first remote, if present.
        for remote in remotes:
            if remote.startswith('remote.origin.url '):
                found_remote = remote
                break
        url = found_remote.split(' ')[1]
        return url.strip()

    @classmethod
    def get_revision(cls, location, rev=None):
        """Return the SHA of ``rev`` (default HEAD) at ``location``."""
        if rev is None:
            rev = 'HEAD'
        current_rev = cls.run_command(
            ['rev-parse', rev], show_stdout=False, cwd=location,
        )
        return current_rev.strip()

    @classmethod
    def get_subdirectory(cls, location):
        """Return the path of setup.py relative to the repo root, or None."""
        # find the repo root
        git_dir = cls.run_command(['rev-parse', '--git-dir'],
                                  show_stdout=False, cwd=location).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        root_dir = os.path.join(git_dir, '..')
        # find setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        # relative path of setup.py to repo root
        if samefile(root_dir, location):
            return None
        return os.path.relpath(location, root_dir)

    @classmethod
    def get_url_rev_and_auth(cls, url):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        # Works around an apparent Git bug
        # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
        scheme, netloc, path, query, fragment = urlsplit(url)
        if scheme.endswith('file'):
            initial_slashes = path[:-len(path.lstrip('/'))]
            newpath = (
                initial_slashes +
                urllib_request.url2pathname(path)
                .replace('\\', '/').lstrip('/')
            )
            # Rebuild the URL keeping the vcs prefix (e.g. 'git+') on the
            # scheme.  (A previous version also built the URL without the
            # prefix first, but that value was immediately overwritten —
            # a dead store, removed here.)
            after_plus = scheme.find('+') + 1
            url = scheme[:after_plus] + urlunsplit(
                (scheme[after_plus:], netloc, newpath, query, fragment),
            )

        if '://' not in url:
            assert 'file:' not in url
            url = url.replace('git+', 'git+ssh://')
            url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
            url = url.replace('ssh://', '')
        else:
            url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)

        return url, rev, user_pass

    @classmethod
    def update_submodules(cls, location):
        """Initialise/update submodules if a .gitmodules file is present."""
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        cls.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def controls_location(cls, location):
        """Return whether ``location`` is inside a git work tree."""
        if super(Git, cls).controls_location(location):
            return True
        try:
            # 'git rev-parse' produces no output (and exits 0) inside a
            # work tree, so an empty result means git controls the path.
            r = cls.run_command(['rev-parse'],
                                cwd=location,
                                show_stdout=False,
                                on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False


vcs.register(Git)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py
new file mode 100644
index 00000000..db42783d
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py
@@ -0,0 +1,103 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.utils.misc import display_path, path_to_url
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+logger = logging.getLogger(__name__)
+
+
class Mercurial(VersionControl):
    """Support for the Mercurial VCS ('hg+' URL schemes)."""

    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')

    @staticmethod
    def get_base_rev_args(rev):
        """Return the command arguments naming revision ``rev``."""
        return [rev]

    def export(self, location, url):
        """Export the repository at ``url`` to ``location`` (files only)."""
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path, url=url)

            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir.path
            )

    def fetch_new(self, dest, url, rev_options):
        """Clone ``url`` into ``dest``, then update to the wanted revision."""
        logger.info(
            'Cloning hg %s%s to %s',
            url,
            rev_options.to_display(),
            display_path(dest),
        )
        self.run_command(['clone', '--noupdate', '-q', url, dest])
        self.run_command(['update', '-q'] + rev_options.to_args(), cwd=dest)

    def switch(self, dest, url, rev_options):
        """Rewrite the clone's default path to ``url``, then update."""
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        parser = configparser.RawConfigParser()
        try:
            parser.read(repo_config)
            parser.set('paths', 'default', url)
            with open(repo_config, 'w') as handle:
                parser.write(handle)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            self.run_command(['update', '-q'] + rev_options.to_args(),
                             cwd=dest)

    def update(self, dest, url, rev_options):
        """Pull new changesets, then update to the wanted revision."""
        self.run_command(['pull', '-q'], cwd=dest)
        self.run_command(['update', '-q'] + rev_options.to_args(), cwd=dest)

    @classmethod
    def get_remote_url(cls, location):
        """Return the configured default remote path for ``location``."""
        url = cls.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if cls._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    @classmethod
    def get_revision(cls, location):
        """
        Return the repository-local changeset revision number of the
        working copy's parent, as a string.
        """
        return cls.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()

    @classmethod
    def get_requirement_revision(cls, location):
        """
        Return the changeset identification hash of the working copy's
        parent, as a 40-character hexadecimal string.
        """
        return cls.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()

    @classmethod
    def is_commit_id_equal(cls, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Mercurial)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py
new file mode 100644
index 00000000..6bb4c8c5
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py
@@ -0,0 +1,314 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+import sys
+
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ display_path, rmtree, split_auth_from_netloc,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
+
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Tuple
+ from pip._internal.vcs.versioncontrol import RevOptions
+
+logger = logging.getLogger(__name__)
+
+
class Subversion(VersionControl):
    """Support for the Subversion VCS ('svn+' URL schemes)."""

    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    @classmethod
    def should_add_vcs_url_prefix(cls, remote_url):
        # The remote URL reported by svn never carries the 'svn+' prefix,
        # so a requirement URL always needs it added.
        return True

    @staticmethod
    def get_base_rev_args(rev):
        """Return the '-r <rev>' arguments selecting revision ``rev``."""
        return ['-r', rev]

    @classmethod
    def get_revision(cls, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if cls.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(cls.dirname)
            entries_fn = os.path.join(base, cls.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = cls._get_svn_url_rev(base)

            if base == location:
                base = dirurl + '/'  # save the root url
            elif not dirurl or not dirurl.startswith(base):
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    @classmethod
    def get_netloc_and_auth(cls, netloc, scheme):
        """
        This override allows the auth information to be passed to svn via the
        --username and --password options instead of via the URL.
        """
        if scheme == 'ssh':
            # The --username and --password options can't be used for
            # svn+ssh URLs, so keep the auth information in the URL.
            return super(Subversion, cls).get_netloc_and_auth(netloc, scheme)

        return split_auth_from_netloc(netloc)

    @classmethod
    def get_url_rev_and_auth(cls, url):
        """Split ``url`` into (url, rev, auth), keeping 'svn+' on ssh URLs."""
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url)
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev, user_pass

    @staticmethod
    def make_rev_args(username, password):
        """Return --username/--password arguments for the given credentials."""
        extra_args = []
        if username:
            extra_args += ['--username', username]
        if password:
            extra_args += ['--password', password]

        return extra_args

    @classmethod
    def get_remote_url(cls, location):
        """Return the remote URL of the checkout containing ``location``."""
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return cls._get_svn_url_rev(location)[0]

    @classmethod
    def _get_svn_url_rev(cls, location):
        """Return (url, rev) for the svn working copy at ``location``."""
        from pip._internal.exceptions import InstallationError

        entries_path = os.path.join(location, cls.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        # svn <= 1.6: plain-text entries file whose first line is the
        # format number (8, 9 or 10).
        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            # Older XML-format entries file.
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)  # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                # Note that using get_remote_call_options is not necessary here
                # because `svn info` is being run against a local directory.
                # We don't need to worry about making sure interactive mode
                # is being used to prompt for passwords, because passwords
                # are only potentially needed for remote server requests.
                xml = cls.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    @classmethod
    def is_commit_id_equal(cls, dest, name):
        """Always assume the versions don't match"""
        return False

    def __init__(self, use_interactive=None):
        # type: (Optional[bool]) -> None
        """
        Args:
            use_interactive: whether svn may prompt the user; defaults to
                whether stdin is attached to a TTY.
        """
        if use_interactive is None:
            use_interactive = sys.stdin.isatty()
        self.use_interactive = use_interactive

        # This member is used to cache the fetched version of the current
        # ``svn`` client.
        # Special value definitions:
        #   None: Not evaluated yet.
        #   Empty tuple: Could not parse version.
        self._vcs_version = None  # type: Optional[Tuple[int, ...]]

        super(Subversion, self).__init__()

    def call_vcs_version(self):
        # type: () -> Tuple[int, ...]
        """Query the version of the currently installed Subversion client.

        :return: A tuple containing the parts of the version information or
            ``()`` if the version returned from ``svn`` could not be parsed.
        :raises: BadCommand: If ``svn`` is not installed.
        """
        # Example versions:
        #   svn, version 1.10.3 (r1842928)
        #      compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
        #   svn, version 1.7.14 (r1542130)
        #      compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
        version_prefix = 'svn, version '
        version = self.run_command(['--version'], show_stdout=False)
        if not version.startswith(version_prefix):
            return ()

        version = version[len(version_prefix):].split()[0]
        version_list = version.split('.')
        try:
            parsed_version = tuple(map(int, version_list))
        except ValueError:
            return ()

        return parsed_version

    def get_vcs_version(self):
        # type: () -> Tuple[int, ...]
        """Return the version of the currently installed Subversion client.

        If the version of the Subversion client has already been queried,
        a cached value will be used.

        :return: A tuple containing the parts of the version information or
            ``()`` if the version returned from ``svn`` could not be parsed.
        :raises: BadCommand: If ``svn`` is not installed.
        """
        if self._vcs_version is not None:
            # Use cached version, if available.
            # If parsing the version failed previously (empty tuple),
            # do not attempt to parse it again.
            return self._vcs_version

        vcs_version = self.call_vcs_version()
        self._vcs_version = vcs_version
        return vcs_version

    def get_remote_call_options(self):
        # type: () -> List[str]
        """Return options to be used on calls to Subversion that contact the server.

        These options are applicable for the following ``svn`` subcommands used
        in this class.

            - checkout
            - export
            - switch
            - update

        :return: A list of command line arguments to pass to ``svn``.
        """
        if not self.use_interactive:
            # --non-interactive switch is available since Subversion 0.14.4.
            # Subversion < 1.8 runs in interactive mode by default.
            return ['--non-interactive']

        svn_version = self.get_vcs_version()
        # By default, Subversion >= 1.8 runs in non-interactive mode if
        # stdin is not a TTY. Since that is how pip invokes SVN, in
        # call_subprocess(), pip must pass --force-interactive to ensure
        # the user can be prompted for a password, if required.
        # SVN added the --force-interactive option in SVN 1.8. Since
        # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
        # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
        # can't safely add the option if the SVN version is < 1.8 (or unknown).
        if svn_version >= (1, 8):
            return ['--force-interactive']

        return []

    def export(self, location, url):
        """Export the svn repository at the url to the destination location"""
        url, rev_options = self.get_url_rev_options(url)

        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            cmd_args = (['export'] + self.get_remote_call_options() +
                        rev_options.to_args() + [url, location])
            self.run_command(cmd_args, show_stdout=False)

    def fetch_new(self, dest, url, rev_options):
        # type: (str, str, RevOptions) -> None
        """Check out a fresh working copy of ``url`` at ``dest``."""
        rev_display = rev_options.to_display()
        logger.info(
            'Checking out %s%s to %s',
            url,
            rev_display,
            display_path(dest),
        )
        cmd_args = (['checkout', '-q'] +
                    self.get_remote_call_options() +
                    rev_options.to_args() + [url, dest])
        self.run_command(cmd_args)

    def switch(self, dest, url, rev_options):
        # type: (str, str, RevOptions) -> None
        """Switch the working copy at ``dest`` to ``url``."""
        cmd_args = (['switch'] + self.get_remote_call_options() +
                    rev_options.to_args() + [url, dest])
        self.run_command(cmd_args)

    def update(self, dest, url, rev_options):
        # type: (str, str, RevOptions) -> None
        """Update the working copy at ``dest`` to the wanted revision."""
        cmd_args = (['update'] + self.get_remote_call_options() +
                    rev_options.to_args() + [dest])
        self.run_command(cmd_args)


vcs.register(Subversion)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py
new file mode 100644
index 00000000..2d05fc13
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py
@@ -0,0 +1,600 @@
+"""Handles all VCS (version control) support"""
+from __future__ import absolute_import
+
+import errno
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor import pkg_resources
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.misc import (
+ ask_path_exists, backup_dir, call_subprocess, display_path, rmtree,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type
+ )
+ from pip._internal.utils.ui import SpinnerInterface
+
+ AuthInfo = Tuple[Optional[str], Optional[str]]
+
+__all__ = ['vcs']
+
+
+logger = logging.getLogger(__name__)
+
+
def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
    """
    Return the URL for a VCS requirement.

    Args:
        repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
        project_name: the (unescaped) project name.
        subdir: optional path of the project inside the repository.
    """
    egg_name = pkg_resources.to_filename(project_name)
    req = '{}@{}#egg={}'.format(repo_url, rev, egg_name)
    if not subdir:
        return req
    return req + '&subdirectory={}'.format(subdir)
+
+
class RemoteNotFoundError(Exception):
    """Raised when a repository has no remote URL configured."""
    pass
+
+
class RevOptions(object):

    """
    Encapsulates a VCS-specific revision to install, along with any VCS
    install options.

    Instances of this class should be treated as if immutable.
    """

    def __init__(
        self,
        vc_class,  # type: Type[VersionControl]
        rev=None,  # type: Optional[str]
        extra_args=None,  # type: Optional[List[str]]
    ):
        # type: (...) -> None
        """
        Args:
            vc_class: a VersionControl subclass.
            rev: the name of the revision to install.
            extra_args: a list of extra options.
        """
        self.vc_class = vc_class
        self.rev = rev
        self.extra_args = [] if extra_args is None else extra_args

    def __repr__(self):
        return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev)

    @property
    def arg_rev(self):
        # type: () -> Optional[str]
        # Fall back to the backend's default revision (e.g. 'HEAD' for git).
        rev = self.rev
        return self.vc_class.default_arg_rev if rev is None else rev

    def to_args(self):
        # type: () -> List[str]
        """
        Return the VCS-specific command arguments.
        """
        rev = self.arg_rev
        rev_args = [] if rev is None else self.vc_class.get_base_rev_args(rev)
        return rev_args + self.extra_args

    def to_display(self):
        # type: () -> str
        """Return a human-readable suffix naming the revision, or ''."""
        if self.rev:
            return ' (to revision {})'.format(self.rev)
        return ''

    def make_new(self, rev):
        # type: (str) -> RevOptions
        """
        Make a copy of the current instance, but with a new rev.

        Args:
            rev: the name of the revision for the new object.
        """
        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
+
+
class VcsSupport(object):
    """Registry of the available VCS backends."""

    _registry = {}  # type: Dict[str, VersionControl]
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # type: () -> None
        # Register more schemes with urlparse for various version control
        # systems
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        return iter(self._registry)

    @property
    def backends(self):
        # type: () -> List[VersionControl]
        return list(self._registry.values())

    @property
    def dirnames(self):
        # type: () -> List[str]
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        # type: () -> List[str]
        return [scheme
                for backend in self.backends
                for scheme in backend.schemes]

    def register(self, cls):
        # type: (Type[VersionControl]) -> None
        """Instantiate ``cls`` and add it to the registry (once per name)."""
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls()
            logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, name):
        # type: (str) -> None
        self._registry.pop(name, None)

    def get_backend_for_dir(self, location):
        # type: (str) -> Optional[VersionControl]
        """
        Return a VersionControl object if a repository of that type is found
        at the given directory.
        """
        for vcs_backend in self._registry.values():
            if not vcs_backend.controls_location(location):
                continue
            logger.debug('Determine that %s uses VCS: %s',
                         location, vcs_backend.name)
            return vcs_backend
        return None

    def get_backend(self, name):
        # type: (str) -> Optional[VersionControl]
        """
        Return a VersionControl object or None.
        """
        return self._registry.get(name.lower())


vcs = VcsSupport()
+
+
+class VersionControl(object):
+ name = ''
+ dirname = ''
+ repo_name = ''
+ # List of supported schemes for this Version Control
+ schemes = () # type: Tuple[str, ...]
+ # Iterable of environment variable names to pass to call_subprocess().
+ unset_environ = () # type: Tuple[str, ...]
+ default_arg_rev = None # type: Optional[str]
+
@classmethod
def should_add_vcs_url_prefix(cls, remote_url):
    """
    Return whether the vcs prefix (e.g. "git+") should be added to a
    repository's remote url when used in a requirement.

    True unless the URL already starts with this VCS's own scheme.
    """
    prefix = '{}:'.format(cls.name)
    return not remote_url.lower().startswith(prefix)
+
@classmethod
def get_subdirectory(cls, repo_dir):
    """
    Return the path to setup.py, relative to the repo root.

    Base implementation: assume setup.py lives at the repository root,
    i.e. there is no subdirectory (None).  Backends such as Git override
    this to search parent directories.
    """
    return None
+
@classmethod
def get_requirement_revision(cls, repo_dir):
    """
    Return the revision string that should be used in a requirement.

    Defaults to the current revision; backends (e.g. Mercurial) override
    this when a different identifier is needed in requirement URLs.
    """
    return cls.get_revision(repo_dir)
+
@classmethod
def get_src_requirement(cls, repo_dir, project_name):
    """
    Return the requirement string to use to redownload the files
    currently at the given repository directory.

    Args:
        project_name: the (unescaped) project name.

    The return value has a form similar to the following:

        {repository_url}@{revision}#egg={project_name}

    Returns None when the repository has no usable remote URL.
    """
    repo_url = cls.get_remote_url(repo_dir)
    if repo_url is None:
        return None

    if cls.should_add_vcs_url_prefix(repo_url):
        repo_url = '{}+{}'.format(cls.name, repo_url)

    return make_vcs_requirement_url(
        repo_url,
        cls.get_requirement_revision(repo_dir),
        project_name,
        subdir=cls.get_subdirectory(repo_dir),
    )
+
@staticmethod
def get_base_rev_args(rev):
    """
    Return the base revision arguments for a vcs command.

    Args:
        rev: the name of a revision to install. Cannot be None.
    """
    # Abstract: each backend supplies its own flag syntax
    # (e.g. Subversion returns ['-r', rev]).
    raise NotImplementedError
+
@classmethod
def make_rev_options(cls, rev=None, extra_args=None):
    # type: (Optional[str], Optional[List[str]]) -> RevOptions
    """
    Return a RevOptions object.

    Args:
        rev: the name of a revision to install.
        extra_args: a list of extra options.

    Returns a RevOptions wrapping this backend class.
    """
    return RevOptions(cls, rev, extra_args=extra_args)
+
@classmethod
def _is_local_repository(cls, repo):
    # type: (str) -> bool
    """
    Return whether ``repo`` is a local filesystem path.

    posix absolute paths start with os.path.sep,
    win32 ones start with drive (like c:\\folder)
    """
    drive = os.path.splitdrive(repo)[0]
    return bool(drive) or repo.startswith(os.path.sep)
+
def export(self, location, url):
    """
    Export the repository at the url to the destination location
    i.e. only download the files, without vcs informations

    :param url: the repository URL starting with a vcs prefix.
    """
    # Abstract: implemented by each backend (Git, Mercurial, Subversion,
    # Bazaar).
    raise NotImplementedError
+
+ @classmethod
+ def get_netloc_and_auth(cls, netloc, scheme):
+ """
+ Parse the repository URL's netloc, and return the new netloc to use
+ along with auth information.
+
+ Args:
+ netloc: the original repository URL netloc.
+ scheme: the repository URL's scheme without the vcs prefix.
+
+ This is mainly for the Subversion class to override, so that auth
+ information can be provided via the --username and --password options
+ instead of through the URL. For other subclasses like Git without
+ such an option, auth information must stay in the URL.
+
+ Returns: (netloc, (username, password)).
+ """
+ return netloc, (None, None)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ # type: (str) -> Tuple[str, Optional[str], AuthInfo]
+ """
+ Parse the repository URL to use, and return the URL, revision,
+ and auth info to use.
+
+ Returns: (url, rev, (username, password)).
+ """
+ scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
+ if '+' not in scheme:
+ raise ValueError(
+ "Sorry, {!r} is a malformed VCS url. "
+ "The format is <vcs>+<protocol>://<url>, "
+ "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
+ )
+ # Remove the vcs prefix.
+ scheme = scheme.split('+', 1)[1]
+ netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
+ rev = None
+ if '@' in path:
+ path, rev = path.rsplit('@', 1)
+ url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
+ return url, rev, user_pass
+
+ @staticmethod
+ def make_rev_args(username, password):
+ """
+ Return the RevOptions "extra arguments" to use in obtain().
+ """
+ return []
+
+ def get_url_rev_options(self, url):
+ # type: (str) -> Tuple[str, RevOptions]
+ """
+ Return the URL and RevOptions object to use in obtain() and in
+ some cases export(), as a tuple (url, rev_options).
+ """
+ url, rev, user_pass = self.get_url_rev_and_auth(url)
+ username, password = user_pass
+ extra_args = self.make_rev_args(username, password)
+ rev_options = self.make_rev_options(rev, extra_args=extra_args)
+
+ return url, rev_options
+
+ @staticmethod
+ def normalize_url(url):
+ # type: (str) -> str
+ """
+ Normalize a URL for comparison by unquoting it and removing any
+ trailing slash.
+ """
+ return urllib_parse.unquote(url).rstrip('/')
+
+ @classmethod
+ def compare_urls(cls, url1, url2):
+ # type: (str, str) -> bool
+ """
+ Compare two repo URLs for identity, ignoring incidental differences.
+ """
+ return (cls.normalize_url(url1) == cls.normalize_url(url2))
+
    def fetch_new(self, dest, url, rev_options):
        """
        Fetch a revision from a repository, in the case that this is the
        first fetch from the repository.

        Args:
            dest: the directory to fetch the repository to.
            url: the repository URL (vcs prefix already stripped by obtain()).
            rev_options: a RevOptions object.

        Subclasses must implement this.
        """
        raise NotImplementedError
+
    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.

        Args:
            rev_options: a RevOptions object.

        Subclasses must implement this.
        """
        raise NotImplementedError
+
    def update(self, dest, url, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.

        Args:
            rev_options: a RevOptions object.

        Subclasses must implement this.
        """
        raise NotImplementedError
+
    @classmethod
    def is_commit_id_equal(cls, dest, name):
        """
        Return whether the id of the current commit equals the given name.

        Args:
            dest: the repository directory.
            name: a string name.

        Subclasses must implement this.
        """
        raise NotImplementedError
+
    def obtain(self, dest, url):
        # type: (str, str) -> None
        """
        Install or update in editable mode the package represented by this
        VersionControl object.

        :param dest: the repository directory in which to install or update.
        :param url: the repository URL starting with a vcs prefix.
        """
        url, rev_options = self.get_url_rev_options(url)

        # Fresh checkout: nothing at ``dest`` yet, just fetch and stop.
        if not os.path.exists(dest):
            self.fetch_new(dest, url, rev_options)
            return

        rev_display = rev_options.to_display()
        if self.is_repository_directory(dest):
            existing_url = self.get_remote_url(dest)
            if self.compare_urls(existing_url, url):
                logger.debug(
                    '%s in %s exists, and has correct URL (%s)',
                    self.repo_name.title(),
                    display_path(dest),
                    url,
                )
                # Same repo, same URL: update in place only when the
                # current commit differs from the requested revision.
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    logger.info(
                        'Updating %s %s%s',
                        display_path(dest),
                        self.repo_name,
                        rev_display,
                    )
                    self.update(dest, url, rev_options)
                else:
                    logger.info('Skipping because already up-to-date.')
                return

            # Same vcs, different URL: offer to switch remotes too.
            logger.warning(
                '%s %s in %s exists with URL %s',
                self.name,
                self.repo_name,
                display_path(dest),
                existing_url,
            )
            prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                      ('s', 'i', 'w', 'b'))
        else:
            # ``dest`` exists but is not a checkout of this vcs; switching
            # is not possible, so that option is omitted from the prompt.
            logger.warning(
                'Directory %s already exists, and is not a %s %s.',
                dest,
                self.name,
                self.repo_name,
            )
            # https://github.com/python/mypy/issues/1174
            prompt = ('(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
                      ('i', 'w', 'b'))

        logger.warning(
            'The plan is to install the %s repository %s',
            self.name,
            url,
        )
        response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])

        # NOTE(review): 'a' is not offered in either prompt above;
        # presumably ask_path_exists can produce it via its exists-action
        # handling -- confirm against pip._internal.utils.misc.
        if response == 'a':
            sys.exit(-1)

        if response == 'w':
            logger.warning('Deleting %s', display_path(dest))
            rmtree(dest)
            self.fetch_new(dest, url, rev_options)
            return

        if response == 'b':
            dest_dir = backup_dir(dest)
            logger.warning(
                'Backing up %s to %s', display_path(dest), dest_dir,
            )
            shutil.move(dest, dest_dir)
            self.fetch_new(dest, url, rev_options)
            return

        # Do nothing if the response is "i".
        if response == 's':
            logger.info(
                'Switching %s %s to %s%s',
                self.repo_name,
                display_path(dest),
                url,
                rev_display,
            )
            self.switch(dest, url, rev_options)
+
+ def unpack(self, location, url):
+ # type: (str, str) -> None
+ """
+ Clean up current location and download the url repository
+ (and vcs infos) into location
+
+ :param url: the repository URL starting with a vcs prefix.
+ """
+ if os.path.exists(location):
+ rmtree(location)
+ self.obtain(location, url=url)
+
    @classmethod
    def get_remote_url(cls, location):
        """
        Return the url used at location

        Raises RemoteNotFoundError if the repository does not have a remote
        url configured.

        Subclasses must implement this.
        """
        raise NotImplementedError
+
    @classmethod
    def get_revision(cls, location):
        """
        Return the current commit id of the files at the given location.

        Subclasses must implement this.
        """
        raise NotImplementedError
+
+ @classmethod
+ def run_command(
+ cls,
+ cmd, # type: List[str]
+ show_stdout=True, # type: bool
+ cwd=None, # type: Optional[str]
+ on_returncode='raise', # type: str
+ extra_ok_returncodes=None, # type: Optional[Iterable[int]]
+ command_desc=None, # type: Optional[str]
+ extra_environ=None, # type: Optional[Mapping[str, Any]]
+ spinner=None # type: Optional[SpinnerInterface]
+ ):
+ # type: (...) -> Text
+ """
+ Run a VCS subcommand
+ This is simply a wrapper around call_subprocess that adds the VCS
+ command name, and checks that the VCS is available
+ """
+ cmd = [cls.name] + cmd
+ try:
+ return call_subprocess(cmd, show_stdout, cwd,
+ on_returncode=on_returncode,
+ extra_ok_returncodes=extra_ok_returncodes,
+ command_desc=command_desc,
+ extra_environ=extra_environ,
+ unset_environ=cls.unset_environ,
+ spinner=spinner)
+ except OSError as e:
+ # errno.ENOENT = no such file or directory
+ # In other words, the VCS executable isn't available
+ if e.errno == errno.ENOENT:
+ raise BadCommand(
+ 'Cannot find command %r - do you have '
+ '%r installed and in your '
+ 'PATH?' % (cls.name, cls.name))
+ else:
+ raise # re-raise exception if a different error occurred
+
+ @classmethod
+ def is_repository_directory(cls, path):
+ # type: (str) -> bool
+ """
+ Return whether a directory path is a repository directory.
+ """
+ logger.debug('Checking in %s for %s (%s)...',
+ path, cls.dirname, cls.name)
+ return os.path.exists(os.path.join(path, cls.dirname))
+
+ @classmethod
+ def controls_location(cls, location):
+ # type: (str) -> bool
+ """
+ Check if a location is controlled by the vcs.
+ It is meant to be overridden to implement smarter detection
+ mechanisms for specific vcs.
+
+ This can do more than is_repository_directory() alone. For example,
+ the Git override checks that Git is actually available.
+ """
+ return cls.is_repository_directory(location)
diff --git a/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py
new file mode 100644
index 00000000..6f034cd0
--- /dev/null
+++ b/.emacs.d.back/.python-environments/default/lib/python3.7/site-packages/pip/_internal/wheel.py
@@ -0,0 +1,1125 @@
+"""
+Support for installing and building the "wheel" binary package format.
+"""
+from __future__ import absolute_import
+
+import collections
+import compileall
+import csv
+import hashlib
+import logging
+import os.path
+import re
+import shutil
+import stat
+import sys
+import warnings
+from base64 import urlsafe_b64encode
+from email.parser import Parser
+
+from pip._vendor import pkg_resources
+from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.six import StringIO
+
+from pip._internal import pep425tags
+from pip._internal.download import unpack_url
+from pip._internal.exceptions import (
+ InstallationError, InvalidWheelFilename, UnsupportedWheel,
+)
+from pip._internal.locations import distutils_scheme
+from pip._internal.models.link import Link
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.marker_files import PIP_DELETE_MARKER_FILENAME
+from pip._internal.utils.misc import (
+ LOG_DIVIDER, call_subprocess, captured_stdout, ensure_dir,
+ format_command_args, path_to_url, read_chunks,
+)
+from pip._internal.utils.setuptools_build import make_setuptools_shim_args
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any, Iterable
+ )
+ from pip._vendor.packaging.requirements import Requirement
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.download import PipSession
+ from pip._internal.index import FormatControl, PackageFinder
+ from pip._internal.operations.prepare import (
+ RequirementPreparer
+ )
+ from pip._internal.cache import WheelCache
+ from pip._internal.pep425tags import Pep425Tag
+
+ InstalledCSVRow = Tuple[str, ...]
+
+
+VERSION_COMPATIBLE = (1, 0)
+
+
+logger = logging.getLogger(__name__)
+
+
def normpath(src, p):
    """Return ``src`` relative to ``p``, using forward slashes."""
    rel = os.path.relpath(src, p)
    return rel.replace(os.path.sep, '/')
+
+
def hash_file(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[Any, int]
    """Return (hash, length) for path using hashlib.sha256()"""
    hasher = hashlib.sha256()
    length = 0
    with open(path, 'rb') as f:
        # Read in blocksize chunks so large files stay memory-bounded.
        while True:
            block = f.read(blocksize)
            if not block:
                break
            length += len(block)
            hasher.update(block)
    return (hasher, length)  # type: ignore
+
+
def rehash(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[str, str]
    """Return (encoded_digest, length) for path using hashlib.sha256()"""
    hasher, length = hash_file(path, blocksize)
    # PEP 376 RECORD format: urlsafe base64, padding stripped.
    encoded = urlsafe_b64encode(hasher.digest()).decode('latin1')
    digest = 'sha256=' + encoded.rstrip('=')
    # unicode/str python2 issues
    return (digest, str(length))  # type: ignore
+
+
def open_for_csv(name, mode):
    # type: (str, Text) -> IO
    """Open ``name`` for csv use: binary mode on Python 2, text mode with
    universal newline translation disabled on Python 3.
    """
    if sys.version_info[0] >= 3:
        return open(name, mode, newline='')
    return open(name, mode + 'b')
+
+
def replace_python_tag(wheelname, new_tag):
    # type: (str, str) -> str
    """Return ``wheelname`` with its Python tag (third-from-last dashed
    component) replaced by ``new_tag``.
    """
    components = wheelname.split('-')
    components[-3] = new_tag
    return '-'.join(components)
+
+
def fix_script(path):
    # type: (str) -> Optional[bool]
    """Rewrite a ``#!python`` shebang in ``path`` to point at the running
    interpreter.

    Returns True if the file was changed, False if it had no such shebang,
    and None if ``path`` is not a regular file.
    """
    # XXX RECORD hashes will need to be updated
    if not os.path.isfile(path):
        return None
    with open(path, 'rb') as script:
        firstline = script.readline()
        if not firstline.startswith(b'#!python'):
            return False
        exename = sys.executable.encode(sys.getfilesystemencoding())
        firstline = b'#!' + exename + os.linesep.encode("ascii")
        rest = script.read()
    with open(path, 'wb') as script:
        script.write(firstline)
        script.write(rest)
    return True
+
+
# Matches an extracted "<name>-<version>.dist-info" directory name.
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    # type: (str, str) -> bool
    """Return True when the wheel extracted at ``wheeldir`` declares
    ``Root-Is-Purelib: true`` in its WHEEL metadata (so its files belong
    in purelib rather than platlib).
    """
    canonical = name.replace("-", "_")
    for entry in os.listdir(wheeldir):
        match = dist_info_re.match(entry)
        if not match or match.group('name') != canonical:
            continue
        wheel_metadata = os.path.join(wheeldir, entry, 'WHEEL')
        with open(wheel_metadata) as fh:
            for line in fh:
                if line.lower().rstrip() == "root-is-purelib: true":
                    return True
    return False
+
+
def get_entrypoints(filename):
    # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
    """Parse an entry_points.txt file into two ``{script: "module:func"}``
    maps, one for console scripts and one for GUI scripts.
    """
    if not os.path.exists(filename):
        return {}, {}

    # entry_points files in the wild may not be valid INI; stripping
    # leading and trailing whitespace from every line makes them parseable.
    data = StringIO()
    with open(filename) as fp:
        for line in fp:
            data.write(line.strip())
            data.write("\n")
    data.seek(0)

    entry_points = pkg_resources.EntryPoint.parse_map(data)

    def as_pair(ep):
        # str(EntryPoint) looks like "name = module:func"; drop spaces and
        # split on '=' to get the (name, target) pair.
        return str(ep).replace(" ", "").split("=")

    console = dict(
        as_pair(v)
        for v in entry_points.get('console_scripts', {}).values()
    )
    gui = dict(
        as_pair(v)
        for v in entry_points.get('gui_scripts', {}).values()
    )
    return console, gui
+
+
def message_about_scripts_not_on_PATH(scripts):
    # type: (Sequence[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group the script names by the directory they were installed in.
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, set]
    for destfile in scripts:
        grouped_by_dir[os.path.dirname(destfile)].add(
            os.path.basename(destfile))

    # Directories already on PATH should not be warned about; neither
    # should the directory holding sys.executable, which covers venv
    # invocations without activating the venv.
    not_warn_dirs = [
        os.path.normcase(entry).rstrip(os.sep)
        for entry in os.environ.get("PATH", "").split(os.pathsep)
    ]
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    warn_for = {
        parent_dir: names
        for parent_dir, names in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }
    if not warn_for:
        return None

    # One line per offending directory.
    msg_lines = []
    for parent_dir, names in warn_for.items():
        sorted_scripts = sorted(names)  # type: List[str]
        if len(sorted_scripts) == 1:
            start_text = "script {} is".format(sorted_scripts[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
            )
        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    where = "this directory" if len(msg_lines) == 1 else "these directories"
    msg_lines.append(last_line_fmt.format(where))

    return "\n".join(msg_lines)
+
+
def sorted_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
    """
    Return the given rows of a RECORD file in sorted order.

    Each row is a 3-tuple (path, hash, size) per PEP 376 / PEP 427. The
    size element may be an int, a numeric string, or the empty string.
    """
    def as_str_tuple(row):
        # A path can occur twice in the wild; coercing every cell to str
        # keeps the tie-breaking comparison from raising a TypeError when
        # an int size meets a str size. See
        # https://github.com/pypa/pip/issues/5868
        return tuple(str(cell) for cell in row)

    return sorted(outrows, key=as_str_tuple)
+
+
def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[str, str]
    changed,  # type: set
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """Build the rows of the installed distribution's RECORD file.

    :param installed: A map from archive RECORD path to installation RECORD
        path. Entries are popped as they are matched; leftovers are emitted
        with empty hash/size cells.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        updated = list(row)  # copy: the row is mutated below
        new_path = installed.pop(updated[0], updated[0])
        updated[0] = new_path
        if new_path in changed:
            # File content was rewritten during install; refresh hash/size.
            digest, length = rehash(new_path)
            updated[1] = digest
            updated[2] = length
        installed_rows.append(tuple(updated))
    for path in generated:
        digest, length = rehash(path)
        installed_rows.append((normpath(path, lib_dir), digest, str(length)))
    for archive_path in installed:
        installed_rows.append((installed[archive_path], '', ''))
    return installed_rows
+
+
def move_wheel_files(
    name,  # type: str
    req,  # type: Requirement
    wheeldir,  # type: str
    user=False,  # type: bool
    home=None,  # type: Optional[str]
    root=None,  # type: Optional[str]
    pycompile=True,  # type: bool
    scheme=None,  # type: Optional[Mapping[str, str]]
    isolated=False,  # type: bool
    prefix=None,  # type: Optional[str]
    warn_script_location=True  # type: bool
):
    # type: (...) -> None
    """Install a wheel that has already been unpacked into ``wheeldir``.

    Copies the unpacked files into the destination scheme directories,
    rewrites ``#!python`` scripts, regenerates versioned pip/setuptools
    entry points, and writes the INSTALLER and RECORD metadata files.

    :param name: the project name, used to locate the .dist-info directory.
    :param req: the Requirement being installed, used in error messages.
    :param scheme: optional pre-computed install scheme; when absent one is
        derived via distutils_scheme() from user/home/root/isolated/prefix.
    :param pycompile: byte-compile the sources before copying.
    :param warn_script_location: warn when scripts land off PATH.
    """
    # TODO: Investigate and break this up.
    # TODO: Look into moving this into a dedicated class for representing an
    # installation.

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []  # type: List[str]    # the single .dist-info dir, once found
    data_dirs = []                      # names of any *.data subdirectories
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[str, str]
    changed = set()
    generated = []  # type: List[str]

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        # Recursively copy ``source`` into ``dest``.
        #   is_base: True only for the top-level copy; enables the
        #     *.data / *.dist-info special-casing below.
        #   fixer: optional per-file post-copy hook; returns True when it
        #     modified the file (so RECORD hashes get refreshed).
        #   filter: optional predicate naming files to skip.
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            # *.data trees are installed separately, per scheme key.
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install each *.data subtree into its matching scheme directory,
    # fixing shebangs and skipping setuptools wrappers under scripts/.
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }
    # ignore type, because mypy disallows assigning to a method,
    # see https://github.com/python/mypy/issues/2427
    maker._get_script_text = _get_script_text  # type: ignore
    maker.script_template = r"""# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        # NOTE(review): sys.version[:3] assumes a single-digit minor
        # version (e.g. "3.7"); it breaks on Python 3.10+.
        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated_console_scripts = maker.make_multiple(
            ['%s = %s' % kv for kv in console.items()]
        )
        generated.extend(generated_console_scripts)

        if warn_script_location:
            msg = message_about_scripts_not_on_PATH(generated_console_scripts)
            if msg is not None:
                logger.warning(msg)

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            outrows = get_csv_rows_for_installed(
                reader, installed=installed, changed=changed,
                generated=generated, lib_dir=lib_dir,
            )
            writer = csv.writer(record_out)
            # Sort to simplify testing.
            for row in sorted_outrows(outrows):
                writer.writerow(row)
    shutil.move(temp_record, record)
+
+
def wheel_version(source_dir):
    # type: (Optional[str]) -> Optional[Tuple[int, ...]]
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return None if we couldn't parse / extract it.
    """
    try:
        dist = next(iter(pkg_resources.find_on_path(None, source_dir)))
        wheel_text = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_text)
        version_str = wheel_data['Wheel-Version'].strip()
        return tuple(int(part) for part in version_str.split('.'))
    except Exception:
        # Any failure (no dist found, missing metadata, malformed
        # version) means the version is unknown.
        return None
+
+
def check_compatibility(version, name):
    # type: (Optional[Tuple[int, ...]], str) -> None
    """Raise or warn when called with an incompatible Wheel-Version.

    Pip refuses to install a Wheel-Version a major series ahead of what it
    supports (e.g. 2.0 > 1.1) and warns when only the minor version is
    ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    if version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )
+
+
def format_tag(file_tag):
    # type: (Tuple[str, ...]) -> str
    """Join a tag triple into "<python_tag>-<abi_tag>-<platform_tag>".

    :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag).
    """
    return '-'.join(file_tag)
+
+
class Wheel(object):
    """Represents a wheel file name and the tag combinations it supports."""

    # TODO: Maybe move the class into the models sub-package
    # TODO: Maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        # type: (str) -> None
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        match = self.wheel_file_re.match(filename)
        if match is None:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.name = match.group('name').replace('_', '-')
        self.version = match.group('ver').replace('_', '-')
        self.build_tag = match.group('build')
        self.pyversions = match.group('pyver').split('.')
        self.abis = match.group('abi').split('.')
        self.plats = match.group('plat').split('.')

        # Every (python, abi, platform) combination this file claims.
        self.file_tags = {
            (py, abi, plat)
            for py in self.pyversions
            for abi in self.abis
            for plat in self.plats
        }

    def get_formatted_file_tags(self):
        # type: () -> List[str]
        """Return the wheel's tags as a sorted list of strings."""
        return sorted(format_tag(tag) for tag in self.file_tags)

    def support_index_min(self, tags=None):
        # type: (Optional[List[Pep425Tag]]) -> Optional[int]
        """Return the lowest index any of this wheel's tags achieves in the
        supported-tags list (0 means most preferred), or None when the
        wheel is not supported at all.
        """
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        indexes = [tags.index(tag) for tag in self.file_tags if tag in tags]
        if not indexes:
            return None
        return min(indexes)

    def supported(self, tags=None):
        # type: (Optional[List[Pep425Tag]]) -> bool
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        return not self.file_tags.isdisjoint(tags)
+
+
+def _contains_egg_info(
+ s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
+ """Determine whether the string looks like an egg_info.
+
+ :param s: The string to parse. E.g. foo-2.1
+ """
+ return bool(_egg_info_re.search(s))
+
+
def should_use_ephemeral_cache(
    req,  # type: InstallRequirement
    format_control,  # type: FormatControl
    autobuilding,  # type: bool
    cache_available  # type: bool
):
    # type: (...) -> Optional[bool]
    """
    Decide whether (and where) to build a wheel for this requirement.

    :param cache_available: whether a cache directory is available for the
        autobuilding=True case.

    :return: True or False to build the requirement with ephem_cache=True
        or False, respectively; or None not to build the requirement.
    """
    # Constraints are never built.
    if req.constraint:
        return None
    if req.is_wheel:
        if not autobuilding:
            logger.info(
                'Skipping %s, due to already being wheel.', req.name,
            )
            return None
    if not autobuilding:
        # Explicit "pip wheel" run: build into the persistent location.
        return False

    # Nothing buildable: editable installs and requirements without an
    # unpacked source tree are skipped.
    if req.editable or not req.source_dir:
        return None

    allowed_formats = format_control.get_allowed_formats(
        canonicalize_name(req.name))
    if "binary" not in allowed_formats:
        logger.info(
            "Skipping bdist_wheel for %s, due to binaries "
            "being disabled for it.", req.name,
        )
        return None

    if req.link and not req.link.is_artifact:
        # VCS checkout. Build wheel just for this run.
        return True

    base, ext = req.link.splitext()
    if cache_available and _contains_egg_info(base):
        # Looks like a normal name-version sdist: safe to cache persistently.
        return False

    # Otherwise, build the wheel just for this run using the ephemeral
    # cache since we are either in the case of e.g. a local directory, or
    # no cache directory is available to use.
    return True
+
+
def format_command_result(
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> str
    """
    Render a command invocation and its output for logging.
    """
    pieces = [
        'Command arguments: {}\n'.format(format_command_args(command_args)),
    ]
    if not command_output:
        pieces.append('Command output: None')
    elif logger.getEffectiveLevel() > logging.DEBUG:
        # Only show full output at debug verbosity.
        pieces.append('Command output: [use --verbose to show]')
    else:
        output = command_output
        if not output.endswith('\n'):
            output += '\n'
        pieces.append('Command output:\n{}{}'.format(output, LOG_DIVIDER))
    return ''.join(pieces)
+
+
def get_legacy_build_wheel_path(
    names,  # type: List[str]
    temp_dir,  # type: str
    req,  # type: InstallRequirement
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> Optional[str]
    """
    Return the path to the wheel in the temporary build directory,
    or None (with a warning) if the build produced no files.
    """
    names = sorted(names)  # Sort for determinism.

    if not names:
        msg = (
            'Legacy build of wheel for {!r} created no files.\n'
        ).format(req.name)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
        return None

    if len(names) > 1:
        # Ambiguous result: warn, then fall through to the first file.
        msg = (
            'Legacy build of wheel for {!r} created more than one file.\n'
            'Filenames (choosing first): {}\n'
        ).format(req.name, names)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)

    return os.path.join(temp_dir, names[0])
+
+
+class WheelBuilder(object):
+ """Build wheels from a RequirementSet."""
+
+ def __init__(
+ self,
+ finder, # type: PackageFinder
+ preparer, # type: RequirementPreparer
+ wheel_cache, # type: WheelCache
+ build_options=None, # type: Optional[List[str]]
+ global_options=None, # type: Optional[List[str]]
+ no_clean=False # type: bool
+ ):
+ # type: (...) -> None
+ self.finder = finder
+ self.preparer = preparer
+ self.wheel_cache = wheel_cache
+
+ self._wheel_dir = preparer.wheel_download_dir
+
+ self.build_options = build_options or []
+ self.global_options = global_options or []
+ self.no_clean = no_clean
+
+ def _build_one(self, req, output_dir, python_tag=None):
+ """Build one wheel.
+
+ :return: The filename of the built wheel, or None if the build failed.
+ """
+ # Install build deps into temporary directory (PEP 518)
+ with req.build_env:
+ return self._build_one_inside_env(req, output_dir,
+ python_tag=python_tag)
+
+ def _build_one_inside_env(self, req, output_dir, python_tag=None):
+ with TempDirectory(kind="wheel") as temp_dir:
+ if req.use_pep517:
+ builder = self._build_one_pep517
+ else:
+ builder = self._build_one_legacy
+ wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
+ if wheel_path is not None:
+ wheel_name = os.path.basename(wheel_path)
+ dest_path = os.path.join(output_dir, wheel_name)
+ try:
+ wheel_hash, length = hash_file(wheel_path)
+ shutil.move(wheel_path, dest_path)
+ logger.info('Created wheel for %s: '
+ 'filename=%s size=%d sha256=%s',
+ req.name, wheel_name, length,
+ wheel_hash.hexdigest())
+ logger.info('Stored in directory: %s', output_dir)
+ return dest_path
+ except Exception:
+ pass
+ # Ignore return, we can't do anything else useful.
+ self._clean_one(req)
+ return None
+
+ def _base_setup_args(self, req):
+ # NOTE: Eventually, we'd want to also -S to the flags here, when we're
+ # isolating. Currently, it breaks Python in virtualenvs, because it
+ # relies on site.py to find parts of the standard library outside the
+ # virtualenv.
+ base_cmd = make_setuptools_shim_args(req.setup_py_path,
+ unbuffered_output=True)
+ return base_cmd + list(self.global_options)
+
+ def _build_one_pep517(self, req, tempd, python_tag=None):
+ """Build one InstallRequirement using the PEP 517 build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ assert req.metadata_directory is not None
+ if self.build_options:
+ # PEP 517 does not support --build-options
+ logger.error('Cannot build wheel for %s using PEP 517 when '
+ '--build-options is present' % (req.name,))
+ return None
+ try:
+ req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,)
+ logger.debug('Destination directory: %s', tempd)
+ wheel_name = req.pep517_backend.build_wheel(
+ tempd,
+ metadata_directory=req.metadata_directory
+ )
+ if python_tag:
+ # General PEP 517 backends don't necessarily support
+ # a "--python-tag" option, so we rename the wheel
+ # file directly.
+ new_name = replace_python_tag(wheel_name, python_tag)
+ os.rename(
+ os.path.join(tempd, wheel_name),
+ os.path.join(tempd, new_name)
+ )
+ # Reassign to simplify the return at the end of function
+ wheel_name = new_name
+ except Exception:
+ logger.error('Failed building wheel for %s', req.name)
+ return None
+ return os.path.join(tempd, wheel_name)
+
+ def _build_one_legacy(self, req, tempd, python_tag=None):
+ """Build one InstallRequirement using the "legacy" build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ base_args = self._base_setup_args(req)
+
+ spin_message = 'Building wheel for %s (setup.py)' % (req.name,)
+ with open_spinner(spin_message) as spinner:
+ logger.debug('Destination directory: %s', tempd)
+ wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
+ + self.build_options
+
+ if python_tag is not None:
+ wheel_args += ["--python-tag", python_tag]
+
+ try:
+ output = call_subprocess(wheel_args, cwd=req.setup_py_dir,
+ spinner=spinner)
+ except Exception:
+ spinner.finish("error")
+ logger.error('Failed building wheel for %s', req.name)
+ return None
+ names = os.listdir(tempd)
+ wheel_path = get_legacy_build_wheel_path(
+ names=names,
+ temp_dir=tempd,
+ req=req,
+ command_args=wheel_args,
+ command_output=output,
+ )
+ return wheel_path
+
+ def _clean_one(self, req):
+ base_args = self._base_setup_args(req)
+
+ logger.info('Running setup.py clean for %s', req.name)
+ clean_args = base_args + ['clean', '--all']
+ try:
+ call_subprocess(clean_args, cwd=req.source_dir)
+ return True
+ except Exception:
+ logger.error('Failed cleaning build dir for %s', req.name)
+ return False
+
    def build(
        self,
        requirements,  # type: Iterable[InstallRequirement]
        session,  # type: PipSession
        autobuilding=False  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """Build wheels for the given requirements.

        First decide, per requirement, whether to build at all and whether
        to use the ephemeral cache; then build each selected requirement
        and, when autobuilding, swap its source dir for the built wheel.

        :param requirements: the requirements to consider building.
        :param session: the network session, used to unpack built wheels
            back into the source dir when autobuilding.
        :param autobuilding: True when pip is building wheels implicitly
            for caching, rather than for an explicit "pip wheel" run.
        :return: the list of InstallRequirement objects that failed to
            build (empty when everything built, or nothing needed building).
        """
        buildset = []
        format_control = self.finder.format_control
        # Whether a cache directory is available for autobuilding=True.
        cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)

        # Select which requirements get built, and with which cache mode.
        for req in requirements:
            ephem_cache = should_use_ephemeral_cache(
                req, format_control=format_control, autobuilding=autobuilding,
                cache_available=cache_available,
            )
            if ephem_cache is None:
                # None means: do not build this requirement at all.
                continue

            buildset.append((req, ephem_cache))

        if not buildset:
            return []

        # Is any wheel build not using the ephemeral cache?
        if any(not ephem_cache for _, ephem_cache in buildset):
            have_directory_for_build = self._wheel_dir or (
                autobuilding and self.wheel_cache.cache_dir
            )
            assert have_directory_for_build

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    # Tag cached wheels with the running interpreter.
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        # Can't create the cache dir: count as a failed build.
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    # Explicit "pip wheel": write to the requested directory.
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return a list of requirements that failed to build
        return build_failure