Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +4 -0
- evalkit_llava/bin/bunzip2 +3 -0
- evalkit_llava/bin/unlzma +3 -0
- evalkit_llava/lib/libbz2.a +3 -0
- evalkit_llava/lib/libbz2.so.1.0.8 +3 -0
- evalkit_llava/lib/libform.so +0 -0
- evalkit_llava/lib/libgcc_s.so +4 -0
- evalkit_llava/lib/libmenu.a +0 -0
- evalkit_llava/lib/libpanelw.so.6.4 +0 -0
- evalkit_llava/lib/libuuid.so +0 -0
- evalkit_llava/lib/python3.10/idlelib/Icons/idle_32.gif +3 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py +179 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/models.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__version__.py +14 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/_internal_utils.py +50 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/adapters.py +719 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/api.py +157 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/auth.py +314 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/certs.py +17 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/compat.py +78 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/cookies.py +561 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/exceptions.py +151 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/help.py +127 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/hooks.py +33 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/models.py +1037 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/packages.py +25 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/sessions.py +831 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/status_codes.py +128 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/structures.py +99 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/utils.py +1096 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -71,3 +71,7 @@ evalkit_llava/lib/python3.10/lib2to3/tests/__pycache__/test_fixers.cpython-310.p
|
|
| 71 |
evalkit_llava/lib/libtinfo.so filter=lfs diff=lfs merge=lfs -text
|
| 72 |
evalkit_llava/lib/libz.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 73 |
evalkit_llava/lib/libreadline.so.8.2 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 71 |
evalkit_llava/lib/libtinfo.so filter=lfs diff=lfs merge=lfs -text
|
| 72 |
evalkit_llava/lib/libz.so.1 filter=lfs diff=lfs merge=lfs -text
|
| 73 |
evalkit_llava/lib/libreadline.so.8.2 filter=lfs diff=lfs merge=lfs -text
|
| 74 |
+
evalkit_llava/lib/libbz2.a filter=lfs diff=lfs merge=lfs -text
|
| 75 |
+
evalkit_llava/bin/unlzma filter=lfs diff=lfs merge=lfs -text
|
| 76 |
+
evalkit_llava/lib/libbz2.so.1.0.8 filter=lfs diff=lfs merge=lfs -text
|
| 77 |
+
evalkit_llava/bin/bunzip2 filter=lfs diff=lfs merge=lfs -text
|
evalkit_llava/bin/bunzip2
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:8a514cce807cb1656a3bcd59794401e7d63c9554267e9acc77097a406092a8ed
|
| 3 |
+
size 299464
|
evalkit_llava/bin/unlzma
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5cc86d36933372b94af4bd9ed22ad711f57b4e16175675627edcd4cb9ea46a61
|
| 3 |
+
size 108336
|
evalkit_llava/lib/libbz2.a
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:4377dc3d8f7542568b6365cd6bb06970b53c20e9a71b7d54271874f7868be500
|
| 3 |
+
size 264138
|
evalkit_llava/lib/libbz2.so.1.0.8
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:4979469ae49ac144f62202f75bbdd69b17197aedb879d633337c8cf7e4aba301
|
| 3 |
+
size 229016
|
evalkit_llava/lib/libform.so
ADDED
|
Binary file (91.4 kB). View file
|
|
|
evalkit_llava/lib/libgcc_s.so
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/* GNU ld script
|
| 2 |
+
Use the shared library, but some functions are only in
|
| 3 |
+
the static library. */
|
| 4 |
+
GROUP ( libgcc_s.so.1 -lgcc )
|
evalkit_llava/lib/libmenu.a
ADDED
|
Binary file (82.2 kB). View file
|
|
|
evalkit_llava/lib/libpanelw.so.6.4
ADDED
|
Binary file (20.7 kB). View file
|
|
|
evalkit_llava/lib/libuuid.so
ADDED
|
Binary file (35.9 kB). View file
|
|
|
evalkit_llava/lib/python3.10/idlelib/Icons/idle_32.gif
ADDED
|
|
Git LFS Details
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# __
|
| 2 |
+
# /__) _ _ _ _ _/ _
|
| 3 |
+
# / ( (- (/ (/ (- _) / _)
|
| 4 |
+
# /
|
| 5 |
+
|
| 6 |
+
"""
|
| 7 |
+
Requests HTTP Library
|
| 8 |
+
~~~~~~~~~~~~~~~~~~~~~
|
| 9 |
+
|
| 10 |
+
Requests is an HTTP library, written in Python, for human beings.
|
| 11 |
+
Basic GET usage:
|
| 12 |
+
|
| 13 |
+
>>> import requests
|
| 14 |
+
>>> r = requests.get('https://www.python.org')
|
| 15 |
+
>>> r.status_code
|
| 16 |
+
200
|
| 17 |
+
>>> b'Python is a programming language' in r.content
|
| 18 |
+
True
|
| 19 |
+
|
| 20 |
+
... or POST:
|
| 21 |
+
|
| 22 |
+
>>> payload = dict(key1='value1', key2='value2')
|
| 23 |
+
>>> r = requests.post('https://httpbin.org/post', data=payload)
|
| 24 |
+
>>> print(r.text)
|
| 25 |
+
{
|
| 26 |
+
...
|
| 27 |
+
"form": {
|
| 28 |
+
"key1": "value1",
|
| 29 |
+
"key2": "value2"
|
| 30 |
+
},
|
| 31 |
+
...
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
The other HTTP methods are supported - see `requests.api`. Full documentation
|
| 35 |
+
is at <https://requests.readthedocs.io>.
|
| 36 |
+
|
| 37 |
+
:copyright: (c) 2017 by Kenneth Reitz.
|
| 38 |
+
:license: Apache 2.0, see LICENSE for more details.
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
import warnings
|
| 42 |
+
|
| 43 |
+
from pip._vendor import urllib3
|
| 44 |
+
|
| 45 |
+
from .exceptions import RequestsDependencyWarning
|
| 46 |
+
|
| 47 |
+
charset_normalizer_version = None
|
| 48 |
+
chardet_version = None
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
|
| 52 |
+
urllib3_version = urllib3_version.split(".")
|
| 53 |
+
assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git.
|
| 54 |
+
|
| 55 |
+
# Sometimes, urllib3 only reports its version as 16.1.
|
| 56 |
+
if len(urllib3_version) == 2:
|
| 57 |
+
urllib3_version.append("0")
|
| 58 |
+
|
| 59 |
+
# Check urllib3 for compatibility.
|
| 60 |
+
major, minor, patch = urllib3_version # noqa: F811
|
| 61 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
| 62 |
+
# urllib3 >= 1.21.1
|
| 63 |
+
assert major >= 1
|
| 64 |
+
if major == 1:
|
| 65 |
+
assert minor >= 21
|
| 66 |
+
|
| 67 |
+
# Check charset_normalizer for compatibility.
|
| 68 |
+
if chardet_version:
|
| 69 |
+
major, minor, patch = chardet_version.split(".")[:3]
|
| 70 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
| 71 |
+
# chardet_version >= 3.0.2, < 6.0.0
|
| 72 |
+
assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0)
|
| 73 |
+
elif charset_normalizer_version:
|
| 74 |
+
major, minor, patch = charset_normalizer_version.split(".")[:3]
|
| 75 |
+
major, minor, patch = int(major), int(minor), int(patch)
|
| 76 |
+
# charset_normalizer >= 2.0.0 < 4.0.0
|
| 77 |
+
assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0)
|
| 78 |
+
else:
|
| 79 |
+
# pip does not need or use character detection
|
| 80 |
+
pass
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _check_cryptography(cryptography_version):
|
| 84 |
+
# cryptography < 1.3.4
|
| 85 |
+
try:
|
| 86 |
+
cryptography_version = list(map(int, cryptography_version.split(".")))
|
| 87 |
+
except ValueError:
|
| 88 |
+
return
|
| 89 |
+
|
| 90 |
+
if cryptography_version < [1, 3, 4]:
|
| 91 |
+
warning = "Old version of cryptography ({}) may cause slowdown.".format(
|
| 92 |
+
cryptography_version
|
| 93 |
+
)
|
| 94 |
+
warnings.warn(warning, RequestsDependencyWarning)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
# Check imported dependencies for compatibility.
|
| 98 |
+
try:
|
| 99 |
+
check_compatibility(
|
| 100 |
+
urllib3.__version__, chardet_version, charset_normalizer_version
|
| 101 |
+
)
|
| 102 |
+
except (AssertionError, ValueError):
|
| 103 |
+
warnings.warn(
|
| 104 |
+
"urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
|
| 105 |
+
"version!".format(
|
| 106 |
+
urllib3.__version__, chardet_version, charset_normalizer_version
|
| 107 |
+
),
|
| 108 |
+
RequestsDependencyWarning,
|
| 109 |
+
)
|
| 110 |
+
|
| 111 |
+
# Attempt to enable urllib3's fallback for SNI support
|
| 112 |
+
# if the standard library doesn't support SNI or the
|
| 113 |
+
# 'ssl' library isn't available.
|
| 114 |
+
try:
|
| 115 |
+
# Note: This logic prevents upgrading cryptography on Windows, if imported
|
| 116 |
+
# as part of pip.
|
| 117 |
+
from pip._internal.utils.compat import WINDOWS
|
| 118 |
+
if not WINDOWS:
|
| 119 |
+
raise ImportError("pip internals: don't import cryptography on Windows")
|
| 120 |
+
try:
|
| 121 |
+
import ssl
|
| 122 |
+
except ImportError:
|
| 123 |
+
ssl = None
|
| 124 |
+
|
| 125 |
+
if not getattr(ssl, "HAS_SNI", False):
|
| 126 |
+
from pip._vendor.urllib3.contrib import pyopenssl
|
| 127 |
+
|
| 128 |
+
pyopenssl.inject_into_urllib3()
|
| 129 |
+
|
| 130 |
+
# Check cryptography version
|
| 131 |
+
from cryptography import __version__ as cryptography_version
|
| 132 |
+
|
| 133 |
+
_check_cryptography(cryptography_version)
|
| 134 |
+
except ImportError:
|
| 135 |
+
pass
|
| 136 |
+
|
| 137 |
+
# urllib3's DependencyWarnings should be silenced.
|
| 138 |
+
from pip._vendor.urllib3.exceptions import DependencyWarning
|
| 139 |
+
|
| 140 |
+
warnings.simplefilter("ignore", DependencyWarning)
|
| 141 |
+
|
| 142 |
+
# Set default logging handler to avoid "No handler found" warnings.
|
| 143 |
+
import logging
|
| 144 |
+
from logging import NullHandler
|
| 145 |
+
|
| 146 |
+
from . import packages, utils
|
| 147 |
+
from .__version__ import (
|
| 148 |
+
__author__,
|
| 149 |
+
__author_email__,
|
| 150 |
+
__build__,
|
| 151 |
+
__cake__,
|
| 152 |
+
__copyright__,
|
| 153 |
+
__description__,
|
| 154 |
+
__license__,
|
| 155 |
+
__title__,
|
| 156 |
+
__url__,
|
| 157 |
+
__version__,
|
| 158 |
+
)
|
| 159 |
+
from .api import delete, get, head, options, patch, post, put, request
|
| 160 |
+
from .exceptions import (
|
| 161 |
+
ConnectionError,
|
| 162 |
+
ConnectTimeout,
|
| 163 |
+
FileModeWarning,
|
| 164 |
+
HTTPError,
|
| 165 |
+
JSONDecodeError,
|
| 166 |
+
ReadTimeout,
|
| 167 |
+
RequestException,
|
| 168 |
+
Timeout,
|
| 169 |
+
TooManyRedirects,
|
| 170 |
+
URLRequired,
|
| 171 |
+
)
|
| 172 |
+
from .models import PreparedRequest, Request, Response
|
| 173 |
+
from .sessions import Session, session
|
| 174 |
+
from .status_codes import codes
|
| 175 |
+
|
| 176 |
+
logging.getLogger(__name__).addHandler(NullHandler())
|
| 177 |
+
|
| 178 |
+
# FileModeWarnings go off per the default.
|
| 179 |
+
warnings.simplefilter("default", FileModeWarning, append=True)
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc
ADDED
|
Binary file (537 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc
ADDED
|
Binary file (1.61 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc
ADDED
|
Binary file (22.1 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc
ADDED
|
Binary file (6.71 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc
ADDED
|
Binary file (8.11 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc
ADDED
|
Binary file (626 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc
ADDED
|
Binary file (18.7 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc
ADDED
|
Binary file (6.23 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc
ADDED
|
Binary file (2.8 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc
ADDED
|
Binary file (981 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/models.cpython-310.pyc
ADDED
|
Binary file (24.3 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-310.pyc
ADDED
|
Binary file (727 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc
ADDED
|
Binary file (19.7 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc
ADDED
|
Binary file (4.73 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/__version__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# .-. .-. .-. . . .-. .-. .-. .-.
|
| 2 |
+
# |( |- |.| | | |- `-. | `-.
|
| 3 |
+
# ' ' `-' `-`.`-' `-' `-' ' `-'
|
| 4 |
+
|
| 5 |
+
__title__ = "requests"
|
| 6 |
+
__description__ = "Python HTTP for Humans."
|
| 7 |
+
__url__ = "https://requests.readthedocs.io"
|
| 8 |
+
__version__ = "2.32.3"
|
| 9 |
+
__build__ = 0x023203
|
| 10 |
+
__author__ = "Kenneth Reitz"
|
| 11 |
+
__author_email__ = "[email protected]"
|
| 12 |
+
__license__ = "Apache-2.0"
|
| 13 |
+
__copyright__ = "Copyright Kenneth Reitz"
|
| 14 |
+
__cake__ = "\u2728 \U0001f370 \u2728"
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/_internal_utils.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests._internal_utils
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Provides utility functions that are consumed internally by Requests
|
| 6 |
+
which depend on extremely few external helpers (such as compat)
|
| 7 |
+
"""
|
| 8 |
+
import re
|
| 9 |
+
|
| 10 |
+
from .compat import builtin_str
|
| 11 |
+
|
| 12 |
+
_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
|
| 13 |
+
_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
|
| 14 |
+
_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
|
| 15 |
+
_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
|
| 16 |
+
|
| 17 |
+
_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
|
| 18 |
+
_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
|
| 19 |
+
HEADER_VALIDATORS = {
|
| 20 |
+
bytes: _HEADER_VALIDATORS_BYTE,
|
| 21 |
+
str: _HEADER_VALIDATORS_STR,
|
| 22 |
+
}
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def to_native_string(string, encoding="ascii"):
|
| 26 |
+
"""Given a string object, regardless of type, returns a representation of
|
| 27 |
+
that string in the native string type, encoding and decoding where
|
| 28 |
+
necessary. This assumes ASCII unless told otherwise.
|
| 29 |
+
"""
|
| 30 |
+
if isinstance(string, builtin_str):
|
| 31 |
+
out = string
|
| 32 |
+
else:
|
| 33 |
+
out = string.decode(encoding)
|
| 34 |
+
|
| 35 |
+
return out
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def unicode_is_ascii(u_string):
|
| 39 |
+
"""Determine if unicode string only contains ASCII characters.
|
| 40 |
+
|
| 41 |
+
:param str u_string: unicode string to check. Must be unicode
|
| 42 |
+
and not Python 2 `str`.
|
| 43 |
+
:rtype: bool
|
| 44 |
+
"""
|
| 45 |
+
assert isinstance(u_string, str)
|
| 46 |
+
try:
|
| 47 |
+
u_string.encode("ascii")
|
| 48 |
+
return True
|
| 49 |
+
except UnicodeEncodeError:
|
| 50 |
+
return False
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/adapters.py
ADDED
|
@@ -0,0 +1,719 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.adapters
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the transport adapters that Requests uses to define
|
| 6 |
+
and maintain connections.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import os.path
|
| 10 |
+
import socket # noqa: F401
|
| 11 |
+
import typing
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
from pip._vendor.urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
|
| 15 |
+
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
|
| 16 |
+
from pip._vendor.urllib3.exceptions import InvalidHeader as _InvalidHeader
|
| 17 |
+
from pip._vendor.urllib3.exceptions import (
|
| 18 |
+
LocationValueError,
|
| 19 |
+
MaxRetryError,
|
| 20 |
+
NewConnectionError,
|
| 21 |
+
ProtocolError,
|
| 22 |
+
)
|
| 23 |
+
from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
|
| 24 |
+
from pip._vendor.urllib3.exceptions import ReadTimeoutError, ResponseError
|
| 25 |
+
from pip._vendor.urllib3.exceptions import SSLError as _SSLError
|
| 26 |
+
from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
|
| 27 |
+
from pip._vendor.urllib3.util import Timeout as TimeoutSauce
|
| 28 |
+
from pip._vendor.urllib3.util import parse_url
|
| 29 |
+
from pip._vendor.urllib3.util.retry import Retry
|
| 30 |
+
from pip._vendor.urllib3.util.ssl_ import create_urllib3_context
|
| 31 |
+
|
| 32 |
+
from .auth import _basic_auth_str
|
| 33 |
+
from .compat import basestring, urlparse
|
| 34 |
+
from .cookies import extract_cookies_to_jar
|
| 35 |
+
from .exceptions import (
|
| 36 |
+
ConnectionError,
|
| 37 |
+
ConnectTimeout,
|
| 38 |
+
InvalidHeader,
|
| 39 |
+
InvalidProxyURL,
|
| 40 |
+
InvalidSchema,
|
| 41 |
+
InvalidURL,
|
| 42 |
+
ProxyError,
|
| 43 |
+
ReadTimeout,
|
| 44 |
+
RetryError,
|
| 45 |
+
SSLError,
|
| 46 |
+
)
|
| 47 |
+
from .models import Response
|
| 48 |
+
from .structures import CaseInsensitiveDict
|
| 49 |
+
from .utils import (
|
| 50 |
+
DEFAULT_CA_BUNDLE_PATH,
|
| 51 |
+
extract_zipped_paths,
|
| 52 |
+
get_auth_from_url,
|
| 53 |
+
get_encoding_from_headers,
|
| 54 |
+
prepend_scheme_if_needed,
|
| 55 |
+
select_proxy,
|
| 56 |
+
urldefragauth,
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
try:
|
| 60 |
+
from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
|
| 61 |
+
except ImportError:
|
| 62 |
+
|
| 63 |
+
def SOCKSProxyManager(*args, **kwargs):
|
| 64 |
+
raise InvalidSchema("Missing dependencies for SOCKS support.")
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
if typing.TYPE_CHECKING:
|
| 68 |
+
from .models import PreparedRequest
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
DEFAULT_POOLBLOCK = False
|
| 72 |
+
DEFAULT_POOLSIZE = 10
|
| 73 |
+
DEFAULT_RETRIES = 0
|
| 74 |
+
DEFAULT_POOL_TIMEOUT = None
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
try:
|
| 78 |
+
import ssl # noqa: F401
|
| 79 |
+
|
| 80 |
+
_preloaded_ssl_context = create_urllib3_context()
|
| 81 |
+
_preloaded_ssl_context.load_verify_locations(
|
| 82 |
+
extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
|
| 83 |
+
)
|
| 84 |
+
except ImportError:
|
| 85 |
+
# Bypass default SSLContext creation when Python
|
| 86 |
+
# interpreter isn't built with the ssl module.
|
| 87 |
+
_preloaded_ssl_context = None
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _urllib3_request_context(
|
| 91 |
+
request: "PreparedRequest",
|
| 92 |
+
verify: "bool | str | None",
|
| 93 |
+
client_cert: "typing.Tuple[str, str] | str | None",
|
| 94 |
+
poolmanager: "PoolManager",
|
| 95 |
+
) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
|
| 96 |
+
host_params = {}
|
| 97 |
+
pool_kwargs = {}
|
| 98 |
+
parsed_request_url = urlparse(request.url)
|
| 99 |
+
scheme = parsed_request_url.scheme.lower()
|
| 100 |
+
port = parsed_request_url.port
|
| 101 |
+
|
| 102 |
+
# Determine if we have and should use our default SSLContext
|
| 103 |
+
# to optimize performance on standard requests.
|
| 104 |
+
poolmanager_kwargs = getattr(poolmanager, "connection_pool_kw", {})
|
| 105 |
+
has_poolmanager_ssl_context = poolmanager_kwargs.get("ssl_context")
|
| 106 |
+
should_use_default_ssl_context = (
|
| 107 |
+
_preloaded_ssl_context is not None and not has_poolmanager_ssl_context
|
| 108 |
+
)
|
| 109 |
+
|
| 110 |
+
cert_reqs = "CERT_REQUIRED"
|
| 111 |
+
if verify is False:
|
| 112 |
+
cert_reqs = "CERT_NONE"
|
| 113 |
+
elif verify is True and should_use_default_ssl_context:
|
| 114 |
+
pool_kwargs["ssl_context"] = _preloaded_ssl_context
|
| 115 |
+
elif isinstance(verify, str):
|
| 116 |
+
if not os.path.isdir(verify):
|
| 117 |
+
pool_kwargs["ca_certs"] = verify
|
| 118 |
+
else:
|
| 119 |
+
pool_kwargs["ca_cert_dir"] = verify
|
| 120 |
+
pool_kwargs["cert_reqs"] = cert_reqs
|
| 121 |
+
if client_cert is not None:
|
| 122 |
+
if isinstance(client_cert, tuple) and len(client_cert) == 2:
|
| 123 |
+
pool_kwargs["cert_file"] = client_cert[0]
|
| 124 |
+
pool_kwargs["key_file"] = client_cert[1]
|
| 125 |
+
else:
|
| 126 |
+
# According to our docs, we allow users to specify just the client
|
| 127 |
+
# cert path
|
| 128 |
+
pool_kwargs["cert_file"] = client_cert
|
| 129 |
+
host_params = {
|
| 130 |
+
"scheme": scheme,
|
| 131 |
+
"host": parsed_request_url.hostname,
|
| 132 |
+
"port": port,
|
| 133 |
+
}
|
| 134 |
+
return host_params, pool_kwargs
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
class BaseAdapter:
|
| 138 |
+
"""The Base Transport Adapter"""
|
| 139 |
+
|
| 140 |
+
def __init__(self):
|
| 141 |
+
super().__init__()
|
| 142 |
+
|
| 143 |
+
def send(
|
| 144 |
+
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
|
| 145 |
+
):
|
| 146 |
+
"""Sends PreparedRequest object. Returns Response object.
|
| 147 |
+
|
| 148 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
| 149 |
+
:param stream: (optional) Whether to stream the request content.
|
| 150 |
+
:param timeout: (optional) How long to wait for the server to send
|
| 151 |
+
data before giving up, as a float, or a :ref:`(connect timeout,
|
| 152 |
+
read timeout) <timeouts>` tuple.
|
| 153 |
+
:type timeout: float or tuple
|
| 154 |
+
:param verify: (optional) Either a boolean, in which case it controls whether we verify
|
| 155 |
+
the server's TLS certificate, or a string, in which case it must be a path
|
| 156 |
+
to a CA bundle to use
|
| 157 |
+
:param cert: (optional) Any user-provided SSL certificate to be trusted.
|
| 158 |
+
:param proxies: (optional) The proxies dictionary to apply to the request.
|
| 159 |
+
"""
|
| 160 |
+
raise NotImplementedError
|
| 161 |
+
|
| 162 |
+
def close(self):
|
| 163 |
+
"""Cleans up adapter specific items."""
|
| 164 |
+
raise NotImplementedError
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
class HTTPAdapter(BaseAdapter):
|
| 168 |
+
"""The built-in HTTP Adapter for urllib3.
|
| 169 |
+
|
| 170 |
+
Provides a general-case interface for Requests sessions to contact HTTP and
|
| 171 |
+
HTTPS urls by implementing the Transport Adapter interface. This class will
|
| 172 |
+
usually be created by the :class:`Session <Session>` class under the
|
| 173 |
+
covers.
|
| 174 |
+
|
| 175 |
+
:param pool_connections: The number of urllib3 connection pools to cache.
|
| 176 |
+
:param pool_maxsize: The maximum number of connections to save in the pool.
|
| 177 |
+
:param max_retries: The maximum number of retries each connection
|
| 178 |
+
should attempt. Note, this applies only to failed DNS lookups, socket
|
| 179 |
+
connections and connection timeouts, never to requests where data has
|
| 180 |
+
made it to the server. By default, Requests does not retry failed
|
| 181 |
+
connections. If you need granular control over the conditions under
|
| 182 |
+
which we retry a request, import urllib3's ``Retry`` class and pass
|
| 183 |
+
that instead.
|
| 184 |
+
:param pool_block: Whether the connection pool should block for connections.
|
| 185 |
+
|
| 186 |
+
Usage::
|
| 187 |
+
|
| 188 |
+
>>> import requests
|
| 189 |
+
>>> s = requests.Session()
|
| 190 |
+
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
|
| 191 |
+
>>> s.mount('http://', a)
|
| 192 |
+
"""
|
| 193 |
+
|
| 194 |
+
__attrs__ = [
|
| 195 |
+
"max_retries",
|
| 196 |
+
"config",
|
| 197 |
+
"_pool_connections",
|
| 198 |
+
"_pool_maxsize",
|
| 199 |
+
"_pool_block",
|
| 200 |
+
]
|
| 201 |
+
|
| 202 |
+
def __init__(
    self,
    pool_connections=DEFAULT_POOLSIZE,
    pool_maxsize=DEFAULT_POOLSIZE,
    max_retries=DEFAULT_RETRIES,
    pool_block=DEFAULT_POOLBLOCK,
):
    """Create the adapter and initialise its urllib3 pool manager.

    :param pool_connections: Number of urllib3 connection pools to cache.
    :param pool_maxsize: Maximum number of connections to save per pool.
    :param max_retries: Retry count or a urllib3 ``Retry`` instance.
    :param pool_block: Whether the pool should block when exhausted.
    """
    # The sentinel default means "no retries at all": zero retries and no
    # retrying on read errors. Any other value is normalised by urllib3.
    if max_retries == DEFAULT_RETRIES:
        retry_policy = Retry(0, read=False)
    else:
        retry_policy = Retry.from_int(max_retries)
    self.max_retries = retry_policy

    self.config = {}
    self.proxy_manager = {}

    super().__init__()

    self._pool_connections = pool_connections
    self._pool_maxsize = pool_maxsize
    self._pool_block = pool_block

    self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
|
| 223 |
+
|
| 224 |
+
def __getstate__(self):
    """Return a picklable snapshot of the attributes listed in ``__attrs__``.

    Missing attributes are recorded as ``None`` rather than raising.
    """
    state = {}
    for name in self.__attrs__:
        state[name] = getattr(self, name, None)
    return state
|
| 226 |
+
|
| 227 |
+
def __setstate__(self, state):
    """Restore a pickled adapter.

    The pool manager is not part of ``state`` (it is built around a lambda
    and cannot be pickled), so it is reconstructed from the saved pool
    settings; ``proxy_manager`` and ``config`` are reset to fresh dicts.
    """
    self.proxy_manager = {}
    self.config = {}

    for name, value in state.items():
        setattr(self, name, value)

    self.init_poolmanager(
        self._pool_connections, self._pool_maxsize, block=self._pool_block
    )
|
| 239 |
+
|
| 240 |
+
def init_poolmanager(
    self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
):
    """Initializes a urllib3 PoolManager.

    This method should not be called from user code, and is only
    exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param connections: The number of urllib3 connection pools to cache.
    :param maxsize: The maximum number of connections to save in the pool.
    :param block: Block when no free connections are available.
    :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
    """
    # Save these values for pickling: __setstate__ rebuilds the (otherwise
    # unpicklable) PoolManager from them.
    self._pool_connections = connections
    self._pool_maxsize = maxsize
    self._pool_block = block

    self.poolmanager = PoolManager(
        num_pools=connections,
        maxsize=maxsize,
        block=block,
        **pool_kwargs,
    )
|
| 265 |
+
|
| 266 |
+
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Return urllib3 ProxyManager for the given proxy.

    This method should not be called from user code, and is only
    exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param proxy: The proxy to return a urllib3 ProxyManager for.
    :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
    :returns: ProxyManager
    :rtype: urllib3.ProxyManager
    """
    # Managers are cached per proxy URL so repeated requests through the
    # same proxy reuse one set of connection pools.
    if proxy in self.proxy_manager:
        manager = self.proxy_manager[proxy]
    elif proxy.lower().startswith("socks"):
        # SOCKS proxies use a dedicated manager; credentials are pulled
        # out of the proxy URL itself.
        username, password = get_auth_from_url(proxy)
        manager = self.proxy_manager[proxy] = SOCKSProxyManager(
            proxy,
            username=username,
            password=password,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs,
        )
    else:
        # Plain HTTP(S) proxy: attach any Proxy-Authorization headers.
        proxy_headers = self.proxy_headers(proxy)
        manager = self.proxy_manager[proxy] = proxy_from_url(
            proxy,
            proxy_headers=proxy_headers,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs,
        )

    return manager
|
| 303 |
+
|
| 304 |
+
def cert_verify(self, conn, url, verify, cert):
    """Verify a SSL certificate. This method should not be called from user
    code, and is only exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param conn: The urllib3 connection object associated with the cert.
    :param url: The requested URL.
    :param verify: Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use
    :param cert: The SSL certificate to verify.
    """
    if url.lower().startswith("https") and verify:
        conn.cert_reqs = "CERT_REQUIRED"

        # Only load the CA certificates if 'verify' is a string indicating the CA bundle to use.
        # Otherwise, if verify is a boolean, we don't load anything since
        # the connection will be using a context with the default certificates already loaded,
        # and this avoids a call to the slow load_verify_locations()
        if verify is not True:
            # `verify` must be a str with a path then
            cert_loc = verify

            if not os.path.exists(cert_loc):
                raise OSError(
                    f"Could not find a suitable TLS CA certificate bundle, "
                    f"invalid path: {cert_loc}"
                )

            # A file path is a CA bundle; a directory is a CA cert directory.
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
    else:
        # Plain HTTP, or verification explicitly disabled.
        conn.cert_reqs = "CERT_NONE"
        conn.ca_certs = None
        conn.ca_cert_dir = None

    if cert:
        # A non-string cert is a (cert_file, key_file) pair; a bare string
        # is the path to a combined cert+key file.
        if not isinstance(cert, basestring):
            conn.cert_file = cert[0]
            conn.key_file = cert[1]
        else:
            conn.cert_file = cert
            conn.key_file = None
        if conn.cert_file and not os.path.exists(conn.cert_file):
            raise OSError(
                f"Could not find the TLS certificate file, "
                f"invalid path: {conn.cert_file}"
            )
        if conn.key_file and not os.path.exists(conn.key_file):
            raise OSError(
                f"Could not find the TLS key file, invalid path: {conn.key_file}"
            )
|
| 358 |
+
|
| 359 |
+
def build_response(self, req, resp):
    """Builds a :class:`Response <requests.Response>` object from a urllib3
    response. This should not be called from user code, and is only exposed
    for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

    :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
    :param resp: The urllib3 response object.
    :rtype: requests.Response
    """
    response = Response()

    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = getattr(resp, "status", None)

    # Make headers case-insensitive.
    response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))

    # Set encoding.
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = resp
    response.reason = response.raw.reason

    # Prepared request URLs may be bytes; Response.url is always text.
    if isinstance(req.url, bytes):
        response.url = req.url.decode("utf-8")
    else:
        response.url = req.url

    # Add new cookies from the server.
    extract_cookies_to_jar(response.cookies, req, resp)

    # Give the Response some context.
    response.request = req
    response.connection = self

    return response
|
| 395 |
+
|
| 396 |
+
def build_connection_pool_key_attributes(self, request, verify, cert=None):
    """Build the PoolKey attributes used by urllib3 to return a connection.

    This looks at the PreparedRequest, the user-specified verify value,
    and the value of the cert parameter to determine what PoolKey values
    to use to select a connection from a given urllib3 Connection Pool.

    The SSL related pool key arguments are not consistently set. As of
    this writing, use the following to determine what keys may be in that
    dictionary:

    * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
      default Requests SSL Context
    * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
      ``"cert_reqs"`` will be set
    * If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
      ``"ca_certs"`` will be set if the string is not a directory recognized
      by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` will be
      set.
    * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
      ``"cert"`` is a tuple with a second item, ``"key_file"`` will also
      be present

    To override these settings, one may subclass this class, call this
    method and use the above logic to change parameters as desired. For
    example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
    must both set ``"ssl_context"`` and based on what else they require,
    alter the other keys to ensure the desired behaviour.

    :param request:
        The PreparedRequest being sent over the connection.
    :type request:
        :class:`~requests.models.PreparedRequest`
    :param verify:
        Either a boolean, in which case it controls whether
        we verify the server's TLS certificate, or a string, in which case it
        must be a path to a CA bundle to use.
    :param cert:
        (optional) Any user-provided SSL certificate for client
        authentication (a.k.a., mTLS). This may be a string (i.e., just
        the path to a file which holds both certificate and key) or a
        tuple of length 2 with the certificate file path and key file
        path.
    :returns:
        A tuple of two dictionaries. The first is the "host parameters"
        portion of the Pool Key including scheme, hostname, and port. The
        second is a dictionary of SSLContext related parameters.
    """
    # All of the work is delegated to the module-level helper, which also
    # consults the pool manager for its current connection pool key class.
    return _urllib3_request_context(request, verify, cert, self.poolmanager)
|
| 445 |
+
|
| 446 |
+
def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
    """Returns a urllib3 connection for the given request and TLS settings.
    This should not be called from user code, and is only exposed for use
    when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param request:
        The :class:`PreparedRequest <PreparedRequest>` object to be sent
        over the connection.
    :param verify:
        Either a boolean, in which case it controls whether we verify the
        server's TLS certificate, or a string, in which case it must be a
        path to a CA bundle to use.
    :param proxies:
        (optional) The proxies dictionary to apply to the request.
    :param cert:
        (optional) Any user-provided SSL certificate to be used for client
        authentication (a.k.a., mTLS).
    :rtype:
        urllib3.ConnectionPool
    """
    proxy = select_proxy(request.url, proxies)
    try:
        host_params, pool_kwargs = self.build_connection_pool_key_attributes(
            request,
            verify,
            cert,
        )
    except ValueError as e:
        # A malformed URL surfaces as ValueError while building the pool
        # key; translate it into the requests-level InvalidURL.
        raise InvalidURL(e, request=request)
    if proxy:
        proxy = prepend_scheme_if_needed(proxy, "http")
        proxy_url = parse_url(proxy)
        if not proxy_url.host:
            raise InvalidProxyURL(
                "Please check proxy URL. It is malformed "
                "and could be missing the host."
            )
        proxy_manager = self.proxy_manager_for(proxy)
        conn = proxy_manager.connection_from_host(
            **host_params, pool_kwargs=pool_kwargs
        )
    else:
        # Only scheme should be lower case
        conn = self.poolmanager.connection_from_host(
            **host_params, pool_kwargs=pool_kwargs
        )

    return conn
|
| 494 |
+
|
| 495 |
+
def get_connection(self, url, proxies=None):
    """DEPRECATED: Users should move to `get_connection_with_tls_context`
    for all subclasses of HTTPAdapter using Requests>=2.32.2.

    Returns a urllib3 connection for the given URL. This should not be
    called from user code, and is only exposed for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param url: The URL to connect to.
    :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
    :rtype: urllib3.ConnectionPool
    """
    warnings.warn(
        (
            "`get_connection` has been deprecated in favor of "
            "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
            "will need to migrate for Requests>=2.32.2. Please see "
            "https://github.com/psf/requests/pull/6710 for more details."
        ),
        DeprecationWarning,
    )
    proxy = select_proxy(url, proxies)

    if proxy:
        proxy = prepend_scheme_if_needed(proxy, "http")
        proxy_url = parse_url(proxy)
        if not proxy_url.host:
            raise InvalidProxyURL(
                "Please check proxy URL. It is malformed "
                "and could be missing the host."
            )
        proxy_manager = self.proxy_manager_for(proxy)
        conn = proxy_manager.connection_from_url(url)
    else:
        # Only scheme should be lower case
        parsed = urlparse(url)
        url = parsed.geturl()
        conn = self.poolmanager.connection_from_url(url)

    return conn
|
| 535 |
+
|
| 536 |
+
def close(self):
    """Dispose of any internal state.

    Clears the main pool manager and every cached proxy manager, which in
    turn closes any pooled connections either of them holds.
    """
    managers = [self.poolmanager, *self.proxy_manager.values()]
    for manager in managers:
        manager.clear()
|
| 545 |
+
|
| 546 |
+
def request_url(self, request, proxies):
    """Obtain the url to use when making the final request.

    If the message is being sent through a HTTP proxy, the full URL has to
    be used. Otherwise, we should only use the path portion of the URL.

    This should not be called from user code, and is only exposed for use
    when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
    :rtype: str
    """
    proxy = select_proxy(request.url, proxies)
    scheme = urlparse(request.url).scheme

    # Only a plain-HTTP request through a non-SOCKS proxy needs the
    # absolute URL; everything else sends just the path portion.
    is_proxied_http_request = proxy and scheme != "https"
    using_socks_proxy = False
    if proxy:
        proxy_scheme = urlparse(proxy).scheme.lower()
        using_socks_proxy = proxy_scheme.startswith("socks")

    url = request.path_url
    if url.startswith("//"):  # Don't confuse urllib3
        url = f"/{url.lstrip('/')}"

    if is_proxied_http_request and not using_socks_proxy:
        # Strip fragment and userinfo from the absolute URL before
        # handing it to the proxy.
        url = urldefragauth(request.url)

    return url
|
| 577 |
+
|
| 578 |
+
def add_headers(self, request, **kwargs):
    """Hook for adding connection-specific headers. Intentionally a no-op.

    Since v2.0 this does nothing by default, but is left in place so users
    subclassing :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` can
    override it. It should not be called from user code.

    :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
    :param kwargs: The keyword arguments from the call to send().
    """
|
| 591 |
+
|
| 592 |
+
def proxy_headers(self, proxy):
    """Build the extra headers to attach to requests routed via a proxy.

    urllib3 ensures these headers go to the proxy itself rather than inside
    a tunnelled request when CONNECT is being used. This should not be
    called from user code; it is exposed for subclasses of
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param proxy: The url of the proxy being used for this request.
    :rtype: dict
    """
    username, password = get_auth_from_url(proxy)
    if not username:
        return {}
    # Credentials embedded in the proxy URL become Basic auth for it.
    return {"Proxy-Authorization": _basic_auth_str(username, password)}
|
| 612 |
+
|
| 613 |
+
def send(
    self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
):
    """Sends PreparedRequest object. Returns Response object.

    :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
    :param stream: (optional) Whether to stream the request content.
    :param timeout: (optional) How long to wait for the server to send
        data before giving up, as a float, or a :ref:`(connect timeout,
        read timeout) <timeouts>` tuple.
    :type timeout: float or tuple or urllib3 Timeout object
    :param verify: (optional) Either a boolean, in which case it controls whether
        we verify the server's TLS certificate, or a string, in which case it
        must be a path to a CA bundle to use
    :param cert: (optional) Any user-provided SSL certificate to be trusted.
    :param proxies: (optional) The proxies dictionary to apply to the request.
    :rtype: requests.Response
    """

    try:
        conn = self.get_connection_with_tls_context(
            request, verify, proxies=proxies, cert=cert
        )
    except LocationValueError as e:
        raise InvalidURL(e, request=request)

    self.cert_verify(conn, request.url, verify, cert)
    url = self.request_url(request, proxies)
    self.add_headers(
        request,
        stream=stream,
        timeout=timeout,
        verify=verify,
        cert=cert,
        proxies=proxies,
    )

    # A body with no Content-Length header is sent chunked.
    chunked = not (request.body is None or "Content-Length" in request.headers)

    # Normalise the timeout argument into a urllib3 Timeout object.
    if isinstance(timeout, tuple):
        try:
            connect, read = timeout
            timeout = TimeoutSauce(connect=connect, read=read)
        except ValueError:
            raise ValueError(
                f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                f"or a single float to set both timeouts to the same value."
            )
    elif isinstance(timeout, TimeoutSauce):
        pass
    else:
        timeout = TimeoutSauce(connect=timeout, read=timeout)

    try:
        resp = conn.urlopen(
            method=request.method,
            url=url,
            body=request.body,
            headers=request.headers,
            redirect=False,
            assert_same_host=False,
            preload_content=False,
            decode_content=False,
            retries=self.max_retries,
            timeout=timeout,
            chunked=chunked,
        )

    # Translate urllib3 exceptions into their requests equivalents so
    # callers only ever see requests.exceptions types.
    except (ProtocolError, OSError) as err:
        raise ConnectionError(err, request=request)

    except MaxRetryError as e:
        if isinstance(e.reason, ConnectTimeoutError):
            # TODO: Remove this in 3.0.0: see #2811
            if not isinstance(e.reason, NewConnectionError):
                raise ConnectTimeout(e, request=request)

        if isinstance(e.reason, ResponseError):
            raise RetryError(e, request=request)

        if isinstance(e.reason, _ProxyError):
            raise ProxyError(e, request=request)

        if isinstance(e.reason, _SSLError):
            # This branch is for urllib3 v1.22 and later.
            raise SSLError(e, request=request)

        raise ConnectionError(e, request=request)

    except ClosedPoolError as e:
        raise ConnectionError(e, request=request)

    except _ProxyError as e:
        raise ProxyError(e)

    except (_SSLError, _HTTPError) as e:
        if isinstance(e, _SSLError):
            # This branch is for urllib3 versions earlier than v1.22
            raise SSLError(e, request=request)
        elif isinstance(e, ReadTimeoutError):
            raise ReadTimeout(e, request=request)
        elif isinstance(e, _InvalidHeader):
            raise InvalidHeader(e, request=request)
        else:
            raise

    return self.build_response(request, resp)
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/api.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.api
|
| 3 |
+
~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module implements the Requests API.
|
| 6 |
+
|
| 7 |
+
:copyright: (c) 2012 by Kenneth Reitz.
|
| 8 |
+
:license: Apache2, see LICENSE for more details.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from . import sessions
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def request(method, url, **kwargs):
    """Construct and send a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content_type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'https://httpbin.org/get')
      >>> req
      <Response [200]>
    """
    # Use a throwaway Session per call; the ``with`` block guarantees it
    # (and its sockets) are closed when we return, which avoids
    # ResourceWarnings that can masquerade as memory leaks.
    session = sessions.Session()
    with session:
        return session.request(method=method, url=url, **kwargs)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def get(url, params=None, **kwargs):
    r"""Send a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("get", url, params=params, **kwargs)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def options(url, **kwargs):
    r"""Send an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("options", url, **kwargs)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def head(url, **kwargs):
    r"""Send a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes. If
        `allow_redirects` is not provided, it will be set to `False` (as
        opposed to the default :meth:`request` behavior).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    # Unlike the other verbs, HEAD does not follow redirects by default.
    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = False
    return request("head", url, **kwargs)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def post(url, data=None, json=None, **kwargs):
    r"""Send a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("post", url, data=data, json=json, **kwargs)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def put(url, data=None, **kwargs):
    r"""Send a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("put", url, data=data, **kwargs)
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
def patch(url, data=None, **kwargs):
    r"""Send a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("patch", url, data=data, **kwargs)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def delete(url, **kwargs):
    r"""Send a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request("delete", url, **kwargs)
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/auth.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.auth
|
| 3 |
+
~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the authentication handlers for Requests.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import hashlib
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import threading
|
| 12 |
+
import time
|
| 13 |
+
import warnings
|
| 14 |
+
from base64 import b64encode
|
| 15 |
+
|
| 16 |
+
from ._internal_utils import to_native_string
|
| 17 |
+
from .compat import basestring, str, urlparse
|
| 18 |
+
from .cookies import extract_cookies_to_jar
|
| 19 |
+
from .utils import parse_dict_header
|
| 20 |
+
|
| 21 |
+
CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
|
| 22 |
+
CONTENT_TYPE_MULTI_PART = "multipart/form-data"
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _basic_auth_str(username, password):
|
| 26 |
+
"""Returns a Basic Auth string."""
|
| 27 |
+
|
| 28 |
+
# "I want us to put a big-ol' comment on top of it that
|
| 29 |
+
# says that this behaviour is dumb but we need to preserve
|
| 30 |
+
# it because people are relying on it."
|
| 31 |
+
# - Lukasa
|
| 32 |
+
#
|
| 33 |
+
# These are here solely to maintain backwards compatibility
|
| 34 |
+
# for things like ints. This will be removed in 3.0.0.
|
| 35 |
+
if not isinstance(username, basestring):
|
| 36 |
+
warnings.warn(
|
| 37 |
+
"Non-string usernames will no longer be supported in Requests "
|
| 38 |
+
"3.0.0. Please convert the object you've passed in ({!r}) to "
|
| 39 |
+
"a string or bytes object in the near future to avoid "
|
| 40 |
+
"problems.".format(username),
|
| 41 |
+
category=DeprecationWarning,
|
| 42 |
+
)
|
| 43 |
+
username = str(username)
|
| 44 |
+
|
| 45 |
+
if not isinstance(password, basestring):
|
| 46 |
+
warnings.warn(
|
| 47 |
+
"Non-string passwords will no longer be supported in Requests "
|
| 48 |
+
"3.0.0. Please convert the object you've passed in ({!r}) to "
|
| 49 |
+
"a string or bytes object in the near future to avoid "
|
| 50 |
+
"problems.".format(type(password)),
|
| 51 |
+
category=DeprecationWarning,
|
| 52 |
+
)
|
| 53 |
+
password = str(password)
|
| 54 |
+
# -- End Removal --
|
| 55 |
+
|
| 56 |
+
if isinstance(username, str):
|
| 57 |
+
username = username.encode("latin1")
|
| 58 |
+
|
| 59 |
+
if isinstance(password, str):
|
| 60 |
+
password = password.encode("latin1")
|
| 61 |
+
|
| 62 |
+
authstr = "Basic " + to_native_string(
|
| 63 |
+
b64encode(b":".join((username, password))).strip()
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
return authstr
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class AuthBase:
    """Common ancestor of every auth implementation; subclasses override ``__call__``."""

    def __call__(self, r):
        # Subclass responsibility: attach credentials to the request object.
        raise NotImplementedError("Auth hooks must be callable.")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        # Equal when the other object carries matching username and password
        # attributes (missing attributes compare against None).
        return all(
            (
                self.username == getattr(other, "username", None),
                self.password == getattr(other, "password", None),
            )
        )

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        # Set the Authorization header on the outgoing request and return it.
        r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
        return r
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same Basic credentials as the parent class, but sent to the proxy
        # via the Proxy-Authorization header instead of Authorization.
        credentials = _basic_auth_str(self.username, self.password)
        r.headers["Proxy-Authorization"] = credentials
        return r
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object.

    Challenge data, nonce counters and body-rewind positions are kept in
    thread-local storage, so a single instance can be shared across threads.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # Keep state in per-thread local storage
        self._thread_local = threading.local()

    def init_per_thread_state(self):
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, "init"):
            self._thread_local.init = True
            # Last server nonce seen; used to decide whether to bump nonce_count.
            self._thread_local.last_nonce = ""
            self._thread_local.nonce_count = 0
            # Parsed WWW-Authenticate challenge parameters (realm, nonce, ...).
            self._thread_local.chal = {}
            # Saved body stream position, for rewinding before a retry.
            self._thread_local.pos = None
            self._thread_local.num_401_calls = None

    def build_digest_header(self, method, url):
        """Build the value of the ``Authorization: Digest ...`` header.

        Uses the challenge stored in ``self._thread_local.chal``; returns
        ``None`` for unsupported algorithms or qop values.

        :rtype: str
        """

        realm = self._thread_local.chal["realm"]
        nonce = self._thread_local.chal["nonce"]
        qop = self._thread_local.chal.get("qop")
        algorithm = self._thread_local.chal.get("algorithm")
        opaque = self._thread_local.chal.get("opaque")
        hash_utf8 = None

        # Normalize the advertised algorithm; MD5 is the default when the
        # server does not specify one.
        if algorithm is None:
            _algorithm = "MD5"
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == "MD5" or _algorithm == "MD5-SESS":

            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.md5(x).hexdigest()

            hash_utf8 = md5_utf8
        elif _algorithm == "SHA":

            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha1(x).hexdigest()

            hash_utf8 = sha_utf8
        elif _algorithm == "SHA-256":

            def sha256_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha256(x).hexdigest()

            hash_utf8 = sha256_utf8
        elif _algorithm == "SHA-512":

            def sha512_utf8(x):
                if isinstance(x, str):
                    x = x.encode("utf-8")
                return hashlib.sha512(x).hexdigest()

            hash_utf8 = sha512_utf8

        # KD(secret, data) = H(secret ":" data), the keyed digest from RFC 2617.
        KD = lambda s, d: hash_utf8(f"{s}:{d}")  # noqa:E731

        # Unknown/unsupported algorithm: signal failure to the caller.
        if hash_utf8 is None:
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += f"?{p_parsed.query}"

        A1 = f"{self.username}:{realm}:{self.password}"
        A2 = f"{method}:{path}"

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        # The nonce count increments while the server keeps reusing the same
        # nonce, and resets to 1 on a fresh nonce.
        if nonce == self._thread_local.last_nonce:
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = f"{self._thread_local.nonce_count:08x}"
        # Client nonce: 16 hex chars hashed from the count, server nonce,
        # current time and 8 random bytes.
        s = str(self._thread_local.nonce_count).encode("utf-8")
        s += nonce.encode("utf-8")
        s += time.ctime().encode("utf-8")
        s += os.urandom(8)

        cnonce = hashlib.sha1(s).hexdigest()[:16]
        if _algorithm == "MD5-SESS":
            HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")

        if not qop:
            respdig = KD(HA1, f"{nonce}:{HA2}")
        elif qop == "auth" or "auth" in qop.split(","):
            noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self._thread_local.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = (
            f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
            f'uri="{path}", response="{respdig}"'
        )
        if opaque:
            base += f', opaque="{opaque}"'
        if algorithm:
            base += f', algorithm="{algorithm}"'
        if entdig:
            base += f', digest="{entdig}"'
        if qop:
            base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'

        return f"Digest {base}"

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self._thread_local.num_401_calls = 1

    def handle_401(self, r, **kwargs):
        """
        Takes the given response and tries digest-auth, if needed.

        :rtype: requests.Response
        """

        # If response is not 4xx, do not auth
        # See https://github.com/psf/requests/issues/3772
        if not 400 <= r.status_code < 500:
            self._thread_local.num_401_calls = 1
            return r

        if self._thread_local.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get("www-authenticate", "")

        # num_401_calls < 2 bounds the retry: at most one authenticated
        # resend per original request.
        if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
            self._thread_local.num_401_calls += 1
            pat = re.compile(r"digest ", flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers["Authorization"] = self.build_digest_header(
                prep.method, prep.url
            )
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        self._thread_local.num_401_calls = 1
        return r

    def __call__(self, r):
        # Initialize per-thread state, if needed
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
        try:
            self._thread_local.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook("response", self.handle_401)
        r.register_hook("response", self.handle_redirect)
        self._thread_local.num_401_calls = 1

        return r

    def __eq__(self, other):
        # Equal when the other object carries matching username and password
        # attributes (missing attributes compare against None).
        return all(
            [
                self.username == getattr(other, "username", None),
                self.password == getattr(other, "password", None),
            ]
        )

    def __ne__(self, other):
        return not self == other
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/certs.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python

"""
requests.certs
~~~~~~~~~~~~~~

This module returns the preferred default CA certificate bundle. There is
only one — the one from the certifi package.

If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""
# pip vendors certifi, so import the vendored copy rather than the top-level package.
from pip._vendor.certifi import where

# Running this module directly prints the CA bundle path (useful for debugging).
if __name__ == "__main__":
    print(where())
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/compat.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.compat
|
| 3 |
+
~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module previously handled import compatibility issues
|
| 6 |
+
between Python 2 and Python 3. It remains for backwards
|
| 7 |
+
compatibility until the next major version.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import sys
|
| 11 |
+
|
| 12 |
+
# -------------------
|
| 13 |
+
# Character Detection
|
| 14 |
+
# -------------------
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def _resolve_char_detection():
|
| 18 |
+
"""Find supported character detection libraries."""
|
| 19 |
+
chardet = None
|
| 20 |
+
return chardet
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
chardet = _resolve_char_detection()
|
| 24 |
+
|
| 25 |
+
# -------
|
| 26 |
+
# Pythons
|
| 27 |
+
# -------
|
| 28 |
+
|
| 29 |
+
# Syntax sugar.
|
| 30 |
+
_ver = sys.version_info
|
| 31 |
+
|
| 32 |
+
#: Python 2.x?
|
| 33 |
+
is_py2 = _ver[0] == 2
|
| 34 |
+
|
| 35 |
+
#: Python 3.x?
|
| 36 |
+
is_py3 = _ver[0] == 3
|
| 37 |
+
|
| 38 |
+
# Note: We've patched out simplejson support in pip because it prevents
|
| 39 |
+
# upgrading simplejson on Windows.
|
| 40 |
+
import json
|
| 41 |
+
from json import JSONDecodeError
|
| 42 |
+
|
| 43 |
+
# Keep OrderedDict for backwards compatibility.
|
| 44 |
+
from collections import OrderedDict
|
| 45 |
+
from collections.abc import Callable, Mapping, MutableMapping
|
| 46 |
+
from http import cookiejar as cookielib
|
| 47 |
+
from http.cookies import Morsel
|
| 48 |
+
from io import StringIO
|
| 49 |
+
|
| 50 |
+
# --------------
|
| 51 |
+
# Legacy Imports
|
| 52 |
+
# --------------
|
| 53 |
+
from urllib.parse import (
|
| 54 |
+
quote,
|
| 55 |
+
quote_plus,
|
| 56 |
+
unquote,
|
| 57 |
+
unquote_plus,
|
| 58 |
+
urldefrag,
|
| 59 |
+
urlencode,
|
| 60 |
+
urljoin,
|
| 61 |
+
urlparse,
|
| 62 |
+
urlsplit,
|
| 63 |
+
urlunparse,
|
| 64 |
+
)
|
| 65 |
+
from urllib.request import (
|
| 66 |
+
getproxies,
|
| 67 |
+
getproxies_environment,
|
| 68 |
+
parse_http_list,
|
| 69 |
+
proxy_bypass,
|
| 70 |
+
proxy_bypass_environment,
|
| 71 |
+
)
|
| 72 |
+
|
| 73 |
+
# Aliases kept for backwards compatibility with code written against the
# old Python 2 / Python 3 compatibility layer.
builtin_str = str  # the built-in string type
str = str
bytes = bytes
basestring = (str, bytes)  # Python 2 ``basestring`` stand-in for isinstance checks
numeric_types = (int, float)
integer_types = (int,)
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/cookies.py
ADDED
|
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.cookies
|
| 3 |
+
~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
|
| 6 |
+
|
| 7 |
+
requests.utils imports from here, so be careful with imports.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import calendar
|
| 11 |
+
import copy
|
| 12 |
+
import time
|
| 13 |
+
|
| 14 |
+
from ._internal_utils import to_native_string
|
| 15 |
+
from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
|
| 16 |
+
|
| 17 |
+
try:
|
| 18 |
+
import threading
|
| 19 |
+
except ImportError:
|
| 20 |
+
import dummy_threading as threading
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class MockRequest:
    """Adapt a `requests.Request` to the `urllib2.Request` interface.

    `http.cookiejar.CookieJar` drives its cookie-policy decisions through
    this interface, i.e. whether a cookie may be set given the domains of
    the request and the cookie.

    The wrapped request is treated as read-only; headers the cookiejar wants
    to add are collected separately and exposed via `get_new_headers()`.
    `get_cookie_header`, defined below, is the usual consumer.
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Without a user-supplied Host header the request URL is authoritative.
        host_header = self._r.headers.get("Host")
        if not host_header:
            return self._r.url
        # Otherwise rebuild the URL around the explicit Host value so the
        # cookiejar sees the domain the user intended.
        host = to_native_string(host_header, encoding="utf-8")
        parsed = urlparse(self._r.url)
        return urlunparse(
            [
                parsed.scheme,
                host,
                parsed.path,
                parsed.params,
                parsed.query,
                parsed.fragment,
            ]
        )

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        # Original request headers take precedence over newly added ones.
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookiejar has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError(
            "Cookie headers should be added with add_unredirected_header()"
        )

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class MockResponse:
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `http.cookiejar` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookiejar` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        """Return the header container, as `http.cookiejar` expects."""
        return self._headers

    def getheaders(self, name):
        """Return all values for header *name* from the wrapped message.

        Fix: previously the underlying ``getheaders`` result was computed
        and then discarded (no ``return``), so this always returned ``None``.
        """
        return self._headers.getheaders(name)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # Only urllib3 responses wrapping an httplib.HTTPResponse carry parsed
    # headers we can harvest cookies from; bail out otherwise.
    original = getattr(response, "_original_response", None)
    if not original:
        return
    req = MockRequest(request)
    # Hand the wrapped response's HTTPMessage (the headers) to the jar.
    res = MockResponse(original.msg)
    jar.extract_cookies(res, req)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    # Let the jar write its Cookie header onto a mock request, then read it back.
    mock = MockRequest(request)
    jar.add_cookie_header(mock)
    return mock.get_new_headers().get("Cookie")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: clear() mutates the jar, so it must not be
    # called while iterating.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]
    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class CookieConflictError(RuntimeError):
    """Raised when more than one cookie in the jar matches the given criteria.

    Use .get and .set and include domain and path args in order to be more specific.
    """
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
|
| 177 |
+
"""Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
|
| 178 |
+
interface.
|
| 179 |
+
|
| 180 |
+
This is the CookieJar we create by default for requests and sessions that
|
| 181 |
+
don't specify one, since some clients may expect response.cookies and
|
| 182 |
+
session.cookies to support dict operations.
|
| 183 |
+
|
| 184 |
+
Requests does not use the dict interface internally; it's just for
|
| 185 |
+
compatibility with external client code. All requests code should work
|
| 186 |
+
out of the box with externally provided instances of ``CookieJar``, e.g.
|
| 187 |
+
``LWPCookieJar`` and ``FileCookieJar``.
|
| 188 |
+
|
| 189 |
+
Unlike a regular CookieJar, this class is pickleable.
|
| 190 |
+
|
| 191 |
+
.. warning:: dictionary operations that are normally O(1) may be O(n).
|
| 192 |
+
"""
|
| 193 |
+
|
| 194 |
+
def get(self, name, default=None, domain=None, path=None):
|
| 195 |
+
"""Dict-like get() that also supports optional domain and path args in
|
| 196 |
+
order to resolve naming collisions from using one cookie jar over
|
| 197 |
+
multiple domains.
|
| 198 |
+
|
| 199 |
+
.. warning:: operation is O(n), not O(1).
|
| 200 |
+
"""
|
| 201 |
+
try:
|
| 202 |
+
return self._find_no_duplicates(name, domain, path)
|
| 203 |
+
except KeyError:
|
| 204 |
+
return default
|
| 205 |
+
|
| 206 |
+
def set(self, name, value, **kwargs):
|
| 207 |
+
"""Dict-like set() that also supports optional domain and path args in
|
| 208 |
+
order to resolve naming collisions from using one cookie jar over
|
| 209 |
+
multiple domains.
|
| 210 |
+
"""
|
| 211 |
+
# support client code that unsets cookies by assignment of a None value:
|
| 212 |
+
if value is None:
|
| 213 |
+
remove_cookie_by_name(
|
| 214 |
+
self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
|
| 215 |
+
)
|
| 216 |
+
return
|
| 217 |
+
|
| 218 |
+
if isinstance(value, Morsel):
|
| 219 |
+
c = morsel_to_cookie(value)
|
| 220 |
+
else:
|
| 221 |
+
c = create_cookie(name, value, **kwargs)
|
| 222 |
+
self.set_cookie(c)
|
| 223 |
+
return c
|
| 224 |
+
|
| 225 |
+
def iterkeys(self):
|
| 226 |
+
"""Dict-like iterkeys() that returns an iterator of names of cookies
|
| 227 |
+
from the jar.
|
| 228 |
+
|
| 229 |
+
.. seealso:: itervalues() and iteritems().
|
| 230 |
+
"""
|
| 231 |
+
for cookie in iter(self):
|
| 232 |
+
yield cookie.name
|
| 233 |
+
|
| 234 |
+
def keys(self):
|
| 235 |
+
"""Dict-like keys() that returns a list of names of cookies from the
|
| 236 |
+
jar.
|
| 237 |
+
|
| 238 |
+
.. seealso:: values() and items().
|
| 239 |
+
"""
|
| 240 |
+
return list(self.iterkeys())
|
| 241 |
+
|
| 242 |
+
def itervalues(self):
|
| 243 |
+
"""Dict-like itervalues() that returns an iterator of values of cookies
|
| 244 |
+
from the jar.
|
| 245 |
+
|
| 246 |
+
.. seealso:: iterkeys() and iteritems().
|
| 247 |
+
"""
|
| 248 |
+
for cookie in iter(self):
|
| 249 |
+
yield cookie.value
|
| 250 |
+
|
| 251 |
+
def values(self):
|
| 252 |
+
"""Dict-like values() that returns a list of values of cookies from the
|
| 253 |
+
jar.
|
| 254 |
+
|
| 255 |
+
.. seealso:: keys() and items().
|
| 256 |
+
"""
|
| 257 |
+
return list(self.itervalues())
|
| 258 |
+
|
| 259 |
+
def iteritems(self):
|
| 260 |
+
"""Dict-like iteritems() that returns an iterator of name-value tuples
|
| 261 |
+
from the jar.
|
| 262 |
+
|
| 263 |
+
.. seealso:: iterkeys() and itervalues().
|
| 264 |
+
"""
|
| 265 |
+
for cookie in iter(self):
|
| 266 |
+
yield cookie.name, cookie.value
|
| 267 |
+
|
| 268 |
+
def items(self):
|
| 269 |
+
"""Dict-like items() that returns a list of name-value tuples from the
|
| 270 |
+
jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
|
| 271 |
+
vanilla python dict of key value pairs.
|
| 272 |
+
|
| 273 |
+
.. seealso:: keys() and values().
|
| 274 |
+
"""
|
| 275 |
+
return list(self.iteritems())
|
| 276 |
+
|
| 277 |
+
def list_domains(self):
|
| 278 |
+
"""Utility method to list all the domains in the jar."""
|
| 279 |
+
domains = []
|
| 280 |
+
for cookie in iter(self):
|
| 281 |
+
if cookie.domain not in domains:
|
| 282 |
+
domains.append(cookie.domain)
|
| 283 |
+
return domains
|
| 284 |
+
|
| 285 |
+
def list_paths(self):
|
| 286 |
+
"""Utility method to list all the paths in the jar."""
|
| 287 |
+
paths = []
|
| 288 |
+
for cookie in iter(self):
|
| 289 |
+
if cookie.path not in paths:
|
| 290 |
+
paths.append(cookie.path)
|
| 291 |
+
return paths
|
| 292 |
+
|
| 293 |
+
def multiple_domains(self):
|
| 294 |
+
"""Returns True if there are multiple domains in the jar.
|
| 295 |
+
Returns False otherwise.
|
| 296 |
+
|
| 297 |
+
:rtype: bool
|
| 298 |
+
"""
|
| 299 |
+
domains = []
|
| 300 |
+
for cookie in iter(self):
|
| 301 |
+
if cookie.domain is not None and cookie.domain in domains:
|
| 302 |
+
return True
|
| 303 |
+
domains.append(cookie.domain)
|
| 304 |
+
return False # there is only one domain in jar
|
| 305 |
+
|
| 306 |
+
def get_dict(self, domain=None, path=None):
    """Takes as an argument an optional domain and path and returns a plain
    old Python dict of name-value pairs of cookies that meet the
    requirements.

    :rtype: dict
    """
    # Later matches overwrite earlier ones, same as the loop-based original.
    return {
        cookie.name: cookie.value
        for cookie in iter(self)
        if (domain is None or cookie.domain == domain)
        and (path is None or cookie.path == path)
    }
|
| 320 |
+
|
| 321 |
+
def __contains__(self, name):
    """Dict-like membership test by cookie name."""
    try:
        return super().__contains__(name)
    except CookieConflictError:
        # Several cookies share this name; the name is still "contained".
        return True
|
| 326 |
+
|
| 327 |
+
def __getitem__(self, name):
    """Dict-like __getitem__() for compatibility with client code. Throws
    exception if there are more than one cookie with name. In that case,
    use the more explicit get() method instead.

    .. warning:: operation is O(n), not O(1).
    """
    return self._find_no_duplicates(name)
|
| 335 |
+
|
| 336 |
+
def __setitem__(self, name, value):
    """Dict-like __setitem__ for compatibility with client code. Throws
    exception if there is already a cookie of that name in the jar. In that
    case, use the more explicit set() method instead.
    """
    self.set(name, value)
|
| 342 |
+
|
| 343 |
+
def __delitem__(self, name):
    """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
    ``remove_cookie_by_name()``.
    """
    remove_cookie_by_name(self, name)
|
| 348 |
+
|
| 349 |
+
def set_cookie(self, cookie, *args, **kwargs):
    """Add *cookie* to the jar, stripping escaped quotes from a
    double-quoted string value first."""
    value = cookie.value
    # Only string-like values (having .startswith) are inspected.
    if (
        hasattr(value, "startswith")
        and value.startswith('"')
        and value.endswith('"')
    ):
        cookie.value = value.replace('\\"', "")
    return super().set_cookie(cookie, *args, **kwargs)
|
| 357 |
+
|
| 358 |
+
def update(self, other):
    """Updates this jar with cookies from another CookieJar or dict-like"""
    if not isinstance(other, cookielib.CookieJar):
        # Dict-like input: defer to the dict-style update.
        super().update(other)
        return
    # Copy each cookie so the two jars do not share mutable objects.
    for incoming in other:
        self.set_cookie(copy.copy(incoming))
|
| 365 |
+
|
| 366 |
+
def _find(self, name, domain=None, path=None):
    """Requests uses this method internally to get cookie values.

    If there are conflicting cookies, _find arbitrarily chooses one.
    See _find_no_duplicates if you want an exception thrown if there are
    conflicting cookies.

    :param name: a string containing name of cookie
    :param domain: (optional) string containing domain of cookie
    :param path: (optional) string containing path of cookie
    :return: cookie.value
    """
    for cookie in iter(self):
        if (
            cookie.name == name
            and (domain is None or cookie.domain == domain)
            and (path is None or cookie.path == path)
        ):
            # First match wins.
            return cookie.value

    raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
|
| 385 |
+
|
| 386 |
+
def _find_no_duplicates(self, name, domain=None, path=None):
    """Both ``__get_item__`` and ``get`` call this function: it's never
    used elsewhere in Requests.

    :param name: a string containing name of cookie
    :param domain: (optional) string containing domain of cookie
    :param path: (optional) string containing path of cookie
    :raises KeyError: if cookie is not found
    :raises CookieConflictError: if there are multiple cookies
        that match name and optionally domain and path
    :return: cookie.value
    """
    match = None
    for cookie in iter(self):
        if (
            cookie.name == name
            and (domain is None or cookie.domain == domain)
            and (path is None or cookie.path == path)
        ):
            if match is not None:
                # More than one cookie satisfies the criteria.
                raise CookieConflictError(
                    f"There are multiple cookies with name, {name!r}"
                )
            # Returned at the end as long as no conflict arises.
            match = cookie.value

    # NOTE(review): a matched cookie with a falsy value (e.g. "") still
    # falls through to KeyError — preserved as-is from the original.
    if match:
        return match
    raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")
|
| 414 |
+
|
| 415 |
+
def __getstate__(self):
    """Unlike a normal CookieJar, this class is pickleable."""
    state = dict(self.__dict__)
    # RLock objects cannot be pickled; __setstate__ recreates the lock.
    del state["_cookies_lock"]
    return state
|
| 421 |
+
|
| 422 |
+
def __setstate__(self, state):
    """Unlike a normal CookieJar, this class is pickleable."""
    self.__dict__.update(state)
    if "_cookies_lock" not in self.__dict__:
        # The lock was removed by __getstate__; recreate it.
        self._cookies_lock = threading.RLock()
|
| 427 |
+
|
| 428 |
+
def copy(self):
    """Return a copy of this RequestsCookieJar."""
    duplicate = RequestsCookieJar()
    duplicate.set_policy(self.get_policy())
    duplicate.update(self)
    return duplicate
|
| 434 |
+
|
| 435 |
+
def get_policy(self):
    """Return the CookiePolicy instance used."""
    return self._policy
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def _copy_cookie_jar(jar):
|
| 441 |
+
if jar is None:
|
| 442 |
+
return None
|
| 443 |
+
|
| 444 |
+
if hasattr(jar, "copy"):
|
| 445 |
+
# We're dealing with an instance of RequestsCookieJar
|
| 446 |
+
return jar.copy()
|
| 447 |
+
# We're dealing with a generic CookieJar instance
|
| 448 |
+
new_jar = copy.copy(jar)
|
| 449 |
+
new_jar.clear()
|
| 450 |
+
for cookie in jar:
|
| 451 |
+
new_jar.set_cookie(copy.copy(cookie))
|
| 452 |
+
return new_jar
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    spec = {
        "version": 0,
        "name": name,
        "value": value,
        "port": None,
        "domain": "",
        "path": "/",
        "secure": False,
        "expires": None,
        "discard": True,
        "comment": None,
        "comment_url": None,
        "rest": {"HttpOnly": None},
        "rfc2109": False,
    }

    unknown = set(kwargs) - set(spec)
    if unknown:
        raise TypeError(
            f"create_cookie() got unexpected keyword arguments: {list(unknown)}"
        )

    spec.update(kwargs)
    # Derive the *_specified flags the Cookie constructor requires.
    spec["port_specified"] = bool(spec["port"])
    spec["domain_specified"] = bool(spec["domain"])
    spec["domain_initial_dot"] = spec["domain"].startswith(".")
    spec["path_specified"] = bool(spec["path"])

    return cookielib.Cookie(**spec)
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    expires = None
    if morsel["max-age"]:
        # max-age takes precedence; it is a relative offset in seconds.
        try:
            expires = int(time.time() + int(morsel["max-age"]))
        except ValueError:
            raise TypeError(f"max-age: {morsel['max-age']} must be integer")
    elif morsel["expires"]:
        # expires is an absolute RFC-1123-style timestamp.
        fmt = "%a, %d-%b-%Y %H:%M:%S GMT"
        expires = calendar.timegm(time.strptime(morsel["expires"], fmt))

    return create_cookie(
        comment=morsel["comment"],
        comment_url=bool(morsel["comment"]),
        discard=False,
        domain=morsel["domain"],
        expires=expires,
        name=morsel.key,
        path=morsel["path"],
        port=None,
        rest={"HttpOnly": morsel["httponly"]},
        rfc2109=False,
        secure=bool(morsel["secure"]),
        value=morsel.value,
        version=morsel["version"] or 0,
    )
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    :rtype: CookieJar
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is None:
        return cookiejar

    # Snapshot existing names once; a set gives O(1) membership tests.
    existing = {cookie.name for cookie in cookiejar}
    for name in cookie_dict:
        if overwrite or name not in existing:
            cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))

    return cookiejar
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError("You can only merge into CookieJar")

    if isinstance(cookies, dict):
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJar has no update(); add the cookies one by one.
            for incoming in cookies:
                cookiejar.set_cookie(incoming)

    return cookiejar
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/exceptions.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.exceptions
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the set of Requests' exceptions.
|
| 6 |
+
"""
|
| 7 |
+
from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError
|
| 8 |
+
|
| 9 |
+
from .compat import JSONDecodeError as CompatJSONDecodeError
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        self.response = kwargs.pop("response", None)
        self.request = kwargs.pop("request", None)
        if (
            self.response is not None
            and not self.request
            and hasattr(self.response, "request")
        ):
            # Borrow the request that produced the response when none given.
            self.request = self.response.request
        super().__init__(*args, **kwargs)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class InvalidJSONError(RequestException):
    """A JSON error occurred."""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
    """Couldn't decode the text into json"""

    def __init__(self, *args, **kwargs):
        """
        Construct the JSONDecodeError instance first with all
        args. Then use it's args to construct the IOError so that
        the json specific args aren't used as IOError specific args
        and the error message from JSONDecodeError is preserved.
        """
        # Order matters: CompatJSONDecodeError sets self.args/msg first,
        # then InvalidJSONError (IOError) is built from those same args.
        CompatJSONDecodeError.__init__(self, *args)
        InvalidJSONError.__init__(self, *self.args, **kwargs)

    def __reduce__(self):
        """
        The __reduce__ method called when pickling the object must
        be the one from the JSONDecodeError (be it json/simplejson)
        as it expects all the arguments for instantiation, not just
        one like the IOError, and the MRO would by default call the
        __reduce__ method from the IOError due to the inheritance order.
        """
        return CompatJSONDecodeError.__reduce__(self)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class HTTPError(RequestException):
    """An HTTP error occurred."""


class ConnectionError(RequestException):
    """A Connection error occurred."""


class ProxyError(ConnectionError):
    """A proxy error occurred."""


class SSLError(ConnectionError):
    """An SSL error occurred."""


class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """


class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """


class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""


class URLRequired(RequestException):
    """A valid URL is required to make a request."""


class TooManyRedirects(RequestException):
    """Too many redirects."""


class MissingSchema(RequestException, ValueError):
    """The URL scheme (e.g. http or https) is missing."""


class InvalidSchema(RequestException, ValueError):
    """The URL scheme provided is either invalid or unsupported."""


class InvalidURL(RequestException, ValueError):
    """The URL provided was somehow invalid."""


class InvalidHeader(RequestException, ValueError):
    """The header value provided was somehow invalid."""


class InvalidProxyURL(InvalidURL):
    """The proxy URL provided is invalid."""


class ChunkedEncodingError(RequestException):
    """The server declared chunked encoding but sent an invalid chunk."""


class ContentDecodingError(RequestException, BaseHTTPError):
    """Failed to decode response content."""


class StreamConsumedError(RequestException, TypeError):
    """The content for this response was already consumed."""


class RetryError(RequestException):
    """Custom retries logic failed"""


class UnrewindableBodyError(RequestException):
    """Requests encountered an error when trying to rewind a body."""


# Warnings


class RequestsWarning(Warning):
    """Base warning for Requests."""


class FileModeWarning(RequestsWarning, DeprecationWarning):
    """A file was opened in text mode, but Requests determined its binary length."""


class RequestsDependencyWarning(RequestsWarning):
    """An imported dependency doesn't match the expected version range."""
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/help.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Module containing bug report helper(s)."""
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import platform
|
| 5 |
+
import ssl
|
| 6 |
+
import sys
|
| 7 |
+
|
| 8 |
+
from pip._vendor import idna
|
| 9 |
+
from pip._vendor import urllib3
|
| 10 |
+
|
| 11 |
+
from . import __version__ as requests_version
|
| 12 |
+
|
| 13 |
+
charset_normalizer = None
|
| 14 |
+
chardet = None
|
| 15 |
+
|
| 16 |
+
try:
|
| 17 |
+
from pip._vendor.urllib3.contrib import pyopenssl
|
| 18 |
+
except ImportError:
|
| 19 |
+
pyopenssl = None
|
| 20 |
+
OpenSSL = None
|
| 21 |
+
cryptography = None
|
| 22 |
+
else:
|
| 23 |
+
import cryptography
|
| 24 |
+
import OpenSSL
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _implementation():
|
| 28 |
+
"""Return a dict with the Python implementation and version.
|
| 29 |
+
|
| 30 |
+
Provide both the name and the version of the Python implementation
|
| 31 |
+
currently running. For example, on CPython 3.10.3 it will return
|
| 32 |
+
{'name': 'CPython', 'version': '3.10.3'}.
|
| 33 |
+
|
| 34 |
+
This function works best on CPython and PyPy: in particular, it probably
|
| 35 |
+
doesn't work for Jython or IronPython. Future investigation should be done
|
| 36 |
+
to work out the correct shape of the code for those platforms.
|
| 37 |
+
"""
|
| 38 |
+
implementation = platform.python_implementation()
|
| 39 |
+
|
| 40 |
+
if implementation == "CPython":
|
| 41 |
+
implementation_version = platform.python_version()
|
| 42 |
+
elif implementation == "PyPy":
|
| 43 |
+
implementation_version = "{}.{}.{}".format(
|
| 44 |
+
sys.pypy_version_info.major,
|
| 45 |
+
sys.pypy_version_info.minor,
|
| 46 |
+
sys.pypy_version_info.micro,
|
| 47 |
+
)
|
| 48 |
+
if sys.pypy_version_info.releaselevel != "final":
|
| 49 |
+
implementation_version = "".join(
|
| 50 |
+
[implementation_version, sys.pypy_version_info.releaselevel]
|
| 51 |
+
)
|
| 52 |
+
elif implementation == "Jython":
|
| 53 |
+
implementation_version = platform.python_version() # Complete Guess
|
| 54 |
+
elif implementation == "IronPython":
|
| 55 |
+
implementation_version = platform.python_version() # Complete Guess
|
| 56 |
+
else:
|
| 57 |
+
implementation_version = "Unknown"
|
| 58 |
+
|
| 59 |
+
return {"name": implementation, "version": implementation_version}
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def info():
    """Generate information for a bug report."""
    try:
        platform_info = {
            "system": platform.system(),
            "release": platform.release(),
        }
    except OSError:
        # Some exotic platforms raise when probing system details.
        platform_info = {"system": "Unknown", "release": "Unknown"}

    implementation_info = _implementation()
    urllib3_info = {"version": urllib3.__version__}

    charset_normalizer_info = {
        "version": charset_normalizer.__version__ if charset_normalizer else None
    }
    chardet_info = {"version": chardet.__version__ if chardet else None}

    pyopenssl_info = {"version": None, "openssl_version": ""}
    if OpenSSL:
        pyopenssl_info = {
            "version": OpenSSL.__version__,
            "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
        }
    cryptography_info = {"version": getattr(cryptography, "__version__", "")}
    idna_info = {"version": getattr(idna, "__version__", "")}

    system_ssl = ssl.OPENSSL_VERSION_NUMBER
    system_ssl_info = {
        "version": f"{system_ssl:x}" if system_ssl is not None else ""
    }

    return {
        "platform": platform_info,
        "implementation": implementation_info,
        "system_ssl": system_ssl_info,
        "using_pyopenssl": pyopenssl is not None,
        "using_charset_normalizer": chardet is None,
        "pyOpenSSL": pyopenssl_info,
        "urllib3": urllib3_info,
        "chardet": chardet_info,
        "charset_normalizer": charset_normalizer_info,
        "cryptography": cryptography_info,
        "idna": idna_info,
        "requests": {
            "version": requests_version,
        },
    }
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def main():
    """Pretty-print the bug information as JSON."""
    print(json.dumps(info(), sort_keys=True, indent=2))
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
if __name__ == "__main__":
|
| 127 |
+
main()
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/hooks.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.hooks
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module provides the capabilities for the Requests hooks system.
|
| 6 |
+
|
| 7 |
+
Available hooks:
|
| 8 |
+
|
| 9 |
+
``response``:
|
| 10 |
+
The response generated from a Request.
|
| 11 |
+
"""
|
| 12 |
+
# Hook events Requests knows how to dispatch; "response" is the only one.
HOOKS = ["response"]


def default_hooks():
    """Return a fresh hook registry: one independent empty list per event."""
    return {event: [] for event in HOOKS}
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# TODO: response is the only one
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def dispatch_hook(key, hooks, hook_data, **kwargs):
    """Dispatches a hook dictionary on a given piece of data."""
    registered = (hooks or {}).get(key)
    if not registered:
        return hook_data

    if hasattr(registered, "__call__"):
        # A single callable is treated as a one-element hook list.
        registered = [registered]

    for hook in registered:
        result = hook(hook_data, **kwargs)
        # A hook returning None leaves the data unchanged.
        if result is not None:
            hook_data = result
    return hook_data
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/models.py
ADDED
|
@@ -0,0 +1,1037 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.models
|
| 3 |
+
~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the primary objects that power Requests.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import datetime
|
| 9 |
+
|
| 10 |
+
# Import encoding now, to avoid implicit import later.
|
| 11 |
+
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
|
| 12 |
+
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
|
| 13 |
+
import encodings.idna # noqa: F401
|
| 14 |
+
from io import UnsupportedOperation
|
| 15 |
+
|
| 16 |
+
from pip._vendor.urllib3.exceptions import (
|
| 17 |
+
DecodeError,
|
| 18 |
+
LocationParseError,
|
| 19 |
+
ProtocolError,
|
| 20 |
+
ReadTimeoutError,
|
| 21 |
+
SSLError,
|
| 22 |
+
)
|
| 23 |
+
from pip._vendor.urllib3.fields import RequestField
|
| 24 |
+
from pip._vendor.urllib3.filepost import encode_multipart_formdata
|
| 25 |
+
from pip._vendor.urllib3.util import parse_url
|
| 26 |
+
|
| 27 |
+
from ._internal_utils import to_native_string, unicode_is_ascii
|
| 28 |
+
from .auth import HTTPBasicAuth
|
| 29 |
+
from .compat import (
|
| 30 |
+
Callable,
|
| 31 |
+
JSONDecodeError,
|
| 32 |
+
Mapping,
|
| 33 |
+
basestring,
|
| 34 |
+
builtin_str,
|
| 35 |
+
chardet,
|
| 36 |
+
cookielib,
|
| 37 |
+
)
|
| 38 |
+
from .compat import json as complexjson
|
| 39 |
+
from .compat import urlencode, urlsplit, urlunparse
|
| 40 |
+
from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
|
| 41 |
+
from .exceptions import (
|
| 42 |
+
ChunkedEncodingError,
|
| 43 |
+
ConnectionError,
|
| 44 |
+
ContentDecodingError,
|
| 45 |
+
HTTPError,
|
| 46 |
+
InvalidJSONError,
|
| 47 |
+
InvalidURL,
|
| 48 |
+
)
|
| 49 |
+
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
|
| 50 |
+
from .exceptions import MissingSchema
|
| 51 |
+
from .exceptions import SSLError as RequestsSSLError
|
| 52 |
+
from .exceptions import StreamConsumedError
|
| 53 |
+
from .hooks import default_hooks
|
| 54 |
+
from .status_codes import codes
|
| 55 |
+
from .structures import CaseInsensitiveDict
|
| 56 |
+
from .utils import (
|
| 57 |
+
check_header_validity,
|
| 58 |
+
get_auth_from_url,
|
| 59 |
+
guess_filename,
|
| 60 |
+
guess_json_utf,
|
| 61 |
+
iter_slices,
|
| 62 |
+
parse_header_links,
|
| 63 |
+
requote_uri,
|
| 64 |
+
stream_decode_response_unicode,
|
| 65 |
+
super_len,
|
| 66 |
+
to_key_val_list,
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,  # 301
    codes.found,  # 302
    codes.other,  # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

# NOTE(review): presumably the cap on redirects followed per request —
# consumed elsewhere in the package; confirm against sessions.py.
DEFAULT_REDIRECT_LIMIT = 30
#: Chunk size (bytes) used by Response.content when draining the body.
CONTENT_CHUNK_SIZE = 10 * 1024
#: Default chunk size (bytes) used by Response.iter_lines().
ITER_CHUNK_SIZE = 512
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class RequestEncodingMixin:
    @property
    def path_url(self):
        """Build the path URL to use."""
        parts = urlsplit(self.url)
        # A bare domain still needs a path component.
        path = parts.path or "/"
        if parts.query:
            return f"{path}?{parts.query}"
        return path

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """
        # Strings, bytes, and file-like objects pass through untouched.
        if isinstance(data, (str, bytes)):
            return data
        if hasattr(data, "read"):
            return data
        if not hasattr(data, "__iter__"):
            return data

        encoded = []
        for key, values in to_key_val_list(data):
            # Treat a scalar (or string) value as a one-element sequence.
            if isinstance(values, basestring) or not hasattr(values, "__iter__"):
                values = [values]
            encoded.extend(
                (
                    key.encode("utf-8") if isinstance(key, str) else key,
                    value.encode("utf-8") if isinstance(value, str) else value,
                )
                for value in values
                if value is not None
            )
        return urlencode(encoded, doseq=True)

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        """
        if not files:
            raise ValueError("Files must be provided.")
        if isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        multipart_fields = []

        # Plain form fields come first, normalized to (text-name, bytes-value).
        for name, value in to_key_val_list(data or {}):
            if isinstance(value, basestring) or not hasattr(value, "__iter__"):
                value = [value]
            for item in value:
                if item is None:
                    continue
                # Don't call str() on bytestrings: in Py3 it all goes wrong.
                if not isinstance(item, bytes):
                    item = str(item)
                multipart_fields.append(
                    (
                        name.decode("utf-8") if isinstance(name, bytes) else name,
                        item.encode("utf-8") if isinstance(item, str) else item,
                    )
                )

        # File fields follow; each may carry an explicit filename,
        # content-type and custom headers via the tuple forms.
        for name, value in to_key_val_list(files or {}):
            content_type = None
            custom_headers = None
            if isinstance(value, (tuple, list)):
                if len(value) == 2:
                    filename, fileobj = value
                elif len(value) == 3:
                    filename, fileobj, content_type = value
                else:
                    filename, fileobj, content_type, custom_headers = value
            else:
                filename = guess_filename(value) or name
                fileobj = value

            if fileobj is None:
                continue
            # Raw text/bytes payloads are used as-is; readable objects are
            # drained; anything else is passed through for urllib3 to handle.
            if isinstance(fileobj, (str, bytes, bytearray)) or not hasattr(
                fileobj, "read"
            ):
                file_data = fileobj
            else:
                file_data = fileobj.read()

            field = RequestField(
                name=name, data=file_data, filename=filename, headers=custom_headers
            )
            field.make_multipart(content_type=content_type)
            multipart_fields.append(field)

        # Returns the (body, content_type) pair produced by urllib3.
        return encode_multipart_formdata(multipart_fields)
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class RequestHooksMixin:
    def register_hook(self, event, hook):
        """Properly register a hook."""
        if event not in self.hooks:
            raise ValueError(f'Unsupported event specified, with event name "{event}"')

        registered = self.hooks[event]
        if isinstance(hook, Callable):
            registered.append(hook)
        elif hasattr(hook, "__iter__"):
            # An iterable of hooks: keep only the callable entries.
            registered.extend(h for h in hook if isinstance(h, Callable))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """
        try:
            self.hooks[event].remove(hook)
        except ValueError:
            # list.remove raises ValueError when the hook isn't registered.
            return False
        return True
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        # Start from the framework's default hook table, then fold in any
        # user-supplied hooks via the mixin's validation logic.
        self.hooks = default_hooks()
        for event, hook in list(({} if hooks is None else hooks).items()):
            self.register_hook(event=event, hook=hook)

        self.method = method
        self.url = url
        # Collection-style parameters default to empty containers.
        self.headers = {} if headers is None else headers
        self.files = [] if files is None else files
        self.data = [] if data is None else data
        self.json = json
        self.params = {} if params is None else params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return "<Request [{}]>".format(self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Instances are generated from a :class:`Request <Request>` object, and
    should not be instantiated manually; doing so may produce undesirable
    effects.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        """Prepares the entire request with the given parameters.

        The individual ``prepare_*`` steps run in a deliberate order:
        headers and cookies must exist before the body is built, and auth
        runs after the body so authenticators can sign the final payload.
        """

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return f"<PreparedRequest [{self.method}]>"

    def copy(self):
        # Shallow-ish duplicate: headers and the cookie jar are copied, but
        # the body object and hooks dict are shared with the original.
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            # HTTP method tokens are upper-cased and coerced to native str.
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        # Imported lazily: idna is only needed when a host contains
        # non-ASCII characters.
        from pip._vendor import idna

        try:
            host = idna.encode(host, uts46=True).decode("utf-8")
        except idna.IDNAError:
            # Normalized to UnicodeError so prepare_url can handle one type.
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode("utf8")
        else:
            url = str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ":" in url and not url.lower().startswith("http"):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            raise MissingSchema(
                f"Invalid URL {url!r}: No scheme supplied. "
                f"Perhaps you meant https://{url}?"
            )

        if not host:
            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL("URL has an invalid label.")
        elif host.startswith(("*", ".")):
            raise InvalidURL("URL has an invalid label.")

        # Carefully reconstruct the network location
        netloc = auth or ""
        if netloc:
            netloc += "@"
        netloc += host
        if port:
            netloc += f":{port}"

        # Bare domains aren't valid URLs.
        if not path:
            path = "/"

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        # Merge explicit params into any query string already in the URL.
        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = f"{query}&{enc_params}"
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data.

        Precedence: a non-empty ``data`` wins over ``json``; ``files``
        forces multipart encoding; a non-sequence iterable in ``data`` is
        treated as a streamed body.
        """

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = "application/json"

            try:
                body = complexjson.dumps(json, allow_nan=False)
            except ValueError as ve:
                raise InvalidJSONError(ve, request=self)

            if not isinstance(body, bytes):
                body = body.encode("utf-8")

        # "Stream" = any iterable that isn't a string, list, tuple or mapping
        # (e.g. a generator or file object).
        is_stream = all(
            [
                hasattr(data, "__iter__"),
                not isinstance(data, (basestring, list, tuple, Mapping)),
            ]
        )

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, "tell", None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except OSError:
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError(
                    "Streamed bodies and files are mutually exclusive."
                )

            if length:
                self.headers["Content-Length"] = builtin_str(length)
            else:
                self.headers["Transfer-Encoding"] = "chunked"
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    # Pre-encoded strings / file objects carry no implied
                    # content type; form data does.
                    if isinstance(data, basestring) or hasattr(data, "read"):
                        content_type = None
                    else:
                        content_type = "application/x-www-form-urlencoded"

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ("content-type" not in self.headers):
                self.headers["Content-Type"] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers["Content-Length"] = builtin_str(length)
        elif (
            self.method not in ("GET", "HEAD")
            and self.headers.get("Content-Length") is None
        ):
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers["Content-Length"] = "0"

    def prepare_auth(self, auth, url=""):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers["Cookie"] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
class Response:
|
| 641 |
+
"""The :class:`Response <Response>` object, which contains a
|
| 642 |
+
server's response to an HTTP request.
|
| 643 |
+
"""
|
| 644 |
+
|
| 645 |
+
    #: Attribute names persisted by ``__getstate__``/``__setstate__`` when a
    #: Response is pickled or copied. ``raw`` is deliberately absent: live
    #: connections cannot be serialized.
    __attrs__ = [
        "_content",
        "status_code",
        "headers",
        "url",
        "history",
        "encoding",
        "reason",
        "cookies",
        "elapsed",
        "request",
    ]
|
| 657 |
+
|
| 658 |
+
    def __init__(self):
        # ``False`` is a sentinel meaning "body not read yet"; once consumed
        # this holds the cached bytes (or None).
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        #: This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None
|
| 704 |
+
|
| 705 |
+
    def __enter__(self):
        # Context-manager support: the response itself is the managed object.
        return self
|
| 707 |
+
|
| 708 |
+
    def __exit__(self, *args):
        # Release the underlying connection when leaving the ``with`` block.
        self.close()
|
| 710 |
+
|
| 711 |
+
def __getstate__(self):
|
| 712 |
+
# Consume everything; accessing the content attribute makes
|
| 713 |
+
# sure the content has been fully read.
|
| 714 |
+
if not self._content_consumed:
|
| 715 |
+
self.content
|
| 716 |
+
|
| 717 |
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
| 718 |
+
|
| 719 |
+
def __setstate__(self, state):
|
| 720 |
+
for name, value in state.items():
|
| 721 |
+
setattr(self, name, value)
|
| 722 |
+
|
| 723 |
+
# pickled objects do not have .raw
|
| 724 |
+
setattr(self, "_content_consumed", True)
|
| 725 |
+
setattr(self, "raw", None)
|
| 726 |
+
|
| 727 |
+
def __repr__(self):
|
| 728 |
+
return f"<Response [{self.status_code}]>"
|
| 729 |
+
|
| 730 |
+
    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        # Delegates to the ``ok`` property (raise_for_status under the hood).
        return self.ok
|
| 739 |
+
|
| 740 |
+
    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code, is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        # Python 2's name for __bool__; presumably kept for backwards
        # compatibility of subclasses — confirm before removing.
        return self.ok
|
| 749 |
+
|
| 750 |
+
    def __iter__(self):
        """Allows you to use a response as an iterator."""
        # Streams the body in 128-byte chunks via iter_content.
        return self.iter_content(128)
|
| 753 |
+
|
| 754 |
+
    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400, False if not.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        # raise_for_status raises HTTPError for 4xx/5xx codes; anything that
        # doesn't raise counts as "ok".
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True
|
| 768 |
+
|
| 769 |
+
@property
|
| 770 |
+
def is_redirect(self):
|
| 771 |
+
"""True if this Response is a well-formed HTTP redirect that could have
|
| 772 |
+
been processed automatically (by :meth:`Session.resolve_redirects`).
|
| 773 |
+
"""
|
| 774 |
+
return "location" in self.headers and self.status_code in REDIRECT_STATI
|
| 775 |
+
|
| 776 |
+
@property
|
| 777 |
+
def is_permanent_redirect(self):
|
| 778 |
+
"""True if this Response one of the permanent versions of redirect."""
|
| 779 |
+
return "location" in self.headers and self.status_code in (
|
| 780 |
+
codes.moved_permanently,
|
| 781 |
+
codes.permanent_redirect,
|
| 782 |
+
)
|
| 783 |
+
|
| 784 |
+
    @property
    def next(self):
        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
        # ``_next`` is populated externally (None when not redirecting).
        return self._next
|
| 788 |
+
|
| 789 |
+
@property
|
| 790 |
+
def apparent_encoding(self):
|
| 791 |
+
"""The apparent encoding, provided by the charset_normalizer or chardet libraries."""
|
| 792 |
+
if chardet is not None:
|
| 793 |
+
return chardet.detect(self.content)["encoding"]
|
| 794 |
+
else:
|
| 795 |
+
# If no character detection library is available, we'll fall back
|
| 796 |
+
# to a standard Python utf-8 str.
|
| 797 |
+
return "utf-8"
|
| 798 |
+
|
| 799 |
+
    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.

        :raises StreamConsumedError: if the raw stream was already drained
            without caching the content.
        :raises TypeError: if ``chunk_size`` is neither an int nor None.
        """

        def generate():
            # Special case for urllib3: translate its exceptions into the
            # requests exception hierarchy.
            if hasattr(self.raw, "stream"):
                try:
                    yield from self.raw.stream(chunk_size, decode_content=True)
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
                except SSLError as e:
                    raise RequestsSSLError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        # ``_content`` starts out as the bool sentinel False (see __init__);
        # a bool here together with ``_content_consumed`` means the raw
        # stream was drained without caching, so re-iteration is impossible.
        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError(
                f"chunk_size must be an int, it is instead a {type(chunk_size)}."
            )
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        # Serve from the cache when the body was already read; otherwise
        # stream from the connection.
        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks
|
| 856 |
+
|
| 857 |
+
    def iter_lines(
        self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
    ):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        :param delimiter: optional explicit line separator; when omitted,
            ``splitlines()`` semantics are used.

        .. note:: This method is not reentrant safe.
        """

        # Tail of the previous chunk that did not end on a line boundary.
        pending = None

        for chunk in self.iter_content(
            chunk_size=chunk_size, decode_unicode=decode_unicode
        ):
            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            # If the chunk did not end exactly on a line break, the last
            # split piece may be an incomplete line: hold it back and
            # prepend it to the next chunk instead of yielding it now.
            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            yield from lines

        # Emit whatever trailing text never got a final line break.
        if pending is not None:
            yield pending
|
| 889 |
+
|
| 890 |
+
    @property
    def content(self):
        """Content of the response, in bytes.

        The body is read once and cached in ``_content``; subsequent
        accesses return the cached bytes.
        """

        # ``False`` is the "not read yet" sentinel set in __init__.
        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError("The content for this response was already consumed")

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content
|
| 908 |
+
|
| 909 |
+
@property
def text(self):
    """Content of the response, in unicode.

    If Response.encoding is None, encoding will be guessed using
    ``charset_normalizer`` or ``chardet``.

    The encoding of the response content is determined based solely on
    HTTP headers, following RFC 2616 to the letter. If you can take
    advantage of non-HTTP knowledge to make a better guess at the
    encoding, you should set ``r.encoding`` appropriately before
    accessing this property.
    """
    raw = self.content
    if not raw:
        return ""

    # Charset from the content-type header, falling back to detection.
    codec = self.encoding
    if codec is None:
        codec = self.apparent_encoding

    try:
        decoded = str(raw, codec, errors="replace")
    except (LookupError, TypeError):
        # LookupError: the codec name was not found (misspelling etc.).
        # TypeError: codec is None.
        # Either way, decode blindly with the default codec.
        decoded = str(raw, errors="replace")

    return decoded
|
| 946 |
+
|
| 947 |
+
def json(self, **kwargs):
    r"""Return the json-encoded content of a response, if any.

    :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
    :raises requests.exceptions.JSONDecodeError: If the response body does not
        contain valid json.
    """
    if not self.encoding and self.content and len(self.content) > 3:
        # No encoding set. JSON RFC 4627 section 3 states we should expect
        # UTF-8, -16 or -32; sniff which one. If detection or decoding
        # fails, fall back to `self.text` below (which uses
        # charset_normalizer to make a best guess).
        utf_codec = guess_json_utf(self.content)
        if utf_codec is not None:
            try:
                return complexjson.loads(self.content.decode(utf_codec), **kwargs)
            except UnicodeDecodeError:
                # Wrong UTF codec detected; usually because it's not UTF-8
                # but some other 8-bit codec. This is an RFC violation,
                # and the server didn't bother to tell us what codec *was*
                # used -- fall through to the text-based path.
                pass
            except JSONDecodeError as e:
                raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

    try:
        return complexjson.loads(self.text, **kwargs)
    except JSONDecodeError as e:
        # Re-raise JSON-related errors as requests.JSONDecodeError, which
        # aliases json.JSONDecodeError and simplejson.JSONDecodeError.
        raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
|
| 979 |
+
|
| 980 |
+
@property
def links(self):
    """Returns the parsed header links of the response, if any."""
    link_header = self.headers.get("link")
    parsed = {}

    if link_header:
        for entry in parse_header_links(link_header):
            # Prefer the rel name as the key, falling back to the URL.
            parsed[entry.get("rel") or entry.get("url")] = entry

    return parsed
|
| 996 |
+
|
| 997 |
+
def raise_for_status(self):
    """Raises :class:`HTTPError`, if one occurred."""
    if isinstance(self.reason, bytes):
        # We attempt utf-8 first because some servers localize their
        # reason strings; if that fails, fall back to iso-8859-1 for
        # all other encodings. (See PR #3538)
        try:
            reason = self.reason.decode("utf-8")
        except UnicodeDecodeError:
            reason = self.reason.decode("iso-8859-1")
    else:
        reason = self.reason

    http_error_msg = ""
    if 400 <= self.status_code < 500:
        http_error_msg = (
            f"{self.status_code} Client Error: {reason} for url: {self.url}"
        )
    elif 500 <= self.status_code < 600:
        http_error_msg = (
            f"{self.status_code} Server Error: {reason} for url: {self.url}"
        )

    if http_error_msg:
        raise HTTPError(http_error_msg, response=self)
|
| 1025 |
+
|
| 1026 |
+
def close(self):
    """Releases the connection back to the pool. Once this method has been
    called the underlying ``raw`` object must not be accessed again.

    *Note: Should not normally need to be called explicitly.*
    """
    # Only close the raw stream if it was never fully read; a consumed
    # stream has already been released by urllib3.
    if not self._content_consumed:
        self.raw.close()

    releaser = getattr(self.raw, "release_conn", None)
    if releaser is not None:
        releaser()
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/packages.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys

from .compat import chardet

# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

# Alias the vendored copies of urllib3/idna so that
# `requests.packages.urllib3.*` resolves to the very same module objects
# as `urllib3.*` (identity, not just equality).
for package in ("urllib3", "idna"):
    vendored_package = "pip._vendor." + package
    locals()[package] = __import__(vendored_package)
    # This traversal is apparently necessary such that the identities are
    # preserved (requests.packages.urllib3.* is urllib3.*)
    for mod in list(sys.modules):
        if mod == vendored_package or mod.startswith(vendored_package + '.'):
            unprefixed_mod = mod[len("pip._vendor."):]
            sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]

if chardet is not None:
    # Register whichever charset detector was imported (charset_normalizer
    # or chardet) under both its real name and the legacy "chardet" name.
    # NOTE(review): these entries use the "requests.packages." prefix while
    # the loop above uses "pip._vendor.requests.packages." -- presumably
    # intentional in the vendored copy, but worth confirming against
    # upstream pip.
    target = chardet.__name__
    for mod in list(sys.modules):
        if mod == target or mod.startswith(f"{target}."):
            imported_mod = sys.modules[mod]
            sys.modules[f"requests.packages.{mod}"] = imported_mod
            mod = mod.replace(target, "chardet")
            sys.modules[f"requests.packages.{mod}"] = imported_mod
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/sessions.py
ADDED
|
@@ -0,0 +1,831 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.sessions
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module provides a Session object to manage and persist settings across
|
| 6 |
+
requests (cookies, auth, proxies).
|
| 7 |
+
"""
|
| 8 |
+
import os
|
| 9 |
+
import sys
|
| 10 |
+
import time
|
| 11 |
+
from collections import OrderedDict
|
| 12 |
+
from datetime import timedelta
|
| 13 |
+
|
| 14 |
+
from ._internal_utils import to_native_string
|
| 15 |
+
from .adapters import HTTPAdapter
|
| 16 |
+
from .auth import _basic_auth_str
|
| 17 |
+
from .compat import Mapping, cookielib, urljoin, urlparse
|
| 18 |
+
from .cookies import (
|
| 19 |
+
RequestsCookieJar,
|
| 20 |
+
cookiejar_from_dict,
|
| 21 |
+
extract_cookies_to_jar,
|
| 22 |
+
merge_cookies,
|
| 23 |
+
)
|
| 24 |
+
from .exceptions import (
|
| 25 |
+
ChunkedEncodingError,
|
| 26 |
+
ContentDecodingError,
|
| 27 |
+
InvalidSchema,
|
| 28 |
+
TooManyRedirects,
|
| 29 |
+
)
|
| 30 |
+
from .hooks import default_hooks, dispatch_hook
|
| 31 |
+
|
| 32 |
+
# formerly defined here, reexposed here for backward compatibility
|
| 33 |
+
from .models import ( # noqa: F401
|
| 34 |
+
DEFAULT_REDIRECT_LIMIT,
|
| 35 |
+
REDIRECT_STATI,
|
| 36 |
+
PreparedRequest,
|
| 37 |
+
Request,
|
| 38 |
+
)
|
| 39 |
+
from .status_codes import codes
|
| 40 |
+
from .structures import CaseInsensitiveDict
|
| 41 |
+
from .utils import ( # noqa: F401
|
| 42 |
+
DEFAULT_PORTS,
|
| 43 |
+
default_headers,
|
| 44 |
+
get_auth_from_url,
|
| 45 |
+
get_environ_proxies,
|
| 46 |
+
get_netrc_auth,
|
| 47 |
+
requote_uri,
|
| 48 |
+
resolve_proxies,
|
| 49 |
+
rewind_body,
|
| 50 |
+
should_bypass_proxies,
|
| 51 |
+
to_key_val_list,
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
# Preferred clock, based on which one is more accurate on a given system.
|
| 55 |
+
if sys.platform == "win32":
|
| 56 |
+
preferred_clock = time.perf_counter
|
| 57 |
+
else:
|
| 58 |
+
preferred_clock = time.time
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
    """Determines appropriate setting for a given request, taking into account
    the explicit setting on that request, and the setting in the session. If a
    setting is a dictionary, they will be merged together using `dict_class`
    """
    if session_setting is None:
        return request_setting
    if request_setting is None:
        return session_setting

    # Only mapping-with-mapping merges make sense; any other type on
    # either side (e.g. a bool `verify`) is taken verbatim from the
    # request.
    both_mappings = isinstance(session_setting, Mapping) and isinstance(
        request_setting, Mapping
    )
    if not both_mappings:
        return request_setting

    merged = dict_class(to_key_val_list(session_setting))
    merged.update(to_key_val_list(request_setting))

    # A request-level value of None means "remove this key". Collect the
    # keys first to avoid mutating the dict while iterating it.
    for dead_key in [k for (k, v) in merged.items() if v is None]:
        del merged[dead_key]

    return merged
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
    """Properly merges both requests and session hooks.

    This is necessary because when request_hooks == {'response': []}, the
    merge breaks Session hooks entirely.
    """
    # An empty 'response' hook list is treated the same as no hooks at
    # all; otherwise merging {'response': []} would wipe the other side.
    def _no_hooks(hooks):
        return hooks is None or hooks.get("response") == []

    if _no_hooks(session_hooks):
        return request_hooks
    if _no_hooks(request_hooks):
        return session_hooks

    return merge_setting(request_hooks, session_hooks, dict_class)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class SessionRedirectMixin:
    """Redirect-resolution behavior mixed into :class:`Session`.

    Provides redirect-target extraction, auth/proxy/method rewriting on
    redirect, and the main ``resolve_redirects`` generator.
    """

    def get_redirect_target(self, resp):
        """Receives a Response. Returns a redirect URI or ``None``"""
        # Due to the nature of how requests processes redirects this method will
        # be called at least once upon the original response and at least twice
        # on each subsequent redirect response (if any).
        # If a custom mixin is used to handle this logic, it may be advantageous
        # to cache the redirect location onto the response object as a private
        # attribute.
        if resp.is_redirect:
            location = resp.headers["location"]
            # Currently the underlying http module on py3 decode headers
            # in latin1, but empirical evidence suggests that latin1 is very
            # rarely used with non-ASCII characters in HTTP headers.
            # It is more likely to get UTF8 header rather than latin1.
            # This causes incorrect handling of UTF8 encoded location headers.
            # To solve this, we re-encode the location in latin1.
            location = location.encode("latin1")
            return to_native_string(location, "utf8")
        return None

    def should_strip_auth(self, old_url, new_url):
        """Decide whether Authorization header should be removed when redirecting"""
        old_parsed = urlparse(old_url)
        new_parsed = urlparse(new_url)
        # Different host: always strip credentials.
        if old_parsed.hostname != new_parsed.hostname:
            return True
        # Special case: allow http -> https redirect when using the standard
        # ports. This isn't specified by RFC 7235, but is kept to avoid
        # breaking backwards compatibility with older versions of requests
        # that allowed any redirects on the same host.
        if (
            old_parsed.scheme == "http"
            and old_parsed.port in (80, None)
            and new_parsed.scheme == "https"
            and new_parsed.port in (443, None)
        ):
            return False

        # Handle default port usage corresponding to scheme.
        changed_port = old_parsed.port != new_parsed.port
        changed_scheme = old_parsed.scheme != new_parsed.scheme
        default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
        if (
            not changed_scheme
            and old_parsed.port in default_port
            and new_parsed.port in default_port
        ):
            return False

        # Standard case: root URI must match
        return changed_port or changed_scheme

    def resolve_redirects(
        self,
        resp,
        req,
        stream=False,
        timeout=None,
        verify=True,
        cert=None,
        proxies=None,
        yield_requests=False,
        **adapter_kwargs,
    ):
        """Receives a Response. Returns a generator of Responses or Requests."""

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        previous_fragment = urlparse(req.url).fragment
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects(
                    f"Exceeded {self.max_redirects} redirects.", response=resp
                )

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith("//"):
                parsed_rurl = urlparse(resp.url)
                url = ":".join([to_native_string(parsed_rurl.scheme), url])

            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
            parsed = urlparse(url)
            if parsed.fragment == "" and previous_fragment:
                parsed = parsed._replace(fragment=previous_fragment)
            elif parsed.fragment:
                previous_fragment = parsed.fragment
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/psf/requests/issues/1084
            if resp.status_code not in (
                codes.temporary_redirect,
                codes.permanent_redirect,
            ):
                # https://github.com/psf/requests/issues/3490
                purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding")
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            # Drop the Cookie header; cookies are re-prepared from the jar below.
            headers.pop("Cookie", None)

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = prepared_request._body_position is not None and (
                "Content-Length" in headers or "Transfer-Encoding" in headers
            )

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            if yield_requests:
                yield req
            else:
                resp = self.send(
                    req,
                    stream=stream,
                    timeout=timeout,
                    verify=verify,
                    cert=cert,
                    proxies=proxies,
                    allow_redirects=False,
                    **adapter_kwargs,
                )

                extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
                yield resp

    def rebuild_auth(self, prepared_request, response):
        """When being redirected we may want to strip authentication from the
        request to avoid leaking credentials. This method intelligently removes
        and reapplies authentication where possible to avoid credential loss.
        """
        headers = prepared_request.headers
        url = prepared_request.url

        if "Authorization" in headers and self.should_strip_auth(
            response.request.url, url
        ):
            # If we get redirected to a new host, we should strip out any
            # authentication headers.
            del headers["Authorization"]

        # .netrc might have more auth for us on our new host.
        new_auth = get_netrc_auth(url) if self.trust_env else None
        if new_auth is not None:
            prepared_request.prepare_auth(new_auth)

    def rebuild_proxies(self, prepared_request, proxies):
        """This method re-evaluates the proxy configuration by considering the
        environment variables. If we are redirected to a URL covered by
        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
        proxy keys for this URL (in case they were stripped by a previous
        redirect).

        This method also replaces the Proxy-Authorization header where
        necessary.

        :rtype: dict
        """
        headers = prepared_request.headers
        scheme = urlparse(prepared_request.url).scheme
        new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)

        # Always drop stale proxy credentials before deciding whether to
        # re-add them for the new proxy.
        if "Proxy-Authorization" in headers:
            del headers["Proxy-Authorization"]

        try:
            username, password = get_auth_from_url(new_proxies[scheme])
        except KeyError:
            username, password = None, None

        # urllib3 handles proxy authorization for us in the standard adapter.
        # Avoid appending this to TLS tunneled requests where it may be leaked.
        if not scheme.startswith("https") and username and password:
            headers["Proxy-Authorization"] = _basic_auth_str(username, password)

        return new_proxies

    def rebuild_method(self, prepared_request, response):
        """When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = prepared_request.method

        # https://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.see_other and method != "HEAD":
            method = "GET"

        # Do what the browsers do, despite standards...
        # First, turn 302s into GETs.
        if response.status_code == codes.found and method != "HEAD":
            method = "GET"

        # Second, if a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in Issue 1704.
        if response.status_code == codes.moved and method == "POST":
            method = "GET"

        prepared_request.method = method
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
class Session(SessionRedirectMixin):
|
| 357 |
+
"""A Requests session.
|
| 358 |
+
|
| 359 |
+
Provides cookie persistence, connection-pooling, and configuration.
|
| 360 |
+
|
| 361 |
+
Basic Usage::
|
| 362 |
+
|
| 363 |
+
>>> import requests
|
| 364 |
+
>>> s = requests.Session()
|
| 365 |
+
>>> s.get('https://httpbin.org/get')
|
| 366 |
+
<Response [200]>
|
| 367 |
+
|
| 368 |
+
Or as a context manager::
|
| 369 |
+
|
| 370 |
+
>>> with requests.Session() as s:
|
| 371 |
+
... s.get('https://httpbin.org/get')
|
| 372 |
+
<Response [200]>
|
| 373 |
+
"""
|
| 374 |
+
|
| 375 |
+
__attrs__ = [
|
| 376 |
+
"headers",
|
| 377 |
+
"cookies",
|
| 378 |
+
"auth",
|
| 379 |
+
"proxies",
|
| 380 |
+
"hooks",
|
| 381 |
+
"params",
|
| 382 |
+
"verify",
|
| 383 |
+
"cert",
|
| 384 |
+
"adapters",
|
| 385 |
+
"stream",
|
| 386 |
+
"trust_env",
|
| 387 |
+
"max_redirects",
|
| 388 |
+
]
|
| 389 |
+
|
| 390 |
+
def __init__(self):
    """Initialize the session with default headers, hooks, cookie jar,
    and HTTP/HTTPS transport adapters."""
    #: A case-insensitive dictionary of headers to be sent on each
    #: :class:`Request <Request>` sent from this
    #: :class:`Session <Session>`.
    self.headers = default_headers()

    #: Default Authentication tuple or object to attach to
    #: :class:`Request <Request>`.
    self.auth = None

    #: Dictionary mapping protocol or protocol and host to the URL of the proxy
    #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
    #: be used on each :class:`Request <Request>`.
    self.proxies = {}

    #: Event-handling hooks.
    self.hooks = default_hooks()

    #: Dictionary of querystring data to attach to each
    #: :class:`Request <Request>`. The dictionary values may be lists for
    #: representing multivalued query parameters.
    self.params = {}

    #: Stream response content default.
    self.stream = False

    #: SSL Verification default.
    #: Defaults to `True`, requiring requests to verify the TLS certificate at the
    #: remote end.
    #: If verify is set to `False`, requests will accept any TLS certificate
    #: presented by the server, and will ignore hostname mismatches and/or
    #: expired certificates, which will make your application vulnerable to
    #: man-in-the-middle (MitM) attacks.
    #: Only set this to `False` for testing.
    self.verify = True

    #: SSL client certificate default, if String, path to ssl client
    #: cert file (.pem). If Tuple, ('cert', 'key') pair.
    self.cert = None

    #: Maximum number of redirects allowed. If the request exceeds this
    #: limit, a :class:`TooManyRedirects` exception is raised.
    #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
    #: 30.
    self.max_redirects = DEFAULT_REDIRECT_LIMIT

    #: Trust environment settings for proxy configuration, default
    #: authentication and similar.
    self.trust_env = True

    #: A CookieJar containing all currently outstanding cookies set on this
    #: session. By default it is a
    #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
    #: may be any other ``cookielib.CookieJar`` compatible object.
    self.cookies = cookiejar_from_dict({})

    # Default connection adapters. Mounted longest-prefix-first at lookup
    # time; https is mounted before http here but order of mounting does
    # not affect resolution.
    self.adapters = OrderedDict()
    self.mount("https://", HTTPAdapter())
    self.mount("http://", HTTPAdapter())
|
| 450 |
+
|
| 451 |
+
def __enter__(self):
    """Enter the runtime context; the session itself is the target."""
    return self
|
| 453 |
+
|
| 454 |
+
def __exit__(self, *args):
    """Exit the runtime context, closing the session's adapters."""
    self.close()
|
| 456 |
+
|
| 457 |
+
def prepare_request(self, request):
    """Constructs a :class:`PreparedRequest <PreparedRequest>` for
    transmission and returns it. The :class:`PreparedRequest` has settings
    merged from the :class:`Request <Request>` instance and those of the
    :class:`Session`.

    :param request: :class:`Request` instance to prepare with this
        session's settings.
    :rtype: requests.PreparedRequest
    """
    cookies = request.cookies or {}

    # Bootstrap CookieJar.
    # Plain dicts (the common case) are converted so the merge below can
    # treat both sources uniformly as CookieJar objects.
    if not isinstance(cookies, cookielib.CookieJar):
        cookies = cookiejar_from_dict(cookies)

    # Merge with session cookies
    # Session cookies are copied into a fresh jar first, then overlaid with
    # the request's own cookies, so request-level values win and the
    # session's jar is never mutated here.
    merged_cookies = merge_cookies(
        merge_cookies(RequestsCookieJar(), self.cookies), cookies
    )

    # Set environment's basic authentication if not explicitly set.
    # netrc lookup only happens when trust_env is on and neither the
    # request nor the session supplied credentials.
    auth = request.auth
    if self.trust_env and not auth and not self.auth:
        auth = get_netrc_auth(request.url)

    p = PreparedRequest()
    p.prepare(
        method=request.method.upper(),
        url=request.url,
        files=request.files,
        data=request.data,
        json=request.json,
        # Request-level settings take precedence over session-level ones;
        # headers keep case-insensitive semantics via CaseInsensitiveDict.
        headers=merge_setting(
            request.headers, self.headers, dict_class=CaseInsensitiveDict
        ),
        params=merge_setting(request.params, self.params),
        auth=merge_setting(auth, self.auth),
        cookies=merged_cookies,
        hooks=merge_hooks(request.hooks, self.hooks),
    )
    return p
|
| 499 |
+
|
| 500 |
+
def request(
    self,
    method,
    url,
    params=None,
    data=None,
    headers=None,
    cookies=None,
    files=None,
    auth=None,
    timeout=None,
    allow_redirects=True,
    proxies=None,
    hooks=None,
    stream=None,
    verify=None,
    cert=None,
    json=None,
):
    """Constructs a :class:`Request <Request>`, prepares it and sends it.
    Returns :class:`Response <Response>` object.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query
        string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json to send in the body of the
        :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the
        :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the
        :class:`Request`.
    :param files: (optional) Dictionary of ``'filename': file-like-objects``
        for multipart encoding upload.
    :param auth: (optional) Auth tuple or callable to enable
        Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How long to wait for the server to send
        data before giving up, as a float, or a :ref:`(connect timeout,
        read timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Set to True by default.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol or protocol and
        hostname to the URL of the proxy.
    :param hooks: (optional) Dictionary mapping hook name to one event or
        list of events, event must be callable.
    :param stream: (optional) whether to immediately download the response
        content. Defaults to ``False``.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use. Defaults to ``True``. When set to
        ``False``, requests will accept any TLS certificate presented by
        the server, and will ignore hostname mismatches and/or expired
        certificates, which will make your application vulnerable to
        man-in-the-middle (MitM) attacks. Setting verify to ``False``
        may be useful during local development or testing.
    :param cert: (optional) if String, path to ssl client cert file (.pem).
        If Tuple, ('cert', 'key') pair.
    :rtype: requests.Response
    """
    # Create the Request.
    # data/params fall back to {} so downstream merging never sees None.
    req = Request(
        method=method.upper(),
        url=url,
        headers=headers,
        files=files,
        data=data or {},
        json=json,
        params=params or {},
        auth=auth,
        cookies=cookies,
        hooks=hooks,
    )
    prep = self.prepare_request(req)

    proxies = proxies or {}

    # Overlay environment-derived settings (proxies, CA bundle, session
    # defaults) on top of the per-call arguments.
    settings = self.merge_environment_settings(
        prep.url, proxies, stream, verify, cert
    )

    # Send the request.
    send_kwargs = {
        "timeout": timeout,
        "allow_redirects": allow_redirects,
    }
    send_kwargs.update(settings)
    resp = self.send(prep, **send_kwargs)

    return resp
|
| 592 |
+
|
| 593 |
+
def get(self, url, **kwargs):
    r"""Issue a GET request and return the :class:`Response`.

    Redirects are followed unless the caller explicitly passes
    ``allow_redirects=False``.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """

    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = True
    return self.request("GET", url, **kwargs)
|
| 603 |
+
|
| 604 |
+
def options(self, url, **kwargs):
    r"""Issue an OPTIONS request and return the :class:`Response`.

    Redirects are followed unless the caller explicitly passes
    ``allow_redirects=False``.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """

    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = True
    return self.request("OPTIONS", url, **kwargs)
|
| 614 |
+
|
| 615 |
+
def head(self, url, **kwargs):
    r"""Issue a HEAD request and return the :class:`Response`.

    Unlike the other verb helpers, redirects are NOT followed by default
    (HEAD is typically used to inspect a resource in place).

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """

    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = False
    return self.request("HEAD", url, **kwargs)
|
| 625 |
+
|
| 626 |
+
def post(self, url, data=None, json=None, **kwargs):
    r"""Issue a POST request and return the :class:`Response`.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """

    kwargs["data"] = data
    kwargs["json"] = json
    return self.request("POST", url, **kwargs)
|
| 638 |
+
|
| 639 |
+
def put(self, url, data=None, **kwargs):
    r"""Issue a PUT request and return the :class:`Response`.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """

    kwargs["data"] = data
    return self.request("PUT", url, **kwargs)
|
| 650 |
+
|
| 651 |
+
def patch(self, url, data=None, **kwargs):
    r"""Issue a PATCH request and return the :class:`Response`.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """

    kwargs["data"] = data
    return self.request("PATCH", url, **kwargs)
|
| 662 |
+
|
| 663 |
+
def delete(self, url, **kwargs):
    r"""Issue a DELETE request and return the :class:`Response`.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :rtype: requests.Response
    """

    return self.request("DELETE", url, **kwargs)
|
| 672 |
+
|
| 673 |
+
def send(self, request, **kwargs):
    """Send a given PreparedRequest.

    :rtype: requests.Response
    """
    # Set defaults that the hooks can utilize to ensure they always have
    # the correct parameters to reproduce the previous request.
    kwargs.setdefault("stream", self.stream)
    kwargs.setdefault("verify", self.verify)
    kwargs.setdefault("cert", self.cert)
    if "proxies" not in kwargs:
        kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env)

    # It's possible that users might accidentally send a Request object.
    # Guard against that specific failure case.
    if isinstance(request, Request):
        raise ValueError("You can only send PreparedRequests.")

    # Set up variables needed for resolve_redirects and dispatching of hooks
    # allow_redirects is popped (not read) because the adapter must not see it.
    allow_redirects = kwargs.pop("allow_redirects", True)
    stream = kwargs.get("stream")
    hooks = request.hooks

    # Get the appropriate adapter to use
    adapter = self.get_adapter(url=request.url)

    # Start time (approximately) of the request
    start = preferred_clock()

    # Send the request
    r = adapter.send(request, **kwargs)

    # Total elapsed time of the request (approximately)
    elapsed = preferred_clock() - start
    r.elapsed = timedelta(seconds=elapsed)

    # Response manipulation hooks
    # Hooks may replace the response entirely, hence the reassignment.
    r = dispatch_hook("response", hooks, r, **kwargs)

    # Persist cookies
    if r.history:
        # If the hooks create history then we want those cookies too
        for resp in r.history:
            extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

    extract_cookies_to_jar(self.cookies, request, r.raw)

    # Resolve redirects if allowed.
    if allow_redirects:
        # Redirect resolving generator.
        gen = self.resolve_redirects(r, request, **kwargs)
        history = [resp for resp in gen]
    else:
        history = []

    # Shuffle things around if there's history.
    if history:
        # Insert the first (original) request at the start
        history.insert(0, r)
        # Get the last request made
        r = history.pop()
        r.history = history

    # If redirects aren't being followed, store the response on the Request for Response.next().
    if not allow_redirects:
        try:
            r._next = next(
                self.resolve_redirects(r, request, yield_requests=True, **kwargs)
            )
        except StopIteration:
            pass

    # Eagerly consume the body unless streaming was requested; accessing
    # .content reads and caches it.
    if not stream:
        r.content

    return r
|
| 749 |
+
|
| 750 |
+
def merge_environment_settings(self, url, proxies, stream, verify, cert):
    """
    Check the environment and merge it with some settings.

    :rtype: dict
    """
    # Gather clues from the surrounding environment.
    # Environment values only ever fill gaps; explicit arguments and
    # session-level settings take precedence below.
    if self.trust_env:
        # Set environment's proxies.
        no_proxy = proxies.get("no_proxy") if proxies is not None else None
        env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
        for k, v in env_proxies.items():
            # setdefault: caller-supplied proxies win over environment ones.
            proxies.setdefault(k, v)

        # Look for requests environment configuration
        # and be compatible with cURL.
        # Only consult the env vars when verification is enabled/unset;
        # an explicit verify=False or CA-bundle path is left untouched.
        if verify is True or verify is None:
            verify = (
                os.environ.get("REQUESTS_CA_BUNDLE")
                or os.environ.get("CURL_CA_BUNDLE")
                or verify
            )

    # Merge all the kwargs.
    # Per-call values override the session defaults.
    proxies = merge_setting(proxies, self.proxies)
    stream = merge_setting(stream, self.stream)
    verify = merge_setting(verify, self.verify)
    cert = merge_setting(cert, self.cert)

    return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert}
|
| 780 |
+
|
| 781 |
+
def get_adapter(self, url):
    """
    Returns the appropriate connection adapter for the given URL.

    The first mounted prefix (case-insensitively) matching the start of
    *url* wins; :meth:`mount` keeps prefixes ordered longest-first.

    :rtype: requests.adapters.BaseAdapter
    """
    target = url.lower()
    for prefix, adapter in self.adapters.items():
        if target.startswith(prefix.lower()):
            return adapter

    # Nothing matches :-/
    raise InvalidSchema(f"No connection adapters were found for {url!r}")
|
| 793 |
+
|
| 794 |
+
def close(self):
    """Closes all adapters and as such the session"""
    for adapter in self.adapters.values():
        adapter.close()
|
| 798 |
+
|
| 799 |
+
def mount(self, prefix, adapter):
    """Registers a connection adapter to a prefix.

    Adapters are sorted in descending order by prefix length.
    """
    self.adapters[prefix] = adapter
    # Re-append every strictly shorter prefix so that longer (more
    # specific) prefixes are encountered first during lookup.
    shorter = [key for key in self.adapters if len(key) < len(prefix)]
    for key in shorter:
        self.adapters[key] = self.adapters.pop(key)
|
| 809 |
+
|
| 810 |
+
def __getstate__(self):
|
| 811 |
+
state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
| 812 |
+
return state
|
| 813 |
+
|
| 814 |
+
def __setstate__(self, state):
|
| 815 |
+
for attr, value in state.items():
|
| 816 |
+
setattr(self, attr, value)
|
| 817 |
+
|
| 818 |
+
|
| 819 |
+
def session():
    """
    Returns a :class:`Session` for context-management.

    .. deprecated:: 1.0.0

        Kept only for backwards compatibility with pre-1.0 code; new code
        should construct :class:`~requests.sessions.Session` directly.
        This helper may be removed at a future date.

    :rtype: Session
    """
    return Session()
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/status_codes.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""
|
| 2 |
+
The ``codes`` object defines a mapping from common names for HTTP statuses
|
| 3 |
+
to their numerical codes, accessible either as attributes or as dictionary
|
| 4 |
+
items.
|
| 5 |
+
|
| 6 |
+
Example::
|
| 7 |
+
|
| 8 |
+
>>> import requests
|
| 9 |
+
>>> requests.codes['temporary_redirect']
|
| 10 |
+
307
|
| 11 |
+
>>> requests.codes.teapot
|
| 12 |
+
418
|
| 13 |
+
>>> requests.codes['\o/']
|
| 14 |
+
200
|
| 15 |
+
|
| 16 |
+
Some codes have multiple names, and both upper- and lower-case versions of
|
| 17 |
+
the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
|
| 18 |
+
``codes.okay`` all correspond to the HTTP status code 200.
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
from .structures import LookupDict
|
| 22 |
+
|
| 23 |
+
# Canonical table: numeric status -> tuple of attribute-friendly aliases.
# Every alias becomes an attribute on ``codes`` in _init(); aliases that
# don't start with a backslash/slash also get an UPPERCASE twin.
_codes = {
    # Informational.
    100: ("continue",),
    101: ("switching_protocols",),
    102: ("processing", "early-hints"),
    103: ("checkpoint",),
    122: ("uri_too_long", "request_uri_too_long"),
    200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"),
    201: ("created",),
    202: ("accepted",),
    203: ("non_authoritative_info", "non_authoritative_information"),
    204: ("no_content",),
    205: ("reset_content", "reset"),
    206: ("partial_content", "partial"),
    207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"),
    208: ("already_reported",),
    226: ("im_used",),
    # Redirection.
    300: ("multiple_choices",),
    301: ("moved_permanently", "moved", "\\o-"),
    302: ("found",),
    303: ("see_other", "other"),
    304: ("not_modified",),
    305: ("use_proxy",),
    306: ("switch_proxy",),
    307: ("temporary_redirect", "temporary_moved", "temporary"),
    308: (
        "permanent_redirect",
        "resume_incomplete",
        "resume",
    ),  # "resume" and "resume_incomplete" to be removed in 3.0
    # Client Error.
    400: ("bad_request", "bad"),
    401: ("unauthorized",),
    402: ("payment_required", "payment"),
    403: ("forbidden",),
    404: ("not_found", "-o-"),
    405: ("method_not_allowed", "not_allowed"),
    406: ("not_acceptable",),
    407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
    408: ("request_timeout", "timeout"),
    409: ("conflict",),
    410: ("gone",),
    411: ("length_required",),
    412: ("precondition_failed", "precondition"),
    413: ("request_entity_too_large", "content_too_large"),
    414: ("request_uri_too_large", "uri_too_long"),
    415: ("unsupported_media_type", "unsupported_media", "media_type"),
    416: (
        "requested_range_not_satisfiable",
        "requested_range",
        "range_not_satisfiable",
    ),
    417: ("expectation_failed",),
    418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
    421: ("misdirected_request",),
    422: ("unprocessable_entity", "unprocessable", "unprocessable_content"),
    423: ("locked",),
    424: ("failed_dependency", "dependency"),
    425: ("unordered_collection", "unordered", "too_early"),
    426: ("upgrade_required", "upgrade"),
    428: ("precondition_required", "precondition"),
    429: ("too_many_requests", "too_many"),
    431: ("header_fields_too_large", "fields_too_large"),
    444: ("no_response", "none"),
    449: ("retry_with", "retry"),
    450: ("blocked_by_windows_parental_controls", "parental_controls"),
    451: ("unavailable_for_legal_reasons", "legal_reasons"),
    499: ("client_closed_request",),
    # Server Error.
    500: ("internal_server_error", "server_error", "/o\\", "✗"),
    501: ("not_implemented",),
    502: ("bad_gateway",),
    503: ("service_unavailable", "unavailable"),
    504: ("gateway_timeout",),
    505: ("http_version_not_supported", "http_version"),
    506: ("variant_also_negotiates",),
    507: ("insufficient_storage",),
    509: ("bandwidth_limit_exceeded", "bandwidth"),
    510: ("not_extended",),
    511: ("network_authentication_required", "network_auth", "network_authentication"),
}

# The public lookup object; attributes are attached by _init() below.
codes = LookupDict(name="status_codes")


def _init():
    """Populate ``codes`` from ``_codes`` and append the full alias table
    to the module docstring."""
    for code, titles in _codes.items():
        for title in titles:
            setattr(codes, title, code)
            # Symbol-style aliases like "\\o/" don't get uppercase twins.
            if not title.startswith(("\\", "/")):
                setattr(codes, title.upper(), code)

    def doc(code):
        names = ", ".join(f"``{n}``" for n in _codes[code])
        return "* %d: %s" % (code, names)

    global __doc__
    # __doc__ is None when Python runs with -OO; skip the append then.
    __doc__ = (
        __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
        if __doc__ is not None
        else None
    )


_init()
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/structures.py
ADDED
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.structures
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Data structures that power Requests.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from collections import OrderedDict
|
| 9 |
+
|
| 10 |
+
from .compat import Mapping, MutableMapping
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class CaseInsensitiveDict(MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    """

    def __init__(self, data=None, **kwargs):
        # Internal storage: lowercased key -> (original-cased key, value).
        self._store = OrderedDict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        # [1] is the value half of the (cased_key, value) pair.
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        # Yield the original-cased keys, not the lowercased storage keys.
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())

    def __eq__(self, other):
        if isinstance(other, Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        # _store.values() is an iterable of (cased_key, value) pairs, which
        # the constructor consumes like any mapping/iterable of pairs.
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class LookupDict(dict):
    """Dictionary lookup object.

    Item access reads the instance ``__dict__`` (i.e. attributes set on the
    object), and missing keys fall through to ``None`` rather than raising
    ``KeyError``.
    """

    def __init__(self, name=None):
        super().__init__()
        self.name = name

    def __repr__(self):
        return f"<lookup '{self.name}'>"

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/requests/utils.py
ADDED
|
@@ -0,0 +1,1096 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.utils
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module provides utility functions that are used within Requests
|
| 6 |
+
that are also useful for external consumption.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import codecs
|
| 10 |
+
import contextlib
|
| 11 |
+
import io
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
import socket
|
| 15 |
+
import struct
|
| 16 |
+
import sys
|
| 17 |
+
import tempfile
|
| 18 |
+
import warnings
|
| 19 |
+
import zipfile
|
| 20 |
+
from collections import OrderedDict
|
| 21 |
+
|
| 22 |
+
from pip._vendor.urllib3.util import make_headers, parse_url
|
| 23 |
+
|
| 24 |
+
from . import certs
|
| 25 |
+
from .__version__ import __version__
|
| 26 |
+
|
| 27 |
+
# to_native_string is unused here, but imported here for backwards compatibility
|
| 28 |
+
from ._internal_utils import ( # noqa: F401
|
| 29 |
+
_HEADER_VALIDATORS_BYTE,
|
| 30 |
+
_HEADER_VALIDATORS_STR,
|
| 31 |
+
HEADER_VALIDATORS,
|
| 32 |
+
to_native_string,
|
| 33 |
+
)
|
| 34 |
+
from .compat import (
|
| 35 |
+
Mapping,
|
| 36 |
+
basestring,
|
| 37 |
+
bytes,
|
| 38 |
+
getproxies,
|
| 39 |
+
getproxies_environment,
|
| 40 |
+
integer_types,
|
| 41 |
+
)
|
| 42 |
+
from .compat import parse_http_list as _parse_list_header
|
| 43 |
+
from .compat import (
|
| 44 |
+
proxy_bypass,
|
| 45 |
+
proxy_bypass_environment,
|
| 46 |
+
quote,
|
| 47 |
+
str,
|
| 48 |
+
unquote,
|
| 49 |
+
urlparse,
|
| 50 |
+
urlunparse,
|
| 51 |
+
)
|
| 52 |
+
from .cookies import cookiejar_from_dict
|
| 53 |
+
from .exceptions import (
|
| 54 |
+
FileModeWarning,
|
| 55 |
+
InvalidHeader,
|
| 56 |
+
InvalidURL,
|
| 57 |
+
UnrewindableBodyError,
|
| 58 |
+
)
|
| 59 |
+
from .structures import CaseInsensitiveDict
|
| 60 |
+
|
| 61 |
+
# Candidate netrc file names probed in the user's home directory
# ("_netrc" is the historical Windows spelling).
NETRC_FILES = (".netrc", "_netrc")

# Default CA bundle location as reported by the bundled certs module.
DEFAULT_CA_BUNDLE_PATH = certs.where()

# Default port assumed for each supported URL scheme.
DEFAULT_PORTS = {"http": 80, "https": 443}

# Ensure that ', ' is used to preserve previous delimiter behavior.
DEFAULT_ACCEPT_ENCODING = ", ".join(
    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
if sys.platform == "win32":
    # On Windows, provide a proxy_bypass that consults the registry and
    # avoids the DNS lookups done by the stdlib implementation.

    def proxy_bypass_registry(host):
        """Check Windows Internet Settings in the registry to decide
        whether *host* should bypass the configured proxy."""
        try:
            import winreg
        except ImportError:
            return False

        try:
            internet_settings = winreg.OpenKey(
                winreg.HKEY_CURRENT_USER,
                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
            )
            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
            proxy_enable = int(
                winreg.QueryValueEx(internet_settings, "ProxyEnable")[0]
            )
            # ProxyOverride is almost always a string
            proxy_override = winreg.QueryValueEx(
                internet_settings, "ProxyOverride"
            )[0]
        except (OSError, ValueError):
            return False
        if not proxy_enable or not proxy_override:
            return False

        # The override entry is a ';'-separated list of glob-like patterns;
        # "<local>" means any host without a dot. Empty entries are dropped
        # so re.match cannot vacuously succeed on them.
        for pattern in filter(None, proxy_override.split(";")):
            if pattern == "<local>":
                if "." not in host:
                    return True
            pattern = pattern.replace(".", r"\.")  # mask dots
            pattern = pattern.replace("*", r".*")  # change glob sequence
            pattern = pattern.replace("?", r".")  # change glob char
            if re.match(pattern, host, re.I):
                return True
        return False

    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        return proxy_bypass_registry(host)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def dict_to_sequence(d):
    """Return *d* as a sequence of (key, value) pairs when it is a
    mapping; otherwise hand it back untouched."""
    return d.items() if hasattr(d, "items") else d
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def super_len(o):
    """Best-effort count of the bytes remaining in *o*.

    Works for strings (measured by UTF-8 encoded size), sized objects,
    objects with a ``len`` attribute, real files (via fstat), and
    seekable streams. Returns ``max(0, total - current position)``, or
    0 when no length can be determined.
    """
    length = None
    position = 0

    if isinstance(o, str):
        o = o.encode("utf-8")

    if hasattr(o, "__len__"):
        length = len(o)
    elif hasattr(o, "len"):
        length = o.len
    elif hasattr(o, "fileno"):
        try:
            fd = o.fileno()
        except (io.UnsupportedOperation, AttributeError):
            # AttributeError is a surprising exception, seeing as how we've
            # just checked that `hasattr(o, 'fileno')`. It happens for
            # objects obtained via `Tarfile.extractfile()`, per issue 5229.
            pass
        else:
            length = os.fstat(fd).st_size

            # fstat reports the binary size; if the file was opened in text
            # mode the decoded length may differ, so warn the caller.
            if "b" not in o.mode:
                warnings.warn(
                    (
                        "Requests has determined the content-length for this "
                        "request using the binary size of the file: however, the "
                        "file has been opened in text mode (i.e. without the 'b' "
                        "flag in the mode). This may lead to an incorrect "
                        "content-length. In Requests 3.0, support will be removed "
                        "for files in text mode."
                    ),
                    FileModeWarning,
                )

    if hasattr(o, "tell"):
        try:
            position = o.tell()
        except OSError:
            # Special descriptors such as stdin can refuse tell(). If a
            # length is known, pretend the stream is fully consumed so the
            # caller falls back to chunked transfer instead.
            if length is not None:
                position = length
        else:
            if hasattr(o, "seek") and length is None:
                # StringIO and BytesIO have seek but no usable fileno
                try:
                    o.seek(0, 2)  # seek to end of file
                    length = o.tell()
                    # seek back so partially-read objects keep working
                    o.seek(position or 0)
                except OSError:
                    length = 0

    if length is None:
        length = 0

    return max(0, length - position)
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def get_netrc_auth(url, raise_errors=False):
    """Look up ``(login, password)`` for *url*'s host in the user's netrc.

    The file named by the ``NETRC`` environment variable is preferred;
    otherwise ``~/.netrc`` and ``~/_netrc`` are probed. Returns None when
    no file or matching entry exists, or on parse/permission problems
    (which propagate instead when *raise_errors* is true).
    """
    netrc_file = os.environ.get("NETRC")
    if netrc_file is not None:
        netrc_locations = (netrc_file,)
    else:
        netrc_locations = (f"~/{f}" for f in NETRC_FILES)

    try:
        from netrc import NetrcParseError, netrc

        netrc_path = None
        for location in netrc_locations:
            try:
                candidate = os.path.expanduser(location)
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See https://bugs.python.org/issue20164 &
                # https://github.com/psf/requests/issues/1846
                return
            if os.path.exists(candidate):
                netrc_path = candidate
                break

        # No netrc file found anywhere: nothing to authenticate with.
        if netrc_path is None:
            return

        ri = urlparse(url)

        # Strip any port number from the netloc, handling both str and
        # bytes URLs with a matching split separator.
        splitstr = b":"
        if isinstance(url, str):
            splitstr = splitstr.decode("ascii")
        host = ri.netloc.split(splitstr)[0]

        try:
            entry = netrc(netrc_path).authenticators(host)
            if entry:
                # Prefer the login field; fall back to the account field.
                login_i = 0 if entry[0] else 1
                return (entry[login_i], entry[2])
        except (NetrcParseError, OSError):
            # Parse or permission problems silently disable netrc auth
            # unless the caller explicitly asked for errors.
            if raise_errors:
                raise

    # App Engine hackiness.
    except (ImportError, AttributeError):
        pass
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def guess_filename(obj):
    """Return the basename of *obj*'s ``name`` attribute, when it looks
    like a real filename (pseudo-names such as ``"<stdin>"`` are rejected)."""
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, basestring):
        return None
    if name[0] == "<" or name[-1] == ">":
        return None
    return os.path.basename(name)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def extract_zipped_paths(path):
    """If *path* refers to a member inside a zip archive rather than a
    real file, extract that member into the temp directory and return the
    extracted copy's location; otherwise return *path* unchanged.
    """
    if os.path.exists(path):
        # Already a valid filesystem path; nothing further to do.
        return path

    # Walk upward until an existing prefix is found: treat that prefix as
    # the archive and the remaining tail as the member name inside it.
    archive, member = os.path.split(path)
    while archive and not os.path.exists(archive):
        archive, prefix = os.path.split(archive)
        if not prefix:
            # os.path.split made no progress (rare corner case); bail out
            # rather than loop forever.
            break
        member = "/".join([prefix, member])

    if not zipfile.is_zipfile(archive):
        return path

    zip_file = zipfile.ZipFile(archive)
    if member not in zip_file.namelist():
        return path

    # Valid archive and member: extract only the file itself (read +
    # write, no nested folders) to dodge any mkdir race condition.
    extracted_path = os.path.join(tempfile.gettempdir(), member.split("/")[-1])
    if not os.path.exists(extracted_path):
        with atomic_open(extracted_path) as file_handler:
            file_handler.write(zip_file.read(member))
    return extracted_path
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
@contextlib.contextmanager
def atomic_open(filename):
    """Context manager that writes *filename* atomically: content goes to
    a sibling temp file which replaces the target only on clean exit; on
    any error the temp file is removed and nothing is left behind."""
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(filename))
    try:
        with os.fdopen(fd, "wb") as handle:
            yield handle
        os.replace(tmp_path, filename)
    except BaseException:
        os.remove(tmp_path)
        raise
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def from_key_val_list(value):
    """Coerce *value* into an OrderedDict when it can be represented as
    one, e.g.,

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    """
    if value is None:
        return None
    # Scalars cannot be interpreted as key/value pairs.
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError("cannot encode objects that are not 2-tuples")
    return OrderedDict(value)
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
def to_key_val_list(value):
    """Coerce *value* into a list of (key, value) tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples

    :rtype: list
    """
    if value is None:
        return None
    # Scalars cannot be interpreted as key/value pairs.
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError("cannot encode objects that are not 2-tuples")
    return list(value.items() if isinstance(value, Mapping) else value)
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
# From mitsuhiko/werkzeug (used with permission).
|
| 375 |
+
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings. A quoted-string could
    contain a comma. A non-quoted string could have quotes in the
    middle. Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    """
    return [
        unquote_header_value(item[1:-1]) if item[:1] == item[-1:] == '"' else item
        for item in _parse_list_header(value)
    ]


# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    """
    result = {}
    for item in _parse_list_header(value):
        if "=" not in item:
            # Bare tokens map to None.
            result[item] = None
            continue
        name, raw = item.split("=", 1)
        if raw[:1] == raw[-1:] == '"':
            raw = unquote_header_value(raw[1:-1])
        result[name] = raw
    return result


# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
    This uses the quoting browsers actually perform rather than the RFC's.

    :param value: the header value to unquote.
    :rtype: str
    """
    if not (value and value[0] == value[-1] == '"'):
        return value
    # this is not the real unquoting, but fixing this so that the
    # RFC is met will result in bugs with internet explorer and
    # probably some other browsers as well. IE for example is
    # uploading files with "C:\foo\bar.txt" as filename
    value = value[1:-1]

    # if this is a filename and the starting characters look like
    # a UNC path, then just return the value without quotes. Using the
    # replace sequence below on a UNC path has the effect of turning
    # the leading double slash into a single slash and then
    # _fix_ie_filename() doesn't work correctly. See #458.
    if not is_filename or value[:2] != "\\\\":
        return value.replace("\\\\", "\\").replace('\\"', '"')
    return value
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    return {c.name: c.value for c in cj}
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """
    # Delegate entirely to the cookies module helper.
    return cookiejar_from_dict(cookie_dict, cj)
|
| 487 |
+
|
| 488 |
+
|
| 489 |
+
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    :param content: bytestring to extract encodings from.
    """
    warnings.warn(
        (
            "In requests 3.0, get_encodings_from_content will be removed. For "
            "more information, please see the discussion on issue #2266. (This"
            " warning should only appear once.)"
        ),
        DeprecationWarning,
    )

    # Charset declarations can live in <meta charset=...>, in a
    # <meta http-equiv pragma, or in an XML prolog.
    charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
    pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
    xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')

    found = charset_re.findall(content)
    found += pragma_re.findall(content)
    found += xml_re.findall(content)
    return found
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
def _parse_content_type_header(header):
|
| 515 |
+
"""Returns content type and parameters from given header
|
| 516 |
+
|
| 517 |
+
:param header: string
|
| 518 |
+
:return: tuple containing content type and dictionary of
|
| 519 |
+
parameters
|
| 520 |
+
"""
|
| 521 |
+
|
| 522 |
+
tokens = header.split(";")
|
| 523 |
+
content_type, params = tokens[0].strip(), tokens[1:]
|
| 524 |
+
params_dict = {}
|
| 525 |
+
items_to_strip = "\"' "
|
| 526 |
+
|
| 527 |
+
for param in params:
|
| 528 |
+
param = param.strip()
|
| 529 |
+
if param:
|
| 530 |
+
key, value = param, True
|
| 531 |
+
index_of_equals = param.find("=")
|
| 532 |
+
if index_of_equals != -1:
|
| 533 |
+
key = param[:index_of_equals].strip(items_to_strip)
|
| 534 |
+
value = param[index_of_equals + 1 :].strip(items_to_strip)
|
| 535 |
+
params_dict[key.lower()] = value
|
| 536 |
+
return content_type, params_dict
|
| 537 |
+
|
| 538 |
+
|
| 539 |
+
def get_encoding_from_headers(headers):
|
| 540 |
+
"""Returns encodings from given HTTP Header Dict.
|
| 541 |
+
|
| 542 |
+
:param headers: dictionary to extract encoding from.
|
| 543 |
+
:rtype: str
|
| 544 |
+
"""
|
| 545 |
+
|
| 546 |
+
content_type = headers.get("content-type")
|
| 547 |
+
|
| 548 |
+
if not content_type:
|
| 549 |
+
return None
|
| 550 |
+
|
| 551 |
+
content_type, params = _parse_content_type_header(content_type)
|
| 552 |
+
|
| 553 |
+
if "charset" in params:
|
| 554 |
+
return params["charset"].strip("'\"")
|
| 555 |
+
|
| 556 |
+
if "text" in content_type:
|
| 557 |
+
return "ISO-8859-1"
|
| 558 |
+
|
| 559 |
+
if "application/json" in content_type:
|
| 560 |
+
# Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
|
| 561 |
+
return "utf-8"
|
| 562 |
+
|
| 563 |
+
|
| 564 |
+
def stream_decode_response_unicode(iterator, r):
    """Stream decodes an iterator."""
    # Without a known encoding, pass chunks through untouched.
    if r.encoding is None:
        yield from iterator
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any bytes the decoder is still holding.
    tail = decoder.decode(b"", final=True)
    if tail:
        yield tail
|
| 579 |
+
|
| 580 |
+
|
| 581 |
+
def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    total = len(string)
    # None or non-positive means "one slice covering everything".
    if slice_length is None or slice_length <= 0:
        slice_length = total
    start = 0
    while start < total:
        yield string[start : start + slice_length]
        start += slice_length
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn(
        (
            "In requests 3.0, get_unicode_from_response will be removed. For "
            "more information, please see the discussion on issue #2266. (This"
            " warning should only appear once.)"
        ),
        DeprecationWarning,
    )

    attempted = []

    # Try charset from content-type
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            attempted.append(encoding)

    # Fall back: decode with replacement, or return raw bytes when even
    # that is impossible (e.g. encoding is None).
    try:
        return str(r.content, encoding, errors="replace")
    except TypeError:
        return r.content
|
| 628 |
+
|
| 629 |
+
|
| 630 |
+
# The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    "abcdefghijklmnopqrstuvwxyz"
    "0123456789-._~"
)


def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    """
    parts = uri.split("%")
    for i in range(1, len(parts)):
        hex_pair = parts[i][0:2]
        if len(hex_pair) == 2 and hex_pair.isalnum():
            try:
                char = chr(int(hex_pair, 16))
            except ValueError:
                raise InvalidURL(f"Invalid percent-escape sequence: '{hex_pair}'")

            if char in UNRESERVED_SET:
                # Decode unreserved characters in place.
                parts[i] = char + parts[i][2:]
            else:
                parts[i] = f"%{parts[i]}"
        else:
            parts[i] = f"%{parts[i]}"
    return "".join(parts)


def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
    try:
        # Unquote only the unreserved characters
        # Then quote only illegal characters (do not quote reserved,
        # unreserved, or '%')
        return quote(unquote_unreserved(uri), safe=safe_with_percent)
    except InvalidURL:
        # We couldn't unquote the given URI, so let's try quoting it, but
        # there may be unquoted '%'s in the URI. We need to make sure they're
        # properly quoted so they do not cause issues elsewhere.
        return quote(uri, safe=safe_without_percent)
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
def address_in_network(ip, net):
    """This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    ip_int = struct.unpack("=L", socket.inet_aton(ip))[0]
    net_addr, bits = net.split("/")
    mask_int = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0]
    network_int = struct.unpack("=L", socket.inet_aton(net_addr))[0] & mask_int
    return (ip_int & mask_int) == (network_int & mask_int)


def dotted_netmask(mask):
    """Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    # Set the top `mask` bits of a 32-bit word.
    bits = 0xFFFFFFFF ^ ((1 << (32 - mask)) - 1)
    return socket.inet_ntoa(struct.pack(">I", bits))
|
| 706 |
+
|
| 707 |
+
|
| 708 |
+
def is_ipv4_address(string_ip):
    """
    :rtype: bool
    """
    try:
        socket.inet_aton(string_ip)
        return True
    except OSError:
        return False
|
| 717 |
+
|
| 718 |
+
|
| 719 |
+
def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    # Exactly one '/' is required.
    if string_network.count("/") != 1:
        return False

    addr, _, mask_str = string_network.partition("/")
    try:
        mask = int(mask_str)
    except ValueError:
        return False

    if not 1 <= mask <= 32:
        return False

    try:
        socket.inet_aton(addr)
    except OSError:
        return False
    return True
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
@contextlib.contextmanager
def set_environ(env_name, value):
    """Temporarily set environment variable *env_name* to *value*.

    The previous value (or absence) is restored on exit. If *value* is
    None the environment is left completely untouched.
    """
    should_set = value is not None
    if should_set:
        previous = os.environ.get(env_name)
        os.environ[env_name] = value
    try:
        yield
    finally:
        if should_set:
            if previous is None:
                del os.environ[env_name]
            else:
                os.environ[env_name] = previous
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    :rtype: bool
    """

    # Prioritize lowercase environment variables over uppercase
    # to keep a consistent behaviour with other http projects (curl, wget).
    def get_proxy(key):
        return os.environ.get(key) or os.environ.get(key.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = get_proxy("no_proxy")
    parsed = urlparse(url)

    if parsed.hostname is None:
        # URLs don't always have hostnames, e.g. file:/// urls.
        return True

    if no_proxy:
        # Compare the hostname, both with and without the port, against
        # every (comma-separated) entry in no_proxy.
        skip_hosts = (h for h in no_proxy.replace(" ", "").split(",") if h)

        if is_ipv4_address(parsed.hostname):
            for entry in skip_hosts:
                if is_valid_cidr(entry):
                    if address_in_network(parsed.hostname, entry):
                        return True
                elif parsed.hostname == entry:
                    # A plain-IP no_proxy entry matching this host exactly.
                    return True
        else:
            host_with_port = parsed.hostname
            if parsed.port:
                host_with_port += f":{parsed.port}"

            for entry in skip_hosts:
                if parsed.hostname.endswith(entry) or host_with_port.endswith(entry):
                    # The URL matches no_proxy, so don't proxy it.
                    return True

    with set_environ("no_proxy", no_proxy_arg):
        # parsed.hostname can be `None` in cases such as a file URI.
        try:
            bypass = proxy_bypass(parsed.hostname)
        except (TypeError, socket.gaierror):
            bypass = False

    if bypass:
        return True

    return False
|
| 824 |
+
|
| 825 |
+
|
| 826 |
+
def get_environ_proxies(url, no_proxy=None):
    """
    Return a dict of environment proxies.

    :rtype: dict
    """
    # An empty mapping means "no proxying for this URL".
    return {} if should_bypass_proxies(url, no_proxy=no_proxy) else getproxies()
|
| 836 |
+
|
| 837 |
+
|
| 838 |
+
def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    parts = urlparse(url)
    if parts.hostname is None:
        return proxies.get(parts.scheme, proxies.get("all"))

    # Most specific key wins: scheme://host, scheme, all://host, all.
    candidates = (
        f"{parts.scheme}://{parts.hostname}",
        parts.scheme,
        f"all://{parts.hostname}",
        "all",
    )
    for key in candidates:
        if key in proxies:
            return proxies[key]
    return None
|
| 862 |
+
|
| 863 |
+
|
| 864 |
+
def resolve_proxies(request, proxies, trust_env=True):
    """This method takes proxy information from a request and configuration
    input to resolve a mapping of target proxies. This will consider settings
    such as NO_PROXY to strip proxy configurations.

    :param request: Request or PreparedRequest
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    :param trust_env: Boolean declaring whether to trust environment configs

    :rtype: dict
    """
    resolved = dict(proxies) if proxies is not None else {}
    url = request.url
    no_proxy = resolved.get("no_proxy")

    if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
        environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
        scheme = urlparse(url).scheme
        env_proxy = environ_proxies.get(scheme, environ_proxies.get("all"))
        if env_proxy:
            # Explicitly supplied proxies take precedence over the environment.
            resolved.setdefault(scheme, env_proxy)
    return resolved
|
| 889 |
+
|
| 890 |
+
|
| 891 |
+
def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :rtype: str
    """
    # e.g. "python-requests/<version>"
    return "{}/{}".format(name, __version__)
|
| 898 |
+
|
| 899 |
+
|
| 900 |
+
def default_headers():
    """
    Build the default set of request headers.

    :rtype: requests.structures.CaseInsensitiveDict
    """
    headers = CaseInsensitiveDict()
    headers["User-Agent"] = default_user_agent()
    headers["Accept-Encoding"] = DEFAULT_ACCEPT_ENCODING
    headers["Accept"] = "*/*"
    headers["Connection"] = "keep-alive"
    return headers
|
| 912 |
+
|
| 913 |
+
|
| 914 |
+
def parse_header_links(value):
    """Return a list of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    """
    strip_chars = " '\""

    header = value.strip(strip_chars)
    if not header:
        return []

    parsed = []
    # Entries are comma-separated; each subsequent entry opens with "<".
    for entry in re.split(", *<", header):
        url, _, params = entry.partition(";")
        link = {"url": url.strip("<> '\"")}

        for param in params.split(";"):
            pieces = param.split("=")
            # Anything other than exactly "key=value" terminates param parsing
            # for this entry (mirrors the historical unpack-or-break behavior).
            if len(pieces) != 2:
                break
            key, val = pieces
            link[key.strip(strip_chars)] = val.strip(strip_chars)

        parsed.append(link)

    return parsed
|
| 949 |
+
|
| 950 |
+
|
| 951 |
+
# Null bytes; no need to recreate these on each call to guess_json_utf
_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
_null2 = _null * 2  # two NUL bytes, the UTF-16 padding pattern
_null3 = _null * 3  # three NUL bytes, the UTF-32 padding pattern
|
| 955 |
+
|
| 956 |
+
|
| 957 |
+
def guess_json_utf(data):
    """
    :rtype: str
    """
    # JSON always opens with two ASCII characters, so the count and position
    # of NUL bytes among the first four bytes pins down the encoding. A BOM,
    # when present, is recognized first (UTF-32 before UTF-16, because the
    # UTF-32-LE BOM begins with the UTF-16-LE BOM bytes).
    head = data[:4]

    if head in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return "utf-32"  # BOM included
    if head[:3] == codecs.BOM_UTF8:
        return "utf-8-sig"  # BOM included, MS style (discouraged)
    if head[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return "utf-16"  # BOM included

    nulls = head.count(_null)
    if nulls == 0:
        return "utf-8"
    if nulls == 2:
        if head[::2] == _null2:  # 1st and 3rd bytes NUL -> big-endian UTF-16
            return "utf-16-be"
        if head[1::2] == _null2:  # 2nd and 4th bytes NUL -> little-endian UTF-16
            return "utf-16-le"
        # Did not detect 2 valid UTF-16 ascii-range characters
    if nulls == 3:
        if head[:3] == _null3:
            return "utf-32-be"
        if head[1:] == _null3:
            return "utf-32-le"
        # Did not detect a valid UTF-32 ascii-range character
    return None
|
| 987 |
+
|
| 988 |
+
|
| 989 |
+
def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    """
    parsed = parse_url(url)
    scheme, auth, host, port, path, query, fragment = parsed

    # A defect in urlparse determines that there isn't a netloc present in some
    # urls. We previously assumed parsing was overly cautious, and swapped the
    # netloc and path. Due to a lack of tests on the original defect, this is
    # maintained with parse_url for backwards compatibility.
    netloc = parsed.netloc
    if not netloc:
        netloc, path = path, netloc

    if auth:
        # parse_url doesn't provide the netloc with auth
        # so we'll add it ourselves.
        netloc = "@".join([auth, netloc])

    scheme = new_scheme if scheme is None else scheme
    path = "" if path is None else path

    return urlunparse((scheme, netloc, path, "", query, fragment))
|
| 1016 |
+
|
| 1017 |
+
|
| 1018 |
+
def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    """
    components = urlparse(url)

    # unquote raises TypeError when a component is None (no credentials in
    # the URL); either component missing yields the empty-credentials pair.
    try:
        return (unquote(components.username), unquote(components.password))
    except (AttributeError, TypeError):
        return ("", "")
|
| 1032 |
+
|
| 1033 |
+
|
| 1034 |
+
def check_header_validity(header):
    """Verifies that header parts don't contain leading whitespace
    reserved characters, or return characters.

    :param header: tuple, in the format (name, value).
    """
    name, value = header
    # Index 0 selects the name validator, index 1 the value validator.
    for index, part in enumerate((name, value)):
        _validate_header_part(header, part, index)
|
| 1043 |
+
|
| 1044 |
+
|
| 1045 |
+
def _validate_header_part(header, header_part, header_validator_index):
    """Validate a single header component against the matching regex.

    :param header: the full (name, value) tuple, used in error messages.
    :param header_part: the name or value to check; must be str or bytes.
    :param header_validator_index: 0 for the name validator, 1 for the value.
    :raises InvalidHeader: on a bad type or a failed validation match.
    """
    # Pick the validator table by the concrete text type of the part.
    if isinstance(header_part, str):
        validator = _HEADER_VALIDATORS_STR[header_validator_index]
    elif isinstance(header_part, bytes):
        validator = _HEADER_VALIDATORS_BYTE[header_validator_index]
    else:
        raise InvalidHeader(
            f"Header part ({header_part!r}) from {header} "
            f"must be of type str or bytes, not {type(header_part)}"
        )

    if validator.match(header_part):
        return

    header_kind = "name" if header_validator_index == 0 else "value"
    raise InvalidHeader(
        f"Invalid leading whitespace, reserved character(s), or return "
        f"character(s) in header {header_kind}: {header_part!r}"
    )
|
| 1062 |
+
|
| 1063 |
+
|
| 1064 |
+
def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    parts = urlparse(url)
    netloc, path = parts.netloc, parts.path

    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    # Keep only what follows the last "@", discarding any userinfo.
    netloc = netloc.rsplit("@", 1)[-1]

    return urlunparse((parts.scheme, netloc, path, parts.params, parts.query, ""))
|
| 1079 |
+
|
| 1080 |
+
|
| 1081 |
+
def rewind_body(prepared_request):
    """Move file pointer back to its recorded starting position
    so it can be read again on redirect.
    """
    body_seek = getattr(prepared_request.body, "seek", None)
    position = prepared_request._body_position

    # Guard clause: without a seekable body and a recorded integer
    # position there is nothing we can rewind to.
    if body_seek is None or not isinstance(position, integer_types):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")

    try:
        body_seek(position)
    except OSError:
        raise UnrewindableBodyError(
            "An error occurred when rewinding request body for redirect."
        )
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (6.1 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_cell_widths.cpython-310.pyc
ADDED
|
Binary file (7.85 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_replace.cpython-310.pyc
ADDED
|
Binary file (1.18 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_export_format.cpython-310.pyc
ADDED
|
Binary file (2.31 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_fileno.cpython-310.pyc
ADDED
|
Binary file (775 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_inspect.cpython-310.pyc
ADDED
|
Binary file (8.61 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_log_render.cpython-310.pyc
ADDED
|
Binary file (2.63 kB). View file
|
|
|