Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- .gitattributes +2 -0
- evalkit_llava/bin/openssl +3 -0
- evalkit_llava/lib/liblzma.so +3 -0
- evalkit_llava/lib/python3.10/idlelib/Icons/idle_16.gif +3 -0
- evalkit_llava/lib/python3.10/idlelib/Icons/openfolder.gif +3 -0
- evalkit_llava/lib/python3.10/idlelib/Icons/plusnode.gif +3 -0
- evalkit_llava/lib/python3.10/site-packages/distutils-precedence.pth +3 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py +29 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py +70 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py +161 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py +75 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py +8 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py +182 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py +48 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py +500 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py +119 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py +157 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/py.typed +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py +146 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py +43 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__init__.py +54 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__main__.py +4 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/distro.py +1403 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/py.typed +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__init__.py +55 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/exceptions.py +48 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/ext.py +170 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/fallback.py +929 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/__main__.py +17 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/formatter.py +129 -0
.gitattributes
CHANGED
@@ -75,3 +75,5 @@ evalkit_llava/lib/libbz2.a filter=lfs diff=lfs merge=lfs -text
 evalkit_llava/bin/unlzma filter=lfs diff=lfs merge=lfs -text
 evalkit_llava/lib/libbz2.so.1.0.8 filter=lfs diff=lfs merge=lfs -text
 evalkit_llava/bin/bunzip2 filter=lfs diff=lfs merge=lfs -text
+evalkit_llava/bin/openssl filter=lfs diff=lfs merge=lfs -text
+evalkit_llava/lib/liblzma.so filter=lfs diff=lfs merge=lfs -text
evalkit_llava/bin/openssl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5eb7a2b4dd73f2fd19ae40f97604ebc4bf6316d8ab55c7b786674026081d8bf8
+size 975848
evalkit_llava/lib/liblzma.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b126e186cc43702dcacb626e0455f8aecf030d969d6d4a407ce4cc35293c503
+size 218304
evalkit_llava/lib/python3.10/idlelib/Icons/idle_16.gif
ADDED
Git LFS Details

evalkit_llava/lib/python3.10/idlelib/Icons/openfolder.gif
ADDED
Git LFS Details

evalkit_llava/lib/python3.10/idlelib/Icons/plusnode.gif
ADDED
Git LFS Details
evalkit_llava/lib/python3.10/site-packages/distutils-precedence.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2638ce9e2500e572a5e0de7faed6661eb569d1b696fcba07b0dd223da5f5d224
+size 151
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (3.05 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py
ADDED
@@ -0,0 +1,29 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

"""CacheControl import Interface.

Make it easy to import from cachecontrol without long namespaces.
"""

__author__ = "Eric Larson"
__email__ = "[email protected]"
__version__ = "0.14.1"

from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.controller import CacheController
from pip._vendor.cachecontrol.wrapper import CacheControl

__all__ = [
    "__author__",
    "__email__",
    "__version__",
    "CacheControlAdapter",
    "CacheController",
    "CacheControl",
]

import logging

logging.getLogger(__name__).addHandler(logging.NullHandler())
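A minimal usage sketch of the wrapper this module re-exports, shown with the standalone requests/cachecontrol package names rather than pip's pip._vendor paths, and with a hypothetical URL:

import requests
from cachecontrol import CacheControl

sess = CacheControl(requests.Session())  # wraps the session with an in-memory DictCache
resp = sess.get("https://example.com/")  # first request goes to the network
resp = sess.get("https://example.com/")  # a repeat may be served from the cache
print(getattr(resp, "from_cache", False))  # the adapter marks cached responses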
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (749 Bytes)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc
ADDED
Binary file (1.82 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc
ADDED
Binary file (4.4 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc
ADDED
Binary file (3.26 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc
ADDED
Binary file (10.1 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc
ADDED
Binary file (3.2 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc
ADDED
Binary file (5.38 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc
ADDED
Binary file (3.34 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc
ADDED
Binary file (1.45 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py
ADDED
@@ -0,0 +1,70 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import logging
from argparse import ArgumentParser
from typing import TYPE_CHECKING

from pip._vendor import requests

from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import logger

if TYPE_CHECKING:
    from argparse import Namespace

    from pip._vendor.cachecontrol.controller import CacheController


def setup_logging() -> None:
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    logger.addHandler(handler)


def get_session() -> requests.Session:
    adapter = CacheControlAdapter(
        DictCache(), cache_etags=True, serializer=None, heuristic=None
    )
    sess = requests.Session()
    sess.mount("http://", adapter)
    sess.mount("https://", adapter)

    sess.cache_controller = adapter.controller  # type: ignore[attr-defined]
    return sess


def get_args() -> Namespace:
    parser = ArgumentParser()
    parser.add_argument("url", help="The URL to try and cache")
    return parser.parse_args()


def main() -> None:
    args = get_args()
    sess = get_session()

    # Make a request to get a response
    resp = sess.get(args.url)

    # Turn on logging
    setup_logging()

    # try setting the cache
    cache_controller: CacheController = (
        sess.cache_controller  # type: ignore[attr-defined]
    )
    cache_controller.cache_response(resp.request, resp.raw)

    # Now try to get it
    if cache_controller.cached_request(resp.request):
        print("Cached!")
    else:
        print("Not cached :(")


if __name__ == "__main__":
    main()
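Because of the __main__ guard, the same flow can also be driven programmatically; a sketch reusing the module's own helpers, with a hypothetical URL:

from pip._vendor.cachecontrol._cmd import get_session, setup_logging

setup_logging()
sess = get_session()
resp = sess.get("https://example.com/")  # fetch once over the network
sess.cache_controller.cache_response(resp.request, resp.raw)  # store the raw response
print(bool(sess.cache_controller.cached_request(resp.request)))  # True if it was cacheable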
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py
ADDED
@@ -0,0 +1,161 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import functools
import types
import zlib
from typing import TYPE_CHECKING, Any, Collection, Mapping

from pip._vendor.requests.adapters import HTTPAdapter

from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController
from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper

if TYPE_CHECKING:
    from pip._vendor.requests import PreparedRequest, Response
    from pip._vendor.urllib3 import HTTPResponse

    from pip._vendor.cachecontrol.cache import BaseCache
    from pip._vendor.cachecontrol.heuristics import BaseHeuristic
    from pip._vendor.cachecontrol.serialize import Serializer


class CacheControlAdapter(HTTPAdapter):
    invalidating_methods = {"PUT", "PATCH", "DELETE"}

    def __init__(
        self,
        cache: BaseCache | None = None,
        cache_etags: bool = True,
        controller_class: type[CacheController] | None = None,
        serializer: Serializer | None = None,
        heuristic: BaseHeuristic | None = None,
        cacheable_methods: Collection[str] | None = None,
        *args: Any,
        **kw: Any,
    ) -> None:
        super().__init__(*args, **kw)
        self.cache = DictCache() if cache is None else cache
        self.heuristic = heuristic
        self.cacheable_methods = cacheable_methods or ("GET",)

        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache, cache_etags=cache_etags, serializer=serializer
        )

    def send(
        self,
        request: PreparedRequest,
        stream: bool = False,
        timeout: None | float | tuple[float, float] | tuple[float, None] = None,
        verify: bool | str = True,
        cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None,
        proxies: Mapping[str, str] | None = None,
        cacheable_methods: Collection[str] | None = None,
    ) -> Response:
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if request.method in cacheable:
            try:
                cached_response = self.controller.cached_request(request)
            except zlib.error:
                cached_response = None
            if cached_response:
                return self.build_response(request, cached_response, from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(self.controller.conditional_headers(request))

        resp = super().send(request, stream, timeout, verify, cert, proxies)

        return resp

    def build_response(  # type: ignore[override]
        self,
        request: PreparedRequest,
        response: HTTPResponse,
        from_cache: bool = False,
        cacheable_methods: Collection[str] | None = None,
    ) -> Response:
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if not from_cache and request.method in cacheable:
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                # response when the stream has been consumed.
                response._fp = CallbackFileWrapper(  # type: ignore[assignment]
                    response._fp,  # type: ignore[arg-type]
                    functools.partial(
                        self.controller.cache_response, request, response
                    ),
                )
                if response.chunked:
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self: HTTPResponse) -> None:
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()  # type: ignore[union-attr]

                    response._update_chunk_length = types.MethodType(  # type: ignore[method-assign]
                        _update_chunk_length, response
                    )

        resp: Response = super().build_response(request, response)

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            assert request.url is not None
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache  # type: ignore[attr-defined]

        return resp

    def close(self) -> None:
        self.cache.close()
        super().close()  # type: ignore[no-untyped-call]
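The adapter can also be mounted by hand when more control is needed than the CacheControl wrapper gives; a minimal sketch, again with standalone package names and a hypothetical cache directory:

import requests
from cachecontrol import CacheControlAdapter
from cachecontrol.caches import FileCache

adapter = CacheControlAdapter(cache=FileCache(".webcache"))
sess = requests.Session()
sess.mount("http://", adapter)   # route plain HTTP through the caching adapter
sess.mount("https://", adapter)  # and HTTPS as well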
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py
ADDED
@@ -0,0 +1,75 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

"""
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
"""

from __future__ import annotations

from threading import Lock
from typing import IO, TYPE_CHECKING, MutableMapping

if TYPE_CHECKING:
    from datetime import datetime


class BaseCache:
    def get(self, key: str) -> bytes | None:
        raise NotImplementedError()

    def set(
        self, key: str, value: bytes, expires: int | datetime | None = None
    ) -> None:
        raise NotImplementedError()

    def delete(self, key: str) -> None:
        raise NotImplementedError()

    def close(self) -> None:
        pass


class DictCache(BaseCache):
    def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None:
        self.lock = Lock()
        self.data = init_dict or {}

    def get(self, key: str) -> bytes | None:
        return self.data.get(key, None)

    def set(
        self, key: str, value: bytes, expires: int | datetime | None = None
    ) -> None:
        with self.lock:
            self.data.update({key: value})

    def delete(self, key: str) -> None:
        with self.lock:
            if key in self.data:
                self.data.pop(key)


class SeparateBodyBaseCache(BaseCache):
    """
    In this variant, the body is not stored mixed in with the metadata, but is
    passed in (as a bytes-like object) in a separate call to ``set_body()``.

    That is, the expected interaction pattern is::

        cache.set(key, serialized_metadata)
        cache.set_body(key, body)

    Similarly, the body should be loaded separately via ``get_body()``.
    """

    def set_body(self, key: str, body: bytes) -> None:
        raise NotImplementedError()

    def get_body(self, key: str) -> IO[bytes] | None:
        """
        Return the body as file-like object.
        """
        raise NotImplementedError()
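BaseCache is the whole backend contract, so a custom store only needs get/set/delete; a minimal sketch of a hypothetical TTL-bounded in-memory backend (not part of the library):

import time

from pip._vendor.cachecontrol.cache import BaseCache


class TTLDictCache(BaseCache):
    """Hypothetical backend that forgets entries after ttl seconds."""

    def __init__(self, ttl: float = 300.0) -> None:
        self.ttl = ttl
        self.data: dict[str, tuple[float, bytes]] = {}

    def get(self, key: str) -> bytes | None:
        entry = self.data.get(key)
        if entry is None:
            return None
        stored_at, value = entry
        if time.monotonic() - stored_at > self.ttl:
            del self.data[key]  # expired: purge and report a miss
            return None
        return value

    def set(self, key: str, value: bytes, expires=None) -> None:
        self.data[key] = (time.monotonic(), value)

    def delete(self, key: str) -> None:
        self.data.pop(key, None)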
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
ADDED
@@ -0,0 +1,8 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0

from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache
from pip._vendor.cachecontrol.caches.redis_cache import RedisCache

__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"]
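The re-exports let callers pull every backend from one place:

from pip._vendor.cachecontrol.caches import FileCache, RedisCache, SeparateBodyFileCache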
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (412 Bytes)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc
ADDED
Binary file (5.63 kB)

evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-310.pyc
ADDED
Binary file (2.03 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
ADDED
@@ -0,0 +1,182 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import hashlib
import os
from textwrap import dedent
from typing import IO, TYPE_CHECKING
from pathlib import Path

from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache
from pip._vendor.cachecontrol.controller import CacheController

if TYPE_CHECKING:
    from datetime import datetime

    from filelock import BaseFileLock


def _secure_open_write(filename: str, fmode: int) -> IO[bytes]:
    # We only want to write to this file, so open it in write only mode
    flags = os.O_WRONLY

    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
    # will open *new* files.
    # We specify this because we want to ensure that the mode we pass is the
    # mode of the file.
    flags |= os.O_CREAT | os.O_EXCL

    # Do not follow symlinks to prevent someone from making a symlink that
    # we follow and insecurely open a cache file.
    if hasattr(os, "O_NOFOLLOW"):
        flags |= os.O_NOFOLLOW

    # On Windows we'll mark this file as binary
    if hasattr(os, "O_BINARY"):
        flags |= os.O_BINARY

    # Before we open our file, we want to delete any existing file that is
    # there
    try:
        os.remove(filename)
    except OSError:
        # The file must not exist already, so we can just skip ahead to opening
        pass

    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
    # race condition happens between the os.remove and this line, that an
    # error will be raised. Because we utilize a lockfile this should only
    # happen if someone is attempting to attack us.
    fd = os.open(filename, flags, fmode)
    try:
        return os.fdopen(fd, "wb")

    except:
        # An error occurred wrapping our FD in a file object
        os.close(fd)
        raise


class _FileCacheMixin:
    """Shared implementation for both FileCache variants."""

    def __init__(
        self,
        directory: str | Path,
        forever: bool = False,
        filemode: int = 0o0600,
        dirmode: int = 0o0700,
        lock_class: type[BaseFileLock] | None = None,
    ) -> None:
        try:
            if lock_class is None:
                from filelock import FileLock

                lock_class = FileLock
        except ImportError:
            notice = dedent(
                """
            NOTE: In order to use the FileCache you must have
            filelock installed. You can install it via pip:
              pip install cachecontrol[filecache]
            """
            )
            raise ImportError(notice)

        self.directory = directory
        self.forever = forever
        self.filemode = filemode
        self.dirmode = dirmode
        self.lock_class = lock_class

    @staticmethod
    def encode(x: str) -> str:
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name: str) -> str:
        # NOTE: This method should not change as some may depend on it.
        # See: https://github.com/ionrock/cachecontrol/issues/63
        hashed = self.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key: str) -> bytes | None:
        name = self._fn(key)
        try:
            with open(name, "rb") as fh:
                return fh.read()

        except FileNotFoundError:
            return None

    def set(
        self, key: str, value: bytes, expires: int | datetime | None = None
    ) -> None:
        name = self._fn(key)
        self._write(name, value)

    def _write(self, path: str, data: bytes) -> None:
        """
        Safely write the data to the given path.
        """
        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(path), self.dirmode)
        except OSError:
            pass

        with self.lock_class(path + ".lock"):
            # Write our actual file
            with _secure_open_write(path, self.filemode) as fh:
                fh.write(data)

    def _delete(self, key: str, suffix: str) -> None:
        name = self._fn(key) + suffix
        if not self.forever:
            try:
                os.remove(name)
            except FileNotFoundError:
                pass


class FileCache(_FileCacheMixin, BaseCache):
    """
    Traditional FileCache: body is stored in memory, so not suitable for large
    downloads.
    """

    def delete(self, key: str) -> None:
        self._delete(key, "")


class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache):
    """
    Memory-efficient FileCache: body is stored in a separate file, reducing
    peak memory usage.
    """

    def get_body(self, key: str) -> IO[bytes] | None:
        name = self._fn(key) + ".body"
        try:
            return open(name, "rb")
        except FileNotFoundError:
            return None

    def set_body(self, key: str, body: bytes) -> None:
        name = self._fn(key) + ".body"
        self._write(name, body)

    def delete(self, key: str) -> None:
        self._delete(key, "")
        self._delete(key, ".body")


def url_to_file_path(url: str, filecache: FileCache) -> str:
    """Return the file cache path based on the URL.

    This does not ensure the file exists!
    """
    key = CacheController.cache_url(url)
    return filecache._fn(key)
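A short sketch of where FileCache lands on disk, assuming the optional filelock dependency is installed and using a hypothetical cache directory; _fn() hashes the normalized URL with SHA-224 and fans entries out over five one-character subdirectories:

from pip._vendor.cachecontrol.caches.file_cache import FileCache, url_to_file_path

cache = FileCache(".webcache")
print(url_to_file_path("https://example.com/", cache))
# .webcache/<c0>/<c1>/<c2>/<c3>/<c4>/<full 56-character sha224 digest>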
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
ADDED
@@ -0,0 +1,48 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations


from datetime import datetime, timezone
from typing import TYPE_CHECKING

from pip._vendor.cachecontrol.cache import BaseCache

if TYPE_CHECKING:
    from redis import Redis


class RedisCache(BaseCache):
    def __init__(self, conn: Redis[bytes]) -> None:
        self.conn = conn

    def get(self, key: str) -> bytes | None:
        return self.conn.get(key)

    def set(
        self, key: str, value: bytes, expires: int | datetime | None = None
    ) -> None:
        if not expires:
            self.conn.set(key, value)
        elif isinstance(expires, datetime):
            now_utc = datetime.now(timezone.utc)
            if expires.tzinfo is None:
                now_utc = now_utc.replace(tzinfo=None)
            delta = expires - now_utc
            self.conn.setex(key, int(delta.total_seconds()), value)
        else:
            self.conn.setex(key, expires, value)

    def delete(self, key: str) -> None:
        self.conn.delete(key)

    def clear(self) -> None:
        """Helper for clearing all the keys in a database. Use with
        caution!"""
        for key in self.conn.keys():
            self.conn.delete(key)

    def close(self) -> None:
        """Redis uses connection pooling, no need to close the connection."""
        pass
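Wiring the backend up takes a redis-py client; a minimal sketch with standalone package names and a hypothetical local server:

import redis
import requests
from cachecontrol import CacheControl
from cachecontrol.caches.redis_cache import RedisCache

pool = redis.ConnectionPool(host="localhost", port=6379)
sess = CacheControl(
    requests.Session(), cache=RedisCache(redis.Redis(connection_pool=pool))
)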
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py
ADDED
|
@@ -0,0 +1,500 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
| 2 |
+
#
|
| 3 |
+
# SPDX-License-Identifier: Apache-2.0
|
| 4 |
+
|
| 5 |
+
"""
|
| 6 |
+
The httplib2 algorithms ported for use with requests.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from __future__ import annotations
|
| 10 |
+
|
| 11 |
+
import calendar
|
| 12 |
+
import logging
|
| 13 |
+
import re
|
| 14 |
+
import time
|
| 15 |
+
from email.utils import parsedate_tz
|
| 16 |
+
from typing import TYPE_CHECKING, Collection, Mapping
|
| 17 |
+
|
| 18 |
+
from pip._vendor.requests.structures import CaseInsensitiveDict
|
| 19 |
+
|
| 20 |
+
from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache
|
| 21 |
+
from pip._vendor.cachecontrol.serialize import Serializer
|
| 22 |
+
|
| 23 |
+
if TYPE_CHECKING:
|
| 24 |
+
from typing import Literal
|
| 25 |
+
|
| 26 |
+
from pip._vendor.requests import PreparedRequest
|
| 27 |
+
from pip._vendor.urllib3 import HTTPResponse
|
| 28 |
+
|
| 29 |
+
from pip._vendor.cachecontrol.cache import BaseCache
|
| 30 |
+
|
| 31 |
+
logger = logging.getLogger(__name__)
|
| 32 |
+
|
| 33 |
+
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
|
| 34 |
+
|
| 35 |
+
PERMANENT_REDIRECT_STATUSES = (301, 308)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def parse_uri(uri: str) -> tuple[str, str, str, str, str]:
|
| 39 |
+
"""Parses a URI using the regex given in Appendix B of RFC 3986.
|
| 40 |
+
|
| 41 |
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
| 42 |
+
"""
|
| 43 |
+
match = URI.match(uri)
|
| 44 |
+
assert match is not None
|
| 45 |
+
groups = match.groups()
|
| 46 |
+
return (groups[1], groups[3], groups[4], groups[6], groups[8])
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class CacheController:
|
| 50 |
+
"""An interface to see if request should cached or not."""
|
| 51 |
+
|
| 52 |
+
def __init__(
|
| 53 |
+
self,
|
| 54 |
+
cache: BaseCache | None = None,
|
| 55 |
+
cache_etags: bool = True,
|
| 56 |
+
serializer: Serializer | None = None,
|
| 57 |
+
status_codes: Collection[int] | None = None,
|
| 58 |
+
):
|
| 59 |
+
self.cache = DictCache() if cache is None else cache
|
| 60 |
+
self.cache_etags = cache_etags
|
| 61 |
+
self.serializer = serializer or Serializer()
|
| 62 |
+
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
|
| 63 |
+
|
| 64 |
+
@classmethod
|
| 65 |
+
def _urlnorm(cls, uri: str) -> str:
|
| 66 |
+
"""Normalize the URL to create a safe key for the cache"""
|
| 67 |
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
| 68 |
+
if not scheme or not authority:
|
| 69 |
+
raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
|
| 70 |
+
|
| 71 |
+
scheme = scheme.lower()
|
| 72 |
+
authority = authority.lower()
|
| 73 |
+
|
| 74 |
+
if not path:
|
| 75 |
+
path = "/"
|
| 76 |
+
|
| 77 |
+
# Could do syntax based normalization of the URI before
|
| 78 |
+
# computing the digest. See Section 6.2.2 of Std 66.
|
| 79 |
+
request_uri = query and "?".join([path, query]) or path
|
| 80 |
+
defrag_uri = scheme + "://" + authority + request_uri
|
| 81 |
+
|
| 82 |
+
return defrag_uri
|
| 83 |
+
|
| 84 |
+
@classmethod
|
| 85 |
+
def cache_url(cls, uri: str) -> str:
|
| 86 |
+
return cls._urlnorm(uri)
|
| 87 |
+
|
| 88 |
+
def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
|
| 89 |
+
known_directives = {
|
| 90 |
+
# https://tools.ietf.org/html/rfc7234#section-5.2
|
| 91 |
+
"max-age": (int, True),
|
| 92 |
+
"max-stale": (int, False),
|
| 93 |
+
"min-fresh": (int, True),
|
| 94 |
+
"no-cache": (None, False),
|
| 95 |
+
"no-store": (None, False),
|
| 96 |
+
"no-transform": (None, False),
|
| 97 |
+
"only-if-cached": (None, False),
|
| 98 |
+
"must-revalidate": (None, False),
|
| 99 |
+
"public": (None, False),
|
| 100 |
+
"private": (None, False),
|
| 101 |
+
"proxy-revalidate": (None, False),
|
| 102 |
+
"s-maxage": (int, True),
|
| 103 |
+
}
|
| 104 |
+
|
| 105 |
+
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
|
| 106 |
+
|
| 107 |
+
retval: dict[str, int | None] = {}
|
| 108 |
+
|
| 109 |
+
for cc_directive in cc_headers.split(","):
|
| 110 |
+
if not cc_directive.strip():
|
| 111 |
+
continue
|
| 112 |
+
|
| 113 |
+
parts = cc_directive.split("=", 1)
|
| 114 |
+
directive = parts[0].strip()
|
| 115 |
+
|
| 116 |
+
try:
|
| 117 |
+
typ, required = known_directives[directive]
|
| 118 |
+
except KeyError:
|
| 119 |
+
logger.debug("Ignoring unknown cache-control directive: %s", directive)
|
| 120 |
+
continue
|
| 121 |
+
|
| 122 |
+
if not typ or not required:
|
| 123 |
+
retval[directive] = None
|
| 124 |
+
if typ:
|
| 125 |
+
try:
|
| 126 |
+
retval[directive] = typ(parts[1].strip())
|
| 127 |
+
except IndexError:
|
| 128 |
+
if required:
|
| 129 |
+
logger.debug(
|
| 130 |
+
"Missing value for cache-control " "directive: %s",
|
| 131 |
+
directive,
|
| 132 |
+
)
|
| 133 |
+
except ValueError:
|
| 134 |
+
logger.debug(
|
| 135 |
+
"Invalid value for cache-control directive " "%s, must be %s",
|
| 136 |
+
directive,
|
| 137 |
+
typ.__name__,
|
| 138 |
+
)
|
| 139 |
+
|
| 140 |
+
return retval
|
| 141 |
+
|
| 142 |
+
def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
|
| 143 |
+
"""
|
| 144 |
+
Load a cached response, or return None if it's not available.
|
| 145 |
+
"""
|
| 146 |
+
# We do not support caching of partial content: so if the request contains a
|
| 147 |
+
# Range header then we don't want to load anything from the cache.
|
| 148 |
+
if "Range" in request.headers:
|
| 149 |
+
return None
|
| 150 |
+
|
| 151 |
+
cache_url = request.url
|
| 152 |
+
assert cache_url is not None
|
| 153 |
+
cache_data = self.cache.get(cache_url)
|
| 154 |
+
if cache_data is None:
|
| 155 |
+
logger.debug("No cache entry available")
|
| 156 |
+
return None
|
| 157 |
+
|
| 158 |
+
if isinstance(self.cache, SeparateBodyBaseCache):
|
| 159 |
+
body_file = self.cache.get_body(cache_url)
|
| 160 |
+
else:
|
| 161 |
+
body_file = None
|
| 162 |
+
|
| 163 |
+
result = self.serializer.loads(request, cache_data, body_file)
|
| 164 |
+
if result is None:
|
| 165 |
+
logger.warning("Cache entry deserialization failed, entry ignored")
|
| 166 |
+
return result
|
| 167 |
+
|
| 168 |
+
def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]:
|
| 169 |
+
"""
|
| 170 |
+
Return a cached response if it exists in the cache, otherwise
|
| 171 |
+
return False.
|
| 172 |
+
"""
|
| 173 |
+
assert request.url is not None
|
| 174 |
+
cache_url = self.cache_url(request.url)
|
| 175 |
+
logger.debug('Looking up "%s" in the cache', cache_url)
|
| 176 |
+
cc = self.parse_cache_control(request.headers)
|
| 177 |
+
|
| 178 |
+
# Bail out if the request insists on fresh data
|
| 179 |
+
if "no-cache" in cc:
|
| 180 |
+
logger.debug('Request header has "no-cache", cache bypassed')
|
| 181 |
+
return False
|
| 182 |
+
|
| 183 |
+
if "max-age" in cc and cc["max-age"] == 0:
|
| 184 |
+
logger.debug('Request header has "max_age" as 0, cache bypassed')
|
| 185 |
+
return False
|
| 186 |
+
|
| 187 |
+
# Check whether we can load the response from the cache:
|
| 188 |
+
resp = self._load_from_cache(request)
|
| 189 |
+
if not resp:
|
| 190 |
+
return False
|
| 191 |
+
|
| 192 |
+
# If we have a cached permanent redirect, return it immediately. We
|
| 193 |
+
# don't need to test our response for other headers b/c it is
|
| 194 |
+
# intrinsically "cacheable" as it is Permanent.
|
| 195 |
+
#
|
| 196 |
+
# See:
|
| 197 |
+
# https://tools.ietf.org/html/rfc7231#section-6.4.2
|
| 198 |
+
#
|
| 199 |
+
# Client can try to refresh the value by repeating the request
|
| 200 |
+
# with cache busting headers as usual (ie no-cache).
|
| 201 |
+
if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
|
| 202 |
+
msg = (
|
| 203 |
+
"Returning cached permanent redirect response "
|
| 204 |
+
"(ignoring date and etag information)"
|
| 205 |
+
)
|
| 206 |
+
logger.debug(msg)
|
| 207 |
+
return resp
|
| 208 |
+
|
| 209 |
+
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
|
| 210 |
+
if not headers or "date" not in headers:
|
| 211 |
+
if "etag" not in headers:
|
| 212 |
+
# Without date or etag, the cached response can never be used
|
| 213 |
+
# and should be deleted.
|
| 214 |
+
logger.debug("Purging cached response: no date or etag")
|
| 215 |
+
self.cache.delete(cache_url)
|
| 216 |
+
logger.debug("Ignoring cached response: no date")
|
| 217 |
+
return False
|
| 218 |
+
|
| 219 |
+
now = time.time()
|
| 220 |
+
time_tuple = parsedate_tz(headers["date"])
|
| 221 |
+
assert time_tuple is not None
|
| 222 |
+
date = calendar.timegm(time_tuple[:6])
|
| 223 |
+
current_age = max(0, now - date)
|
| 224 |
+
logger.debug("Current age based on date: %i", current_age)
|
| 225 |
+
|
| 226 |
+
# TODO: There is an assumption that the result will be a
|
| 227 |
+
# urllib3 response object. This may not be best since we
|
| 228 |
+
# could probably avoid instantiating or constructing the
|
| 229 |
+
# response until we know we need it.
|
| 230 |
+
resp_cc = self.parse_cache_control(headers)
|
| 231 |
+
|
| 232 |
+
# determine freshness
|
| 233 |
+
freshness_lifetime = 0
|
| 234 |
+
|
| 235 |
+
# Check the max-age pragma in the cache control header
|
| 236 |
+
max_age = resp_cc.get("max-age")
|
| 237 |
+
if max_age is not None:
|
| 238 |
+
freshness_lifetime = max_age
|
| 239 |
+
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
|
| 240 |
+
|
| 241 |
+
# If there isn't a max-age, check for an expires header
|
| 242 |
+
elif "expires" in headers:
|
| 243 |
+
expires = parsedate_tz(headers["expires"])
|
| 244 |
+
if expires is not None:
|
| 245 |
+
expire_time = calendar.timegm(expires[:6]) - date
|
| 246 |
+
freshness_lifetime = max(0, expire_time)
|
| 247 |
+
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
|
| 248 |
+
|
| 249 |
+
# Determine if we are setting freshness limit in the
|
| 250 |
+
# request. Note, this overrides what was in the response.
|
| 251 |
+
max_age = cc.get("max-age")
|
| 252 |
+
if max_age is not None:
|
| 253 |
+
freshness_lifetime = max_age
|
| 254 |
+
logger.debug(
|
| 255 |
+
"Freshness lifetime from request max-age: %i", freshness_lifetime
|
| 256 |
+
)
|
| 257 |
+
|
| 258 |
+
min_fresh = cc.get("min-fresh")
|
| 259 |
+
if min_fresh is not None:
|
| 260 |
+
# adjust our current age by our min fresh
|
| 261 |
+
current_age += min_fresh
|
| 262 |
+
logger.debug("Adjusted current age from min-fresh: %i", current_age)
|
| 263 |
+
|
| 264 |
+
# Return entry if it is fresh enough
|
| 265 |
+
if freshness_lifetime > current_age:
|
| 266 |
+
logger.debug('The response is "fresh", returning cached response')
|
| 267 |
+
logger.debug("%i > %i", freshness_lifetime, current_age)
|
| 268 |
+
return resp
|
| 269 |
+
|
| 270 |
+
# we're not fresh. If we don't have an Etag, clear it out
|
| 271 |
+
if "etag" not in headers:
|
| 272 |
+
logger.debug('The cached response is "stale" with no etag, purging')
|
| 273 |
+
self.cache.delete(cache_url)
|
| 274 |
+
|
| 275 |
+
# return the original handler
|
| 276 |
+
return False
|
| 277 |
+
|
| 278 |
+
def conditional_headers(self, request: PreparedRequest) -> dict[str, str]:
|
| 279 |
+
resp = self._load_from_cache(request)
|
| 280 |
+
new_headers = {}
|
| 281 |
+
|
| 282 |
+
if resp:
|
| 283 |
+
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
|
| 284 |
+
|
| 285 |
+
if "etag" in headers:
|
| 286 |
+
new_headers["If-None-Match"] = headers["ETag"]
|
| 287 |
+
|
| 288 |
+
if "last-modified" in headers:
|
| 289 |
+
new_headers["If-Modified-Since"] = headers["Last-Modified"]
|
| 290 |
+
|
| 291 |
+
return new_headers
|
| 292 |
+
|
| 293 |
+
def _cache_set(
|
| 294 |
+
self,
|
| 295 |
+
cache_url: str,
|
| 296 |
+
request: PreparedRequest,
|
| 297 |
+
response: HTTPResponse,
|
| 298 |
+
body: bytes | None = None,
|
| 299 |
+
expires_time: int | None = None,
|
| 300 |
+
) -> None:
|
| 301 |
+
"""
|
| 302 |
+
Store the data in the cache.
|
| 303 |
+
"""
|
| 304 |
+
if isinstance(self.cache, SeparateBodyBaseCache):
|
| 305 |
+
# We pass in the body separately; just put a placeholder empty
|
| 306 |
+
# string in the metadata.
|
| 307 |
+
self.cache.set(
|
| 308 |
+
cache_url,
|
| 309 |
+
self.serializer.dumps(request, response, b""),
|
| 310 |
+
expires=expires_time,
|
| 311 |
+
)
|
| 312 |
+
# body is None can happen when, for example, we're only updating
|
| 313 |
+
# headers, as is the case in update_cached_response().
|
| 314 |
+
if body is not None:
|
| 315 |
+
self.cache.set_body(cache_url, body)
|
| 316 |
+
else:
|
| 317 |
+
self.cache.set(
|
| 318 |
+
cache_url,
|
| 319 |
+
self.serializer.dumps(request, response, body),
|
| 320 |
+
expires=expires_time,
|
| 321 |
+
)
|
| 322 |
+
|
| 323 |
+
def cache_response(
|
| 324 |
+
self,
|
| 325 |
+
request: PreparedRequest,
|
| 326 |
+
response: HTTPResponse,
|
| 327 |
+
body: bytes | None = None,
|
| 328 |
+
status_codes: Collection[int] | None = None,
|
| 329 |
+
) -> None:
|
| 330 |
+
"""
|
| 331 |
+
Algorithm for caching requests.
|
| 332 |
+
|
| 333 |
+
This assumes a requests Response object.
|
| 334 |
+
"""
|
| 335 |
+
# From httplib2: Don't cache 206's since we aren't going to
|
| 336 |
+
# handle byte range requests
|
| 337 |
+
cacheable_status_codes = status_codes or self.cacheable_status_codes
|
| 338 |
+
if response.status not in cacheable_status_codes:
|
| 339 |
+
logger.debug(
|
| 340 |
+
"Status code %s not in %s", response.status, cacheable_status_codes
|
| 341 |
+
)
|
| 342 |
+
return
|
| 343 |
+
|
| 344 |
+
response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
|
| 345 |
+
response.headers
|
| 346 |
+
)
|
| 347 |
+
|
| 348 |
+
if "date" in response_headers:
|
| 349 |
+
time_tuple = parsedate_tz(response_headers["date"])
|
| 350 |
+
assert time_tuple is not None
|
| 351 |
+
date = calendar.timegm(time_tuple[:6])
|
| 352 |
+
else:
|
| 353 |
+
date = 0
|
| 354 |
+
|
| 355 |
+
# If we've been given a body, our response has a Content-Length, that
|
| 356 |
+
# Content-Length is valid then we can check to see if the body we've
|
| 357 |
+
# been given matches the expected size, and if it doesn't we'll just
|
| 358 |
+
# skip trying to cache it.
|
| 359 |
+
if (
|
| 360 |
+
body is not None
|
| 361 |
+
and "content-length" in response_headers
|
| 362 |
+
and response_headers["content-length"].isdigit()
|
| 363 |
+
and int(response_headers["content-length"]) != len(body)
|
| 364 |
+
):
|
| 365 |
+
return
|
| 366 |
+
|
| 367 |
+
cc_req = self.parse_cache_control(request.headers)
|
| 368 |
+
cc = self.parse_cache_control(response_headers)
|
| 369 |
+
|
| 370 |
+
assert request.url is not None
|
| 371 |
+
cache_url = self.cache_url(request.url)
|
| 372 |
+
logger.debug('Updating cache with response from "%s"', cache_url)
|
| 373 |
+
|
| 374 |
+
# Delete it from the cache if we happen to have it stored there
|
| 375 |
+
no_store = False
|
| 376 |
+
if "no-store" in cc:
|
| 377 |
+
no_store = True
|
| 378 |
+
logger.debug('Response header has "no-store"')
|
| 379 |
+
if "no-store" in cc_req:
|
| 380 |
+
no_store = True
|
| 381 |
+
logger.debug('Request header has "no-store"')
|
| 382 |
+
if no_store and self.cache.get(cache_url):
|
| 383 |
+
logger.debug('Purging existing cache entry to honor "no-store"')
|
| 384 |
+
self.cache.delete(cache_url)
|
| 385 |
+
if no_store:
|
| 386 |
+
return
|
| 387 |
+
|
| 388 |
+
# https://tools.ietf.org/html/rfc7234#section-4.1:
|
| 389 |
+
# A Vary header field-value of "*" always fails to match.
|
| 390 |
+
# Storing such a response leads to a deserialization warning
|
| 391 |
+
# during cache lookup and is not allowed to ever be served,
|
| 392 |
+
# so storing it can be avoided.
|
| 393 |
+
if "*" in response_headers.get("vary", ""):
|
| 394 |
+
logger.debug('Response header has "Vary: *"')
|
| 395 |
+
return
|
| 396 |
+
|
| 397 |
+
# If we've been given an etag, then keep the response
|
| 398 |
+
if self.cache_etags and "etag" in response_headers:
|
| 399 |
+
expires_time = 0
|
| 400 |
+
if response_headers.get("expires"):
|
| 401 |
+
expires = parsedate_tz(response_headers["expires"])
|
| 402 |
+
if expires is not None:
|
| 403 |
+
expires_time = calendar.timegm(expires[:6]) - date
|
| 404 |
+
|
| 405 |
+
expires_time = max(expires_time, 14 * 86400)
|
| 406 |
+
|
| 407 |
+
logger.debug(f"etag object cached for {expires_time} seconds")
|
| 408 |
+
logger.debug("Caching due to etag")
            self._cache_set(cache_url, request, response, body, expires_time)

        # Add to the cache any permanent redirects. We do this before looking
        # at the Date headers.
        elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
            logger.debug("Caching permanent redirect")
            self._cache_set(cache_url, request, response, b"")

        # Add to the cache if the response headers demand it. If there
        # is no date header then we can't do anything about expiring
        # the cache.
        elif "date" in response_headers:
            time_tuple = parsedate_tz(response_headers["date"])
            assert time_tuple is not None
            date = calendar.timegm(time_tuple[:6])
            # cache when there is a max-age > 0
            max_age = cc.get("max-age")
            if max_age is not None and max_age > 0:
                logger.debug("Caching b/c date exists and max-age > 0")
                expires_time = max_age
                self._cache_set(
                    cache_url,
                    request,
                    response,
                    body,
                    expires_time,
                )

            # If the request can expire, it means we should cache it
            # in the meantime.
            elif "expires" in response_headers:
                if response_headers["expires"]:
                    expires = parsedate_tz(response_headers["expires"])
                    if expires is not None:
                        expires_time = calendar.timegm(expires[:6]) - date
                    else:
                        expires_time = None

                    logger.debug(
                        "Caching b/c of expires header. expires in {} seconds".format(
                            expires_time
                        )
                    )
                    self._cache_set(
                        cache_url,
                        request,
                        response,
                        body,
                        expires_time,
                    )

    def update_cached_response(
        self, request: PreparedRequest, response: HTTPResponse
    ) -> HTTPResponse:
        """On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        """
        assert request.url is not None
        cache_url = self.cache_url(request.url)
        cached_response = self._load_from_cache(request)

        if not cached_response:
            # we didn't have a cached response
            return response

        # Let's update our headers with the headers from the new request:
        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
        #
        # The server isn't supposed to send headers that would make
        # the cached body invalid. But... just in case, we'll be sure
        # to strip out ones we know that might be problematic due to
        # typical assumptions.
        excluded_headers = ["content-length"]

        cached_response.headers.update(
            {
                k: v
                for k, v in response.headers.items()
                if k.lower() not in excluded_headers
            }
        )

        # we want a 200 b/c we have content via the cache
        cached_response.status = 200

        # update our cache
        self._cache_set(cache_url, request, cached_response)

        return cached_response
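The 304 path above is reached through the ordinary requests API once the caching adapter is mounted. A minimal sketch, not part of the vendored files, assuming a server that returns an ETag and answers conditional requests with 304:

from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControl

sess = CacheControl(requests.Session())
r1 = sess.get("https://example.org/resource")  # cached together with its ETag
r2 = sess.get("https://example.org/resource")  # sends If-None-Match; a 304
# reply is rewritten by update_cached_response(): headers refreshed, status
# forced to 200, and the cached body is returned to the caller.
assert r2.status_code == 200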
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py
ADDED
@@ -0,0 +1,119 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import mmap
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Any, Callable

if TYPE_CHECKING:
    from http.client import HTTPResponse


class CallbackFileWrapper:
    """
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.

    The data is stored in a temporary file until it is all available. As long
    as the temporary files directory is disk-based (sometimes it's a
    memory-backed ``tmpfs`` on Linux), data will be unloaded to disk if memory
    pressure is high. For small files the disk usually won't be used at all,
    it'll all be in the filesystem memory cache, so there should be no
    performance impact.
    """

    def __init__(
        self, fp: HTTPResponse, callback: Callable[[bytes], None] | None
    ) -> None:
        self.__buf = NamedTemporaryFile("rb+", delete=True)
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name: str) -> Any:
        # The vagaries of garbage collection mean that self.__fp is
        # not always set. By using __getattribute__ and the private
        # name[0] allows looking up the attribute value and raising an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing calls to getattr in the case where
        # self.__fp hasn't been set.
        #
        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__("_CallbackFileWrapper__fp")
        return getattr(fp, name)

    def __is_fp_closed(self) -> bool:
        try:
            return self.__fp.fp is None

        except AttributeError:
            pass

        try:
            closed: bool = self.__fp.closed
            return closed

        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self) -> None:
        if self.__callback:
            if self.__buf.tell() == 0:
                # Empty file:
                result = b""
            else:
                # Return the data without actually loading it into memory,
                # relying on Python's buffer API and mmap(). mmap() just gives
                # a view directly into the filesystem's memory cache, so it
                # doesn't result in duplicate memory use.
                self.__buf.seek(0, 0)
                result = memoryview(
                    mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
                )
            self.__callback(result)

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do its thing normally.
        self.__callback = None

        # Closing the temporary file releases memory and frees disk space.
        # Important when caching big files.
        self.__buf.close()

    def read(self, amt: int | None = None) -> bytes:
        data: bytes = self.__fp.read(amt)
        if data:
            # We may be dealing with b'', a sign that things are over:
            # it's passed e.g. after we've already closed self.__buf.
            self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data

    def _safe_read(self, amt: int) -> bytes:
        data: bytes = self.__fp._safe_read(amt)  # type: ignore[attr-defined]
        if amt == 2 and data == b"\r\n":
            # urllib executes this read to toss the CRLF at the end
            # of the chunk.
            return data

        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py
ADDED
@@ -0,0 +1,157 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import calendar
import time
from datetime import datetime, timedelta, timezone
from email.utils import formatdate, parsedate, parsedate_tz
from typing import TYPE_CHECKING, Any, Mapping

if TYPE_CHECKING:
    from pip._vendor.urllib3 import HTTPResponse

TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"


def expire_after(delta: timedelta, date: datetime | None = None) -> datetime:
    date = date or datetime.now(timezone.utc)
    return date + delta


def datetime_to_header(dt: datetime) -> str:
    return formatdate(calendar.timegm(dt.timetuple()))


class BaseHeuristic:
    def warning(self, response: HTTPResponse) -> str | None:
        """
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided to allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say the response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response: HTTPResponse) -> dict[str, str]:
        """Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
        signify that the response was cached by the client, not
        by way of the provided headers.
        """
        return {}

    def apply(self, response: HTTPResponse) -> HTTPResponse:
        updated_headers = self.update_headers(response)

        if updated_headers:
            response.headers.update(updated_headers)
            warning_header_value = self.warning(response)
            if warning_header_value is not None:
                response.headers.update({"Warning": warning_header_value})

        return response


class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires header 1 day in the
    future.
    """

    def update_headers(self, response: HTTPResponse) -> dict[str, str]:
        headers = {}

        if "expires" not in response.headers:
            date = parsedate(response.headers["date"])
            expires = expire_after(
                timedelta(days=1),
                date=datetime(*date[:6], tzinfo=timezone.utc),  # type: ignore[index,misc]
            )
            headers["expires"] = datetime_to_header(expires)
            headers["cache-control"] = "public"
        return headers


class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw: Any) -> None:
        self.delta = timedelta(**kw)

    def update_headers(self, response: HTTPResponse) -> dict[str, str]:
        expires = expire_after(self.delta)
        return {"expires": datetime_to_header(expires), "cache-control": "public"}

    def warning(self, response: HTTPResponse) -> str | None:
        tmpl = "110 - Automatically cached for %s. Response might be stale"
        return tmpl % self.delta


class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """

    cacheable_by_default_statuses = {
        200,
        203,
        204,
        206,
        300,
        301,
        404,
        405,
        410,
        414,
        501,
    }

    def update_headers(self, resp: HTTPResponse) -> dict[str, str]:
        headers: Mapping[str, str] = resp.headers

        if "expires" in headers:
            return {}

        if "cache-control" in headers and headers["cache-control"] != "public":
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if "date" not in headers or "last-modified" not in headers:
            return {}

        time_tuple = parsedate_tz(headers["date"])
        assert time_tuple is not None
        date = calendar.timegm(time_tuple[:6])
        last_modified = parsedate(headers["last-modified"])
        if last_modified is None:
            return {}

        now = time.time()
        current_age = max(0, now - date)
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp: HTTPResponse) -> str | None:
        return None
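These heuristics plug into the session wrapper defined in wrapper.py below. A minimal sketch, not part of the vendored files, caching every response for an hour via `ExpiresAfter`:

from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControl
from pip._vendor.cachecontrol.heuristics import ExpiresAfter

# ExpiresAfter forwards its kwargs to datetime.timedelta, so any of
# timedelta's keyword arguments (hours=, days=, ...) work here.
sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(hours=1))

# Every response now gets synthetic `expires` / `cache-control: public`
# headers plus a Warning header, making it cacheable for one hour even
# if the server sent no caching headers at all.
resp = sess.get("https://example.org/")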
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/py.typed
ADDED
File without changes
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py
ADDED
@@ -0,0 +1,146 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

import io
from typing import IO, TYPE_CHECKING, Any, Mapping, cast

from pip._vendor import msgpack
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.urllib3 import HTTPResponse

if TYPE_CHECKING:
    from pip._vendor.requests import PreparedRequest


class Serializer:
    serde_version = "4"

    def dumps(
        self,
        request: PreparedRequest,
        response: HTTPResponse,
        body: bytes | None = None,
    ) -> bytes:
        response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
            response.headers
        )

        if body is None:
            # When a body isn't passed in, we'll read the response. We
            # also update the response with a new file handler to be
            # sure it acts as though it was never read.
            body = response.read(decode_content=False)
            response._fp = io.BytesIO(body)  # type: ignore[assignment]
            response.length_remaining = len(body)

        data = {
            "response": {
                "body": body,  # Empty bytestring if body is stored separately
                "headers": {str(k): str(v) for k, v in response.headers.items()},
                "status": response.status,
                "version": response.version,
                "reason": str(response.reason),
                "decode_content": response.decode_content,
            }
        }

        # Construct our vary headers
        data["vary"] = {}
        if "vary" in response_headers:
            varied_headers = response_headers["vary"].split(",")
            for header in varied_headers:
                header = str(header).strip()
                header_value = request.headers.get(header, None)
                if header_value is not None:
                    header_value = str(header_value)
                data["vary"][header] = header_value

        return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)])

    def serialize(self, data: dict[str, Any]) -> bytes:
        return cast(bytes, msgpack.dumps(data, use_bin_type=True))

    def loads(
        self,
        request: PreparedRequest,
        data: bytes,
        body_file: IO[bytes] | None = None,
    ) -> HTTPResponse | None:
        # Short circuit if we've been given an empty set of data
        if not data:
            return None

        # Previous versions of this library supported other serialization
        # formats, but these have all been removed.
        if not data.startswith(f"cc={self.serde_version},".encode()):
            return None

        data = data[5:]
        return self._loads_v4(request, data, body_file)

    def prepare_response(
        self,
        request: PreparedRequest,
        cached: Mapping[str, Any],
        body_file: IO[bytes] | None = None,
    ) -> HTTPResponse | None:
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        # This case is also handled in the controller code when creating
        # a cache entry, but is left here for backwards compatibility.
        if "*" in cached.get("vary", {}):
            return None

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return None

        body_raw = cached["response"].pop("body")

        headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
            data=cached["response"]["headers"]
        )
        if headers.get("transfer-encoding", "") == "chunked":
            headers.pop("transfer-encoding")

        cached["response"]["headers"] = headers

        try:
            body: IO[bytes]
            if body_file is None:
                body = io.BytesIO(body_raw)
            else:
                body = body_file
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode("utf8"))

        # Discard any `strict` parameter serialized by older versions of cachecontrol.
        cached["response"].pop("strict", None)

        return HTTPResponse(body=body, preload_content=False, **cached["response"])

    def _loads_v4(
        self,
        request: PreparedRequest,
        data: bytes,
        body_file: IO[bytes] | None = None,
    ) -> HTTPResponse | None:
        try:
            cached = msgpack.loads(data, raw=False)
        except ValueError:
            return None

        return self.prepare_response(request, cached, body_file)
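The version-prefixed msgpack format round-trips as follows. A sketch, not from the diff: the `HTTPResponse` here is built by hand against the vendored requests/urllib3, whereas in practice the controller serializes real responses:

import io
from pip._vendor.requests import Request
from pip._vendor.urllib3 import HTTPResponse
from pip._vendor.cachecontrol.serialize import Serializer

req = Request("GET", "https://example.org/").prepare()
resp = HTTPResponse(
    body=io.BytesIO(b"payload"),
    headers={"content-type": "text/plain"},
    status=200,
    preload_content=False,
)

s = Serializer()
blob = s.dumps(req, resp)      # b"cc=4," prefix + msgpack payload
restored = s.loads(req, blob)  # returns None on a version or Vary mismatch
assert restored is not None and restored.read() == b"payload"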
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py
ADDED
@@ -0,0 +1,43 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations

from typing import TYPE_CHECKING, Collection

from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache

if TYPE_CHECKING:
    from pip._vendor import requests

    from pip._vendor.cachecontrol.cache import BaseCache
    from pip._vendor.cachecontrol.controller import CacheController
    from pip._vendor.cachecontrol.heuristics import BaseHeuristic
    from pip._vendor.cachecontrol.serialize import Serializer


def CacheControl(
    sess: requests.Session,
    cache: BaseCache | None = None,
    cache_etags: bool = True,
    serializer: Serializer | None = None,
    heuristic: BaseHeuristic | None = None,
    controller_class: type[CacheController] | None = None,
    adapter_class: type[CacheControlAdapter] | None = None,
    cacheable_methods: Collection[str] | None = None,
) -> requests.Session:
    cache = DictCache() if cache is None else cache
    adapter_class = adapter_class or CacheControlAdapter
    adapter = adapter_class(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
        controller_class=controller_class,
        cacheable_methods=cacheable_methods,
    )
    sess.mount("http://", adapter)
    sess.mount("https://", adapter)

    return sess
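Typical use of this wrapper is to mount the caching adapter on an ordinary requests session. A minimal sketch, not part of the diff; the `FileCache` backend comes from the caches/file_cache.py module added above:

from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControl
from pip._vendor.cachecontrol.caches.file_cache import FileCache

# With no `cache` argument an in-memory DictCache is used; a FileCache
# persists entries across processes in the given directory.
sess = CacheControl(requests.Session(), cache=FileCache(".webcache"))
resp = sess.get("https://example.org/")  # repeat calls may be served from disk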
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__init__.py
ADDED
@@ -0,0 +1,54 @@
from .distro import (
    NORMALIZED_DISTRO_ID,
    NORMALIZED_LSB_ID,
    NORMALIZED_OS_ID,
    LinuxDistribution,
    __version__,
    build_number,
    codename,
    distro_release_attr,
    distro_release_info,
    id,
    info,
    like,
    linux_distribution,
    lsb_release_attr,
    lsb_release_info,
    major_version,
    minor_version,
    name,
    os_release_attr,
    os_release_info,
    uname_attr,
    uname_info,
    version,
    version_parts,
)

__all__ = [
    "NORMALIZED_DISTRO_ID",
    "NORMALIZED_LSB_ID",
    "NORMALIZED_OS_ID",
    "LinuxDistribution",
    "build_number",
    "codename",
    "distro_release_attr",
    "distro_release_info",
    "id",
    "info",
    "like",
    "linux_distribution",
    "lsb_release_attr",
    "lsb_release_info",
    "major_version",
    "minor_version",
    "name",
    "os_release_attr",
    "os_release_info",
    "uname_attr",
    "uname_info",
    "version",
    "version_parts",
]

__version__ = __version__
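A quick sketch of the re-exported API, not part of the vendored files; the printed values depend on the host system:

from pip._vendor import distro

print(distro.id())       # e.g. "ubuntu" (normalized, machine-readable)
print(distro.version())  # e.g. "22.04"
print(distro.info())     # consolidated dict: id, version, version_parts, like, codename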
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__main__.py
ADDED
@@ -0,0 +1,4 @@
from .distro import main

if __name__ == "__main__":
    main()
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (902 Bytes)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-310.pyc
ADDED
Binary file (256 Bytes)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-310.pyc
ADDED
Binary file (42.1 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/distro.py
ADDED
@@ -0,0 +1,1403 @@
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
# Copyright 2015-2021 Nir Cohen
|
| 3 |
+
#
|
| 4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
| 5 |
+
# you may not use this file except in compliance with the License.
|
| 6 |
+
# You may obtain a copy of the License at
|
| 7 |
+
#
|
| 8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
| 9 |
+
#
|
| 10 |
+
# Unless required by applicable law or agreed to in writing, software
|
| 11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
| 12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 13 |
+
# See the License for the specific language governing permissions and
|
| 14 |
+
# limitations under the License.
|
| 15 |
+
|
| 16 |
+
"""
|
| 17 |
+
The ``distro`` package (``distro`` stands for Linux Distribution) provides
|
| 18 |
+
information about the Linux distribution it runs on, such as a reliable
|
| 19 |
+
machine-readable distro ID, or version information.
|
| 20 |
+
|
| 21 |
+
It is the recommended replacement for Python's original
|
| 22 |
+
:py:func:`platform.linux_distribution` function, but it provides much more
|
| 23 |
+
functionality. An alternative implementation became necessary because Python
|
| 24 |
+
3.5 deprecated this function, and Python 3.8 removed it altogether. Its
|
| 25 |
+
predecessor function :py:func:`platform.dist` was already deprecated since
|
| 26 |
+
Python 2.6 and removed in Python 3.8. Still, there are many cases in which
|
| 27 |
+
access to OS distribution information is needed. See `Python issue 1322
|
| 28 |
+
<https://bugs.python.org/issue1322>`_ for more information.
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
import argparse
|
| 32 |
+
import json
|
| 33 |
+
import logging
|
| 34 |
+
import os
|
| 35 |
+
import re
|
| 36 |
+
import shlex
|
| 37 |
+
import subprocess
|
| 38 |
+
import sys
|
| 39 |
+
import warnings
|
| 40 |
+
from typing import (
|
| 41 |
+
Any,
|
| 42 |
+
Callable,
|
| 43 |
+
Dict,
|
| 44 |
+
Iterable,
|
| 45 |
+
Optional,
|
| 46 |
+
Sequence,
|
| 47 |
+
TextIO,
|
| 48 |
+
Tuple,
|
| 49 |
+
Type,
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
try:
|
| 53 |
+
from typing import TypedDict
|
| 54 |
+
except ImportError:
|
| 55 |
+
# Python 3.7
|
| 56 |
+
TypedDict = dict
|
| 57 |
+
|
| 58 |
+
__version__ = "1.9.0"
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class VersionDict(TypedDict):
|
| 62 |
+
major: str
|
| 63 |
+
minor: str
|
| 64 |
+
build_number: str
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class InfoDict(TypedDict):
|
| 68 |
+
id: str
|
| 69 |
+
version: str
|
| 70 |
+
version_parts: VersionDict
|
| 71 |
+
like: str
|
| 72 |
+
codename: str
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
|
| 76 |
+
_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
|
| 77 |
+
_OS_RELEASE_BASENAME = "os-release"
|
| 78 |
+
|
| 79 |
+
#: Translation table for normalizing the "ID" attribute defined in os-release
|
| 80 |
+
#: files, for use by the :func:`distro.id` method.
|
| 81 |
+
#:
|
| 82 |
+
#: * Key: Value as defined in the os-release file, translated to lower case,
|
| 83 |
+
#: with blanks translated to underscores.
|
| 84 |
+
#:
|
| 85 |
+
#: * Value: Normalized value.
|
| 86 |
+
NORMALIZED_OS_ID = {
|
| 87 |
+
"ol": "oracle", # Oracle Linux
|
| 88 |
+
"opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
#: Translation table for normalizing the "Distributor ID" attribute returned by
|
| 92 |
+
#: the lsb_release command, for use by the :func:`distro.id` method.
|
| 93 |
+
#:
|
| 94 |
+
#: * Key: Value as returned by the lsb_release command, translated to lower
|
| 95 |
+
#: case, with blanks translated to underscores.
|
| 96 |
+
#:
|
| 97 |
+
#: * Value: Normalized value.
|
| 98 |
+
NORMALIZED_LSB_ID = {
|
| 99 |
+
"enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4
|
| 100 |
+
"enterpriseenterpriseserver": "oracle", # Oracle Linux 5
|
| 101 |
+
"redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation
|
| 102 |
+
"redhatenterpriseserver": "rhel", # RHEL 6, 7 Server
|
| 103 |
+
"redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
#: Translation table for normalizing the distro ID derived from the file name
|
| 107 |
+
#: of distro release files, for use by the :func:`distro.id` method.
|
| 108 |
+
#:
|
| 109 |
+
#: * Key: Value as derived from the file name of a distro release file,
|
| 110 |
+
#: translated to lower case, with blanks translated to underscores.
|
| 111 |
+
#:
|
| 112 |
+
#: * Value: Normalized value.
|
| 113 |
+
NORMALIZED_DISTRO_ID = {
|
| 114 |
+
"redhat": "rhel", # RHEL 6.x, 7.x
|
| 115 |
+
}
|
| 116 |
+
|
| 117 |
+
# Pattern for content of distro release file (reversed)
|
| 118 |
+
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
|
| 119 |
+
r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
|
| 120 |
+
)
|
| 121 |
+
|
| 122 |
+
# Pattern for base file name of distro release file
|
| 123 |
+
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
|
| 124 |
+
|
| 125 |
+
# Base file names to be looked up for if _UNIXCONFDIR is not readable.
|
| 126 |
+
_DISTRO_RELEASE_BASENAMES = [
|
| 127 |
+
"SuSE-release",
|
| 128 |
+
"altlinux-release",
|
| 129 |
+
"arch-release",
|
| 130 |
+
"base-release",
|
| 131 |
+
"centos-release",
|
| 132 |
+
"fedora-release",
|
| 133 |
+
"gentoo-release",
|
| 134 |
+
"mageia-release",
|
| 135 |
+
"mandrake-release",
|
| 136 |
+
"mandriva-release",
|
| 137 |
+
"mandrivalinux-release",
|
| 138 |
+
"manjaro-release",
|
| 139 |
+
"oracle-release",
|
| 140 |
+
"redhat-release",
|
| 141 |
+
"rocky-release",
|
| 142 |
+
"sl-release",
|
| 143 |
+
"slackware-version",
|
| 144 |
+
]
|
| 145 |
+
|
| 146 |
+
# Base file names to be ignored when searching for distro release file
|
| 147 |
+
_DISTRO_RELEASE_IGNORE_BASENAMES = (
|
| 148 |
+
"debian_version",
|
| 149 |
+
"lsb-release",
|
| 150 |
+
"oem-release",
|
| 151 |
+
_OS_RELEASE_BASENAME,
|
| 152 |
+
"system-release",
|
| 153 |
+
"plesk-release",
|
| 154 |
+
"iredmail-release",
|
| 155 |
+
"board-release",
|
| 156 |
+
"ec2_version",
|
| 157 |
+
)
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]:
|
| 161 |
+
"""
|
| 162 |
+
.. deprecated:: 1.6.0
|
| 163 |
+
|
| 164 |
+
:func:`distro.linux_distribution()` is deprecated. It should only be
|
| 165 |
+
used as a compatibility shim with Python's
|
| 166 |
+
:py:func:`platform.linux_distribution()`. Please use :func:`distro.id`,
|
| 167 |
+
:func:`distro.version` and :func:`distro.name` instead.
|
| 168 |
+
|
| 169 |
+
Return information about the current OS distribution as a tuple
|
| 170 |
+
``(id_name, version, codename)`` with items as follows:
|
| 171 |
+
|
| 172 |
+
* ``id_name``: If *full_distribution_name* is false, the result of
|
| 173 |
+
:func:`distro.id`. Otherwise, the result of :func:`distro.name`.
|
| 174 |
+
|
| 175 |
+
* ``version``: The result of :func:`distro.version`.
|
| 176 |
+
|
| 177 |
+
* ``codename``: The extra item (usually in parentheses) after the
|
| 178 |
+
os-release version number, or the result of :func:`distro.codename`.
|
| 179 |
+
|
| 180 |
+
The interface of this function is compatible with the original
|
| 181 |
+
:py:func:`platform.linux_distribution` function, supporting a subset of
|
| 182 |
+
its parameters.
|
| 183 |
+
|
| 184 |
+
The data it returns may not exactly be the same, because it uses more data
|
| 185 |
+
sources than the original function, and that may lead to different data if
|
| 186 |
+
the OS distribution is not consistent across multiple data sources it
|
| 187 |
+
provides (there are indeed such distributions ...).
|
| 188 |
+
|
| 189 |
+
Another reason for differences is the fact that the :func:`distro.id`
|
| 190 |
+
method normalizes the distro ID string to a reliable machine-readable value
|
| 191 |
+
for a number of popular OS distributions.
|
| 192 |
+
"""
|
| 193 |
+
warnings.warn(
|
| 194 |
+
"distro.linux_distribution() is deprecated. It should only be used as a "
|
| 195 |
+
"compatibility shim with Python's platform.linux_distribution(). Please use "
|
| 196 |
+
"distro.id(), distro.version() and distro.name() instead.",
|
| 197 |
+
DeprecationWarning,
|
| 198 |
+
stacklevel=2,
|
| 199 |
+
)
|
| 200 |
+
return _distro.linux_distribution(full_distribution_name)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def id() -> str:
|
| 204 |
+
"""
|
| 205 |
+
Return the distro ID of the current distribution, as a
|
| 206 |
+
machine-readable string.
|
| 207 |
+
|
| 208 |
+
For a number of OS distributions, the returned distro ID value is
|
| 209 |
+
*reliable*, in the sense that it is documented and that it does not change
|
| 210 |
+
across releases of the distribution.
|
| 211 |
+
|
| 212 |
+
This package maintains the following reliable distro ID values:
|
| 213 |
+
|
| 214 |
+
============== =========================================
|
| 215 |
+
Distro ID Distribution
|
| 216 |
+
============== =========================================
|
| 217 |
+
"ubuntu" Ubuntu
|
| 218 |
+
"debian" Debian
|
| 219 |
+
"rhel" RedHat Enterprise Linux
|
| 220 |
+
"centos" CentOS
|
| 221 |
+
"fedora" Fedora
|
| 222 |
+
"sles" SUSE Linux Enterprise Server
|
| 223 |
+
"opensuse" openSUSE
|
| 224 |
+
"amzn" Amazon Linux
|
| 225 |
+
"arch" Arch Linux
|
| 226 |
+
"buildroot" Buildroot
|
| 227 |
+
"cloudlinux" CloudLinux OS
|
| 228 |
+
"exherbo" Exherbo Linux
|
| 229 |
+
"gentoo" GenToo Linux
|
| 230 |
+
"ibm_powerkvm" IBM PowerKVM
|
| 231 |
+
"kvmibm" KVM for IBM z Systems
|
| 232 |
+
"linuxmint" Linux Mint
|
| 233 |
+
"mageia" Mageia
|
| 234 |
+
"mandriva" Mandriva Linux
|
| 235 |
+
"parallels" Parallels
|
| 236 |
+
"pidora" Pidora
|
| 237 |
+
"raspbian" Raspbian
|
| 238 |
+
"oracle" Oracle Linux (and Oracle Enterprise Linux)
|
| 239 |
+
"scientific" Scientific Linux
|
| 240 |
+
"slackware" Slackware
|
| 241 |
+
"xenserver" XenServer
|
| 242 |
+
"openbsd" OpenBSD
|
| 243 |
+
"netbsd" NetBSD
|
| 244 |
+
"freebsd" FreeBSD
|
| 245 |
+
"midnightbsd" MidnightBSD
|
| 246 |
+
"rocky" Rocky Linux
|
| 247 |
+
"aix" AIX
|
| 248 |
+
"guix" Guix System
|
| 249 |
+
"altlinux" ALT Linux
|
| 250 |
+
============== =========================================
|
| 251 |
+
|
| 252 |
+
If you have a need to get distros for reliable IDs added into this set,
|
| 253 |
+
or if you find that the :func:`distro.id` function returns a different
|
| 254 |
+
distro ID for one of the listed distros, please create an issue in the
|
| 255 |
+
`distro issue tracker`_.
|
| 256 |
+
|
| 257 |
+
**Lookup hierarchy and transformations:**
|
| 258 |
+
|
| 259 |
+
First, the ID is obtained from the following sources, in the specified
|
| 260 |
+
order. The first available and non-empty value is used:
|
| 261 |
+
|
| 262 |
+
* the value of the "ID" attribute of the os-release file,
|
| 263 |
+
|
| 264 |
+
* the value of the "Distributor ID" attribute returned by the lsb_release
|
| 265 |
+
command,
|
| 266 |
+
|
| 267 |
+
* the first part of the file name of the distro release file,
|
| 268 |
+
|
| 269 |
+
The so determined ID value then passes the following transformations,
|
| 270 |
+
before it is returned by this method:
|
| 271 |
+
|
| 272 |
+
* it is translated to lower case,
|
| 273 |
+
|
| 274 |
+
* blanks (which should not be there anyway) are translated to underscores,
|
| 275 |
+
|
| 276 |
+
* a normalization of the ID is performed, based upon
|
| 277 |
+
`normalization tables`_. The purpose of this normalization is to ensure
|
| 278 |
+
that the ID is as reliable as possible, even across incompatible changes
|
| 279 |
+
in the OS distributions. A common reason for an incompatible change is
|
| 280 |
+
the addition of an os-release file, or the addition of the lsb_release
|
| 281 |
+
command, with ID values that differ from what was previously determined
|
| 282 |
+
from the distro release file name.
|
| 283 |
+
"""
|
| 284 |
+
return _distro.id()
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
def name(pretty: bool = False) -> str:
|
| 288 |
+
"""
|
| 289 |
+
Return the name of the current OS distribution, as a human-readable
|
| 290 |
+
string.
|
| 291 |
+
|
| 292 |
+
If *pretty* is false, the name is returned without version or codename.
|
| 293 |
+
(e.g. "CentOS Linux")
|
| 294 |
+
|
| 295 |
+
If *pretty* is true, the version and codename are appended.
|
| 296 |
+
(e.g. "CentOS Linux 7.1.1503 (Core)")
|
| 297 |
+
|
| 298 |
+
**Lookup hierarchy:**
|
| 299 |
+
|
| 300 |
+
The name is obtained from the following sources, in the specified order.
|
| 301 |
+
The first available and non-empty value is used:
|
| 302 |
+
|
| 303 |
+
* If *pretty* is false:
|
| 304 |
+
|
| 305 |
+
- the value of the "NAME" attribute of the os-release file,
|
| 306 |
+
|
| 307 |
+
- the value of the "Distributor ID" attribute returned by the lsb_release
|
| 308 |
+
command,
|
| 309 |
+
|
| 310 |
+
- the value of the "<name>" field of the distro release file.
|
| 311 |
+
|
| 312 |
+
* If *pretty* is true:
|
| 313 |
+
|
| 314 |
+
- the value of the "PRETTY_NAME" attribute of the os-release file,
|
| 315 |
+
|
| 316 |
+
- the value of the "Description" attribute returned by the lsb_release
|
| 317 |
+
command,
|
| 318 |
+
|
| 319 |
+
- the value of the "<name>" field of the distro release file, appended
|
| 320 |
+
with the value of the pretty version ("<version_id>" and "<codename>"
|
| 321 |
+
fields) of the distro release file, if available.
|
| 322 |
+
"""
|
| 323 |
+
return _distro.name(pretty)
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
def version(pretty: bool = False, best: bool = False) -> str:
|
| 327 |
+
"""
|
| 328 |
+
Return the version of the current OS distribution, as a human-readable
|
| 329 |
+
string.
|
| 330 |
+
|
| 331 |
+
If *pretty* is false, the version is returned without codename (e.g.
|
| 332 |
+
"7.0").
|
| 333 |
+
|
| 334 |
+
If *pretty* is true, the codename in parenthesis is appended, if the
|
| 335 |
+
codename is non-empty (e.g. "7.0 (Maipo)").
|
| 336 |
+
|
| 337 |
+
Some distributions provide version numbers with different precisions in
|
| 338 |
+
the different sources of distribution information. Examining the different
|
| 339 |
+
sources in a fixed priority order does not always yield the most precise
|
| 340 |
+
version (e.g. for Debian 8.2, or CentOS 7.1).
|
| 341 |
+
|
| 342 |
+
Some other distributions may not provide this kind of information. In these
|
| 343 |
+
cases, an empty string would be returned. This behavior can be observed
|
| 344 |
+
with rolling releases distributions (e.g. Arch Linux).
|
| 345 |
+
|
| 346 |
+
The *best* parameter can be used to control the approach for the returned
|
| 347 |
+
version:
|
| 348 |
+
|
| 349 |
+
If *best* is false, the first non-empty version number in priority order of
|
| 350 |
+
the examined sources is returned.
|
| 351 |
+
|
| 352 |
+
If *best* is true, the most precise version number out of all examined
|
| 353 |
+
sources is returned.
|
| 354 |
+
|
| 355 |
+
**Lookup hierarchy:**
|
| 356 |
+
|
| 357 |
+
In all cases, the version number is obtained from the following sources.
|
| 358 |
+
If *best* is false, this order represents the priority order:
|
| 359 |
+
|
| 360 |
+
* the value of the "VERSION_ID" attribute of the os-release file,
|
| 361 |
+
* the value of the "Release" attribute returned by the lsb_release
|
| 362 |
+
command,
|
| 363 |
+
* the version number parsed from the "<version_id>" field of the first line
|
| 364 |
+
of the distro release file,
|
| 365 |
+
* the version number parsed from the "PRETTY_NAME" attribute of the
|
| 366 |
+
os-release file, if it follows the format of the distro release files.
|
| 367 |
+
* the version number parsed from the "Description" attribute returned by
|
| 368 |
+
the lsb_release command, if it follows the format of the distro release
|
| 369 |
+
files.
|
| 370 |
+
"""
|
| 371 |
+
return _distro.version(pretty, best)
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def version_parts(best: bool = False) -> Tuple[str, str, str]:
|
| 375 |
+
"""
|
| 376 |
+
Return the version of the current OS distribution as a tuple
|
| 377 |
+
``(major, minor, build_number)`` with items as follows:
|
| 378 |
+
|
| 379 |
+
* ``major``: The result of :func:`distro.major_version`.
|
| 380 |
+
|
| 381 |
+
* ``minor``: The result of :func:`distro.minor_version`.
|
| 382 |
+
|
| 383 |
+
* ``build_number``: The result of :func:`distro.build_number`.
|
| 384 |
+
|
| 385 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
| 386 |
+
method.
|
| 387 |
+
"""
|
| 388 |
+
return _distro.version_parts(best)
|
| 389 |
+
|
| 390 |
+
|
| 391 |
+
def major_version(best: bool = False) -> str:
|
| 392 |
+
"""
|
| 393 |
+
Return the major version of the current OS distribution, as a string,
|
| 394 |
+
if provided.
|
| 395 |
+
Otherwise, the empty string is returned. The major version is the first
|
| 396 |
+
part of the dot-separated version string.
|
| 397 |
+
|
| 398 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
| 399 |
+
method.
|
| 400 |
+
"""
|
| 401 |
+
return _distro.major_version(best)
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
def minor_version(best: bool = False) -> str:
|
| 405 |
+
"""
|
| 406 |
+
Return the minor version of the current OS distribution, as a string,
|
| 407 |
+
if provided.
|
| 408 |
+
Otherwise, the empty string is returned. The minor version is the second
|
| 409 |
+
part of the dot-separated version string.
|
| 410 |
+
|
| 411 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
| 412 |
+
method.
|
| 413 |
+
"""
|
| 414 |
+
return _distro.minor_version(best)
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def build_number(best: bool = False) -> str:
|
| 418 |
+
"""
|
| 419 |
+
Return the build number of the current OS distribution, as a string,
|
| 420 |
+
if provided.
|
| 421 |
+
Otherwise, the empty string is returned. The build number is the third part
|
| 422 |
+
of the dot-separated version string.
|
| 423 |
+
|
| 424 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
| 425 |
+
method.
|
| 426 |
+
"""
|
| 427 |
+
return _distro.build_number(best)
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
def like() -> str:
|
| 431 |
+
"""
|
| 432 |
+
Return a space-separated list of distro IDs of distributions that are
|
| 433 |
+
closely related to the current OS distribution in regards to packaging
|
| 434 |
+
and programming interfaces, for example distributions the current
|
| 435 |
+
distribution is a derivative from.
|
| 436 |
+
|
| 437 |
+
**Lookup hierarchy:**
|
| 438 |
+
|
| 439 |
+
This information item is only provided by the os-release file.
|
| 440 |
+
For details, see the description of the "ID_LIKE" attribute in the
|
| 441 |
+
`os-release man page
|
| 442 |
+
<http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
|
| 443 |
+
"""
|
| 444 |
+
return _distro.like()
|
| 445 |
+
|
| 446 |
+
|
| 447 |
+
def codename() -> str:
|
| 448 |
+
"""
|
| 449 |
+
Return the codename for the release of the current OS distribution,
|
| 450 |
+
as a string.
|
| 451 |
+
|
| 452 |
+
If the distribution does not have a codename, an empty string is returned.
|
| 453 |
+
|
| 454 |
+
Note that the returned codename is not always really a codename. For
|
| 455 |
+
example, openSUSE returns "x86_64". This function does not handle such
|
| 456 |
+
cases in any special way and just returns the string it finds, if any.
|
| 457 |
+
|
| 458 |
+
**Lookup hierarchy:**
|
| 459 |
+
|
| 460 |
+
* the codename within the "VERSION" attribute of the os-release file, if
|
| 461 |
+
provided,
|
| 462 |
+
|
| 463 |
+
* the value of the "Codename" attribute returned by the lsb_release
|
| 464 |
+
command,
|
| 465 |
+
|
| 466 |
+
* the value of the "<codename>" field of the distro release file.
|
| 467 |
+
"""
|
| 468 |
+
return _distro.codename()
|
| 469 |
+
|
| 470 |
+
|
| 471 |
+
def info(pretty: bool = False, best: bool = False) -> InfoDict:
|
| 472 |
+
"""
|
| 473 |
+
Return certain machine-readable information items about the current OS
|
| 474 |
+
distribution in a dictionary, as shown in the following example:
|
| 475 |
+
|
| 476 |
+
.. sourcecode:: python
|
| 477 |
+
|
| 478 |
+
{
|
| 479 |
+
'id': 'rhel',
|
| 480 |
+
'version': '7.0',
|
| 481 |
+
'version_parts': {
|
| 482 |
+
'major': '7',
|
| 483 |
+
'minor': '0',
|
| 484 |
+
'build_number': ''
|
| 485 |
+
},
|
| 486 |
+
'like': 'fedora',
|
| 487 |
+
'codename': 'Maipo'
|
| 488 |
+
}
|
| 489 |
+
|
| 490 |
+
The dictionary structure and keys are always the same, regardless of which
|
| 491 |
+
information items are available in the underlying data sources. The values
|
| 492 |
+
for the various keys are as follows:
|
| 493 |
+
|
| 494 |
+
    * ``id``: The result of :func:`distro.id`.

    * ``version``: The result of :func:`distro.version`.

    * ``version_parts -> major``: The result of :func:`distro.major_version`.

    * ``version_parts -> minor``: The result of :func:`distro.minor_version`.

    * ``version_parts -> build_number``: The result of
      :func:`distro.build_number`.

    * ``like``: The result of :func:`distro.like`.

    * ``codename``: The result of :func:`distro.codename`.

    For a description of the *pretty* and *best* parameters, see the
    :func:`distro.version` method.
    """
    return _distro.info(pretty, best)


def os_release_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the os-release file data source of the current OS distribution.

    See `os-release file`_ for details about these information items.
    """
    return _distro.os_release_info()


def lsb_release_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the lsb_release command data source of the current OS distribution.

    See `lsb_release command output`_ for details about these information
    items.
    """
    return _distro.lsb_release_info()


def distro_release_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the distro release file data source of the current OS distribution.

    See `distro release file`_ for details about these information items.
    """
    return _distro.distro_release_info()


def uname_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the uname command data source of the current OS distribution.
    """
    return _distro.uname_info()


def os_release_attr(attribute: str) -> str:
    """
    Return a single named information item from the os-release file data source
    of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `os-release file`_ for details about these information items.
    """
    return _distro.os_release_attr(attribute)


def lsb_release_attr(attribute: str) -> str:
    """
    Return a single named information item from the lsb_release command output
    data source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `lsb_release command output`_ for details about these information
    items.
    """
    return _distro.lsb_release_attr(attribute)


def distro_release_attr(attribute: str) -> str:
    """
    Return a single named information item from the distro release file
    data source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.

    See `distro release file`_ for details about these information items.
    """
    return _distro.distro_release_attr(attribute)


def uname_attr(attribute: str) -> str:
    """
    Return a single named information item from the uname command output
    data source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.
    """
    return _distro.uname_attr(attribute)
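
# A minimal usage sketch (illustrative, not part of the vendored file): all of
# the accessor functions above delegate to the shared module-global ``_distro``
# instance, so a typical caller never constructs anything:
#
#     import distro
#     distro.id()                              # e.g. "ubuntu"
#     distro.version(best=True)                # most precise version found
#     distro.os_release_attr("pretty_name")    # single os-release item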

try:
    from functools import cached_property
except ImportError:
    # Python < 3.8
    class cached_property:  # type: ignore
        """A version of @property which caches the value. On access, it calls the
        underlying function and sets the value in `__dict__` so future accesses
        will not re-call the property.
        """

        def __init__(self, f: Callable[[Any], Any]) -> None:
            self._fname = f.__name__
            self._f = f

        def __get__(self, obj: Any, owner: Type[Any]) -> Any:
            assert obj is not None, f"call {self._fname} on an instance"
            ret = obj.__dict__[self._fname] = self._f(obj)
            return ret


class LinuxDistribution:
    """
    Provides information about an OS distribution.

    This package creates a private module-global instance of this class with
    default initialization arguments, that is used by the
    `consolidated accessor functions`_ and `single source accessor functions`_.
    By using default initialization arguments, that module-global instance
    returns data about the current OS distribution (i.e. the distro this
    package runs on).

    Normally, it is not necessary to create additional instances of this class.
    However, in situations where control is needed over the exact data sources
    that are used, instances of this class can be created with a specific
    distro release file, or a specific os-release file, or without invoking the
    lsb_release command.
    """

    def __init__(
        self,
        include_lsb: Optional[bool] = None,
        os_release_file: str = "",
        distro_release_file: str = "",
        include_uname: Optional[bool] = None,
        root_dir: Optional[str] = None,
        include_oslevel: Optional[bool] = None,
    ) -> None:
        """
        The initialization method of this class gathers information from the
        available data sources, and stores that in private instance attributes.
        Subsequent access to the information items uses these private instance
        attributes, so that the data sources are read only once.

        Parameters:

        * ``include_lsb`` (bool): Controls whether the
          `lsb_release command output`_ is included as a data source.

          If the lsb_release command is not available in the program execution
          path, the data source for the lsb_release command will be empty.

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is to be used as a data source.

          An empty string (the default) will cause the default path name to
          be used (see `os-release file`_ for details).

          If the specified or defaulted os-release file does not exist, the
          data source for the os-release file will be empty.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is to be used as a data source.

          An empty string (the default) will cause a default search algorithm
          to be used (see `distro release file`_ for details).

          If the specified distro release file does not exist, or if no default
          distro release file can be found, the data source for the distro
          release file will be empty.

        * ``include_uname`` (bool): Controls whether uname command output is
          included as a data source. If the uname command is not available in
          the program execution path the data source for the uname command will
          be empty.

        * ``root_dir`` (string): The absolute path to the root directory to use
          to find distro-related information files. Note that ``include_*``
          parameters must not be enabled in combination with ``root_dir``.

        * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command
          output is included as a data source. If the oslevel command is not
          available in the program execution path the data source will be
          empty.

        Public instance attributes:

        * ``os_release_file`` (string): The path name of the
          `os-release file`_ that is actually used as a data source. The
          empty string if no os-release file is used as a data source.

        * ``distro_release_file`` (string): The path name of the
          `distro release file`_ that is actually used as a data source. The
          empty string if no distro release file is used as a data source.

        * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
          This controls whether the lsb information will be loaded.

        * ``include_uname`` (bool): The result of the ``include_uname``
          parameter. This controls whether the uname information will
          be loaded.

        * ``include_oslevel`` (bool): The result of the ``include_oslevel``
          parameter. This controls whether (AIX) oslevel information will be
          loaded.

        * ``root_dir`` (string): The result of the ``root_dir`` parameter.
          The absolute path to the root directory to use to find distro-related
          information files.

        Raises:

        * :py:exc:`ValueError`: Initialization parameters combination is not
          supported.

        * :py:exc:`OSError`: Some I/O issue with an os-release file or distro
          release file.

        * :py:exc:`UnicodeError`: A data source has unexpected characters or
          uses an unexpected encoding.
        """
        self.root_dir = root_dir
        self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
        self.usr_lib_dir = (
            os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
        )

        if os_release_file:
            self.os_release_file = os_release_file
        else:
            etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
            usr_lib_os_release_file = os.path.join(
                self.usr_lib_dir, _OS_RELEASE_BASENAME
            )

            # NOTE: The idea is to respect order **and** have it set
            # at all times for API backwards compatibility.
            if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
                usr_lib_os_release_file
            ):
                self.os_release_file = etc_dir_os_release_file
            else:
                self.os_release_file = usr_lib_os_release_file

        self.distro_release_file = distro_release_file or ""  # updated later

        is_root_dir_defined = root_dir is not None
        if is_root_dir_defined and (include_lsb or include_uname or include_oslevel):
            raise ValueError(
                "Including subprocess data sources from specific root_dir is disallowed"
                " to prevent false information"
            )
        self.include_lsb = (
            include_lsb if include_lsb is not None else not is_root_dir_defined
        )
        self.include_uname = (
            include_uname if include_uname is not None else not is_root_dir_defined
        )
        self.include_oslevel = (
            include_oslevel if include_oslevel is not None else not is_root_dir_defined
        )
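
    # Illustrative sketch (not part of the vendored file): pointing ``root_dir``
    # at a mounted image reads that tree's /etc and /usr/lib instead of the
    # host's. Subprocess-backed sources must stay disabled in that case, as
    # enforced by the ValueError above:
    #
    #     dist = LinuxDistribution(root_dir="/mnt/image")                    # ok
    #     dist = LinuxDistribution(root_dir="/mnt/image", include_lsb=True)  # ValueError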

    def __repr__(self) -> str:
        """Return repr of all info"""
        return (
            "LinuxDistribution("
            "os_release_file={self.os_release_file!r}, "
            "distro_release_file={self.distro_release_file!r}, "
            "include_lsb={self.include_lsb!r}, "
            "include_uname={self.include_uname!r}, "
            "include_oslevel={self.include_oslevel!r}, "
            "root_dir={self.root_dir!r}, "
            "_os_release_info={self._os_release_info!r}, "
            "_lsb_release_info={self._lsb_release_info!r}, "
            "_distro_release_info={self._distro_release_info!r}, "
            "_uname_info={self._uname_info!r}, "
            "_oslevel_info={self._oslevel_info!r})".format(self=self)
        )

    def linux_distribution(
        self, full_distribution_name: bool = True
    ) -> Tuple[str, str, str]:
        """
        Return information about the OS distribution that is compatible
        with Python's :func:`platform.linux_distribution`, supporting a subset
        of its parameters.

        For details, see :func:`distro.linux_distribution`.
        """
        return (
            self.name() if full_distribution_name else self.id(),
            self.version(),
            self._os_release_info.get("release_codename") or self.codename(),
        )

    def id(self) -> str:
        """Return the distro ID of the OS distribution, as a string.

        For details, see :func:`distro.id`.
        """

        def normalize(distro_id: str, table: Dict[str, str]) -> str:
            distro_id = distro_id.lower().replace(" ", "_")
            return table.get(distro_id, distro_id)

        distro_id = self.os_release_attr("id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_OS_ID)

        distro_id = self.lsb_release_attr("distributor_id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_LSB_ID)

        distro_id = self.distro_release_attr("id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_DISTRO_ID)

        distro_id = self.uname_attr("id")
        if distro_id:
            return normalize(distro_id, NORMALIZED_DISTRO_ID)

        return ""

    def name(self, pretty: bool = False) -> str:
        """
        Return the name of the OS distribution, as a string.

        For details, see :func:`distro.name`.
        """
        name = (
            self.os_release_attr("name")
            or self.lsb_release_attr("distributor_id")
            or self.distro_release_attr("name")
            or self.uname_attr("name")
        )
        if pretty:
            name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
                "description"
            )
            if not name:
                name = self.distro_release_attr("name") or self.uname_attr("name")
                version = self.version(pretty=True)
                if version:
                    name = f"{name} {version}"
        return name or ""

    def version(self, pretty: bool = False, best: bool = False) -> str:
        """
        Return the version of the OS distribution, as a string.

        For details, see :func:`distro.version`.
        """
        versions = [
            self.os_release_attr("version_id"),
            self.lsb_release_attr("release"),
            self.distro_release_attr("version_id"),
            self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
                "version_id", ""
            ),
            self._parse_distro_release_content(
                self.lsb_release_attr("description")
            ).get("version_id", ""),
            self.uname_attr("release"),
        ]
        if self.uname_attr("id").startswith("aix"):
            # On AIX platforms, prefer oslevel command output.
            versions.insert(0, self.oslevel_info())
        elif self.id() == "debian" or "debian" in self.like().split():
            # On Debian-like, add debian_version file content to candidates list.
            versions.append(self._debian_version)
        version = ""
        if best:
            # This algorithm uses the last version in priority order that has
            # the best precision. If the versions are not in conflict, that
            # does not matter; otherwise, using the last one instead of the
            # first one might be considered a surprise.
            for v in versions:
                if v.count(".") > version.count(".") or version == "":
                    version = v
        else:
            for v in versions:
                if v != "":
                    version = v
                    break
        if pretty and version and self.codename():
            version = f"{version} ({self.codename()})"
        return version
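
    # Worked example of the ``best`` flag in version() above (illustrative):
    # given candidates ["7", "7.9", "7.9.2009"], version() returns "7" (the
    # first non-empty entry in priority order), while version(best=True)
    # returns "7.9.2009" (the entry with the most dots).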

    def version_parts(self, best: bool = False) -> Tuple[str, str, str]:
        """
        Return the version of the OS distribution, as a tuple of version
        numbers.

        For details, see :func:`distro.version_parts`.
        """
        version_str = self.version(best=best)
        if version_str:
            version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
            matches = version_regex.match(version_str)
            if matches:
                major, minor, build_number = matches.groups()
                return major, minor or "", build_number or ""
        return "", "", ""

    def major_version(self, best: bool = False) -> str:
        """
        Return the major version number of the current distribution.

        For details, see :func:`distro.major_version`.
        """
        return self.version_parts(best)[0]

    def minor_version(self, best: bool = False) -> str:
        """
        Return the minor version number of the current distribution.

        For details, see :func:`distro.minor_version`.
        """
        return self.version_parts(best)[1]

    def build_number(self, best: bool = False) -> str:
        """
        Return the build number of the current distribution.

        For details, see :func:`distro.build_number`.
        """
        return self.version_parts(best)[2]

    def like(self) -> str:
        """
        Return the IDs of distributions that are like the OS distribution.

        For details, see :func:`distro.like`.
        """
        return self.os_release_attr("id_like") or ""

    def codename(self) -> str:
        """
        Return the codename of the OS distribution.

        For details, see :func:`distro.codename`.
        """
        try:
            # Handle os_release specially since distros might purposefully set
            # this to empty string to have no codename
            return self._os_release_info["codename"]
        except KeyError:
            return (
                self.lsb_release_attr("codename")
                or self.distro_release_attr("codename")
                or ""
            )

    def info(self, pretty: bool = False, best: bool = False) -> InfoDict:
        """
        Return certain machine-readable information about the OS
        distribution.

        For details, see :func:`distro.info`.
        """
        return InfoDict(
            id=self.id(),
            version=self.version(pretty, best),
            version_parts=VersionDict(
                major=self.major_version(best),
                minor=self.minor_version(best),
                build_number=self.build_number(best),
            ),
            like=self.like(),
            codename=self.codename(),
        )

    def os_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the os-release file data source of the OS distribution.

        For details, see :func:`distro.os_release_info`.
        """
        return self._os_release_info

    def lsb_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the lsb_release command data source of the OS
        distribution.

        For details, see :func:`distro.lsb_release_info`.
        """
        return self._lsb_release_info

    def distro_release_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the distro release file data source of the OS
        distribution.

        For details, see :func:`distro.distro_release_info`.
        """
        return self._distro_release_info

    def uname_info(self) -> Dict[str, str]:
        """
        Return a dictionary containing key-value pairs for the information
        items from the uname command data source of the OS distribution.

        For details, see :func:`distro.uname_info`.
        """
        return self._uname_info

    def oslevel_info(self) -> str:
        """
        Return AIX's oslevel command output.
        """
        return self._oslevel_info

    def os_release_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the os-release file data
        source of the OS distribution.

        For details, see :func:`distro.os_release_attr`.
        """
        return self._os_release_info.get(attribute, "")

    def lsb_release_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the lsb_release command
        output data source of the OS distribution.

        For details, see :func:`distro.lsb_release_attr`.
        """
        return self._lsb_release_info.get(attribute, "")

    def distro_release_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the distro release file
        data source of the OS distribution.

        For details, see :func:`distro.distro_release_attr`.
        """
        return self._distro_release_info.get(attribute, "")

    def uname_attr(self, attribute: str) -> str:
        """
        Return a single named information item from the uname command
        output data source of the OS distribution.

        For details, see :func:`distro.uname_attr`.
        """
        return self._uname_info.get(attribute, "")

    @cached_property
    def _os_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the specified os-release file.

        Returns:
            A dictionary containing all information items.
        """
        if os.path.isfile(self.os_release_file):
            with open(self.os_release_file, encoding="utf-8") as release_file:
                return self._parse_os_release_content(release_file)
        return {}

    @staticmethod
    def _parse_os_release_content(lines: TextIO) -> Dict[str, str]:
        """
        Parse the lines of an os-release file.

        Parameters:

        * lines: Iterable through the lines in the os-release file.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        """
        props = {}
        lexer = shlex.shlex(lines, posix=True)
        lexer.whitespace_split = True

        tokens = list(lexer)
        for token in tokens:
            # At this point, all shell-like parsing has been done (i.e.
            # comments processed, quotes and backslash escape sequences
            # processed, multi-line values assembled, trailing newlines
            # stripped, etc.), so the tokens are now either:
            # * variable assignments: var=value
            # * commands or their arguments (not allowed in os-release)
            # Ignore any tokens that are not variable assignments
            if "=" in token:
                k, v = token.split("=", 1)
                props[k.lower()] = v

        if "version" in props:
            # extract release codename (if any) from version attribute
            match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
            if match:
                release_codename = match.group(1) or match.group(2)
                props["codename"] = props["release_codename"] = release_codename

        if "version_codename" in props:
            # os-release added a version_codename field. Use that in
            # preference to anything else. Note that some distros purposefully
            # do not have code names. They should be setting
            # version_codename=""
            props["codename"] = props["version_codename"]
        elif "ubuntu_codename" in props:
            # Same as above but a non-standard field name used on older Ubuntus
            props["codename"] = props["ubuntu_codename"]

        return props

    @cached_property
    def _lsb_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the lsb_release command output.

        Returns:
            A dictionary containing all information items.
        """
        if not self.include_lsb:
            return {}
        try:
            cmd = ("lsb_release", "-a")
            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        # Command not found or lsb_release returned error
        except (OSError, subprocess.CalledProcessError):
            return {}
        content = self._to_str(stdout).splitlines()
        return self._parse_lsb_release_content(content)

    @staticmethod
    def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]:
        """
        Parse the output of the lsb_release command.

        Parameters:

        * lines: Iterable through the lines of the lsb_release output.
                 Each line must be a unicode string or a UTF-8 encoded byte
                 string.

        Returns:
            A dictionary containing all information items.
        """
        props = {}
        for line in lines:
            kv = line.strip("\n").split(":", 1)
            if len(kv) != 2:
                # Ignore lines without colon.
                continue
            k, v = kv
            props.update({k.replace(" ", "_").lower(): v.strip()})
        return props

    @cached_property
    def _uname_info(self) -> Dict[str, str]:
        if not self.include_uname:
            return {}
        try:
            cmd = ("uname", "-rs")
            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        except OSError:
            return {}
        content = self._to_str(stdout).splitlines()
        return self._parse_uname_content(content)

    @cached_property
    def _oslevel_info(self) -> str:
        if not self.include_oslevel:
            return ""
        try:
            stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
        except (OSError, subprocess.CalledProcessError):
            return ""
        return self._to_str(stdout).strip()

    @cached_property
    def _debian_version(self) -> str:
        try:
            with open(
                os.path.join(self.etc_dir, "debian_version"), encoding="ascii"
            ) as fp:
                return fp.readline().rstrip()
        except FileNotFoundError:
            return ""

    @staticmethod
    def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]:
        if not lines:
            return {}
        props = {}
        match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
        if match:
            name, version = match.groups()

            # This is to prevent the Linux kernel version from
            # appearing as the 'best' version on otherwise
            # identifiable distributions.
            if name == "Linux":
                return {}
            props["id"] = name.lower()
            props["name"] = name
            props["release"] = version
        return props

    @staticmethod
    def _to_str(bytestring: bytes) -> str:
        encoding = sys.getfilesystemencoding()
        return bytestring.decode(encoding)

    @cached_property
    def _distro_release_info(self) -> Dict[str, str]:
        """
        Get the information items from the specified distro release file.

        Returns:
            A dictionary containing all information items.
        """
        if self.distro_release_file:
            # If it was specified, we use it and parse what we can, even if
            # its file name or content does not match the expected pattern.
            distro_info = self._parse_distro_release_file(self.distro_release_file)
            basename = os.path.basename(self.distro_release_file)
            # The file name pattern for user-specified distro release files
            # is somewhat more tolerant (compared to when searching for the
            # file), because we want to use what was specified as best as
            # possible.
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
        else:
            try:
                basenames = [
                    basename
                    for basename in os.listdir(self.etc_dir)
                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    and os.path.isfile(os.path.join(self.etc_dir, basename))
                ]
                # We sort for repeatability in cases where there are multiple
                # distro specific files; e.g. CentOS, Oracle, Enterprise all
                # containing `redhat-release` on top of their own.
                basenames.sort()
            except OSError:
                # This may occur when /etc is not readable but we can't be
                # sure about the *-release files. Check common entries of
                # /etc for information. If they turn out to not be there the
                # error is handled in `_parse_distro_release_file()`.
                basenames = _DISTRO_RELEASE_BASENAMES
            for basename in basenames:
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match is None:
                    continue
                filepath = os.path.join(self.etc_dir, basename)
                distro_info = self._parse_distro_release_file(filepath)
                # The name is always present if the pattern matches.
                if "name" not in distro_info:
                    continue
                self.distro_release_file = filepath
                break
            else:  # the loop didn't "break": no candidate.
                return {}

        if match is not None:
            distro_info["id"] = match.group(1)

            # CloudLinux < 7: manually enrich info with proper id.
            if "cloudlinux" in distro_info.get("name", "").lower():
                distro_info["id"] = "cloudlinux"

        return distro_info

    def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]:
        """
        Parse a distro release file.

        Parameters:

        * filepath: Path name of the distro release file.

        Returns:
            A dictionary containing all information items.
        """
        try:
            with open(filepath, encoding="utf-8") as fp:
                # Only parse the first line. For instance, on SLES there
                # are multiple lines. We don't want them...
                return self._parse_distro_release_content(fp.readline())
        except OSError:
            # Ignore not being able to read a specific, seemingly version
            # related file.
            # See https://github.com/python-distro/distro/issues/162
            return {}

    @staticmethod
    def _parse_distro_release_content(line: str) -> Dict[str, str]:
        """
        Parse a line from a distro release file.

        Parameters:
        * line: Line from the distro release file. Must be a unicode string
                or a UTF-8 encoded byte string.

        Returns:
            A dictionary containing all information items.
        """
        matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
        distro_info = {}
        if matches:
            # regexp ensures non-None
            distro_info["name"] = matches.group(3)[::-1]
            if matches.group(2):
                distro_info["version_id"] = matches.group(2)[::-1]
            if matches.group(1):
                distro_info["codename"] = matches.group(1)[::-1]
        elif line:
            distro_info["name"] = line.strip()
        return distro_info


_distro = LinuxDistribution()


def main() -> None:
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    parser = argparse.ArgumentParser(description="OS distro info tool")
    parser.add_argument(
        "--json", "-j", help="Output in machine readable format", action="store_true"
    )

    parser.add_argument(
        "--root-dir",
        "-r",
        type=str,
        dest="root_dir",
        help="Path to the root filesystem directory (defaults to /)",
    )

    args = parser.parse_args()

    if args.root_dir:
        dist = LinuxDistribution(
            include_lsb=False,
            include_uname=False,
            include_oslevel=False,
            root_dir=args.root_dir,
        )
    else:
        dist = _distro

    if args.json:
        logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
    else:
        logger.info("Name: %s", dist.name(pretty=True))
        distribution_version = dist.version(pretty=True)
        logger.info("Version: %s", distribution_version)
        distribution_codename = dist.codename()
        logger.info("Codename: %s", distribution_codename)


if __name__ == "__main__":
    main()
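A quick sanity check of the os-release parsing above (a sketch; assumes the module is importable as ``distro``, and the sample file content is invented for illustration):

    import io

    import distro

    sample = io.StringIO(
        'NAME="Ubuntu"\n'
        'VERSION="22.04.1 LTS (Jammy Jellyfish)"\n'
        'ID=ubuntu\n'
        'VERSION_CODENAME=jammy\n'
    )
    props = distro.LinuxDistribution._parse_os_release_content(sample)
    assert props["id"] == "ubuntu"
    assert props["codename"] == "jammy"  # version_codename wins over VERSION
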
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distro/py.typed
ADDED
File without changes
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__init__.py
ADDED
@@ -0,0 +1,55 @@
# ruff: noqa: F401
import os

from .exceptions import *  # noqa: F403
from .ext import ExtType, Timestamp

version = (1, 1, 0)
__version__ = "1.1.0"


if os.environ.get("MSGPACK_PUREPYTHON"):
    from .fallback import Packer, Unpacker, unpackb
else:
    try:
        from ._cmsgpack import Packer, Unpacker, unpackb
    except ImportError:
        from .fallback import Packer, Unpacker, unpackb


def pack(o, stream, **kwargs):
    """
    Pack object `o` and write it to `stream`

    See :class:`Packer` for options.
    """
    packer = Packer(**kwargs)
    stream.write(packer.pack(o))


def packb(o, **kwargs):
    """
    Pack object `o` and return packed bytes

    See :class:`Packer` for options.
    """
    return Packer(**kwargs).pack(o)


def unpack(stream, **kwargs):
    """
    Unpack an object from `stream`.

    Raises `ExtraData` when `stream` contains extra bytes.
    See :class:`Unpacker` for options.
    """
    data = stream.read()
    return unpackb(data, **kwargs)


# alias for compatibility to simplejson/marshal/pickle.
load = unpack
loads = unpackb

dump = pack
dumps = packb
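A minimal round-trip with the helpers above (a sketch; assumes the vendored package is importable as ``msgpack``):

    import msgpack

    payload = msgpack.packb({"compiler": "msgpack", "version": [1, 1, 0]})
    assert msgpack.unpackb(payload) == {"compiler": "msgpack", "version": [1, 1, 0]}
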
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (1.38 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc
ADDED
Binary file (1.8 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc
ADDED
Binary file (6.08 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc
ADDED
Binary file (23.8 kB)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/exceptions.py
ADDED
@@ -0,0 +1,48 @@
class UnpackException(Exception):
    """Base class for some exceptions raised while unpacking.

    NOTE: unpack may raise exceptions other than subclasses of
    UnpackException. If you want to catch all errors, catch
    Exception instead.
    """


class BufferFull(UnpackException):
    pass


class OutOfData(UnpackException):
    pass


class FormatError(ValueError, UnpackException):
    """Invalid msgpack format"""


class StackError(ValueError, UnpackException):
    """Too nested"""


# Deprecated. Use ValueError instead
UnpackValueError = ValueError


class ExtraData(UnpackValueError):
    """ExtraData is raised when there is trailing data.

    This exception is raised only during one-shot (not streaming)
    unpacking.
    """

    def __init__(self, unpacked, extra):
        self.unpacked = unpacked
        self.extra = extra

    def __str__(self):
        return "unpack(b) received extra data."


# Deprecated. Use Exception instead to catch all exceptions during packing.
PackException = Exception
PackValueError = ValueError
PackOverflowError = OverflowError
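How the ExtraData fields behave in practice (a sketch; assumes the vendored package is importable as ``msgpack``):

    import msgpack

    try:
        msgpack.unpackb(msgpack.packb(1) + b"\x02")  # a valid int, then junk
    except msgpack.ExtraData as exc:
        print(exc.unpacked, exc.extra)  # -> 1 b'\x02'
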
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/ext.py
ADDED
@@ -0,0 +1,170 @@
import datetime
import struct
from collections import namedtuple


class ExtType(namedtuple("ExtType", "code data")):
    """ExtType represents ext type in msgpack."""

    def __new__(cls, code, data):
        if not isinstance(code, int):
            raise TypeError("code must be int")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes")
        if not 0 <= code <= 127:
            raise ValueError("code must be 0~127")
        return super().__new__(cls, code, data)


class Timestamp:
    """Timestamp represents the Timestamp extension type in msgpack.

    When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`.
    When using pure-Python msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and
    unpack `Timestamp`.

    This class is immutable: Do not override seconds and nanoseconds.
    """

    __slots__ = ["seconds", "nanoseconds"]

    def __init__(self, seconds, nanoseconds=0):
        """Initialize a Timestamp object.

        :param int seconds:
            Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds).
            May be negative.

        :param int nanoseconds:
            Number of nanoseconds to add to `seconds` to get fractional time.
            Maximum is 999_999_999. Default is 0.

        Note: Negative times (before the UNIX epoch) are represented as neg. seconds + pos. ns.
        """
        if not isinstance(seconds, int):
            raise TypeError("seconds must be an integer")
        if not isinstance(nanoseconds, int):
            raise TypeError("nanoseconds must be an integer")
        if not (0 <= nanoseconds < 10**9):
            raise ValueError("nanoseconds must be a non-negative integer less than 1000000000.")
        self.seconds = seconds
        self.nanoseconds = nanoseconds

    def __repr__(self):
        """String representation of Timestamp."""
        return f"Timestamp(seconds={self.seconds}, nanoseconds={self.nanoseconds})"

    def __eq__(self, other):
        """Check for equality with another Timestamp object"""
        if type(other) is self.__class__:
            return self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
        return False

    def __ne__(self, other):
        """not-equals method (see :func:`__eq__()`)"""
        return not self.__eq__(other)

    def __hash__(self):
        return hash((self.seconds, self.nanoseconds))

    @staticmethod
    def from_bytes(b):
        """Unpack bytes into a `Timestamp` object.

        Used for pure-Python msgpack unpacking.

        :param b: Payload from msgpack ext message with code -1
        :type b: bytes

        :returns: Timestamp object unpacked from msgpack ext payload
        :rtype: Timestamp
        """
        if len(b) == 4:
            seconds = struct.unpack("!L", b)[0]
            nanoseconds = 0
        elif len(b) == 8:
            data64 = struct.unpack("!Q", b)[0]
            seconds = data64 & 0x00000003FFFFFFFF
            nanoseconds = data64 >> 34
        elif len(b) == 12:
            nanoseconds, seconds = struct.unpack("!Iq", b)
        else:
            raise ValueError(
                "Timestamp type can only be created from 32, 64, or 96-bit byte objects"
            )
        return Timestamp(seconds, nanoseconds)

    def to_bytes(self):
        """Pack this Timestamp object into bytes.

        Used for pure-Python msgpack packing.

        :returns data: Payload for EXT message with code -1 (timestamp type)
        :rtype: bytes
        """
        if (self.seconds >> 34) == 0:  # seconds is non-negative and fits in 34 bits
            data64 = self.nanoseconds << 34 | self.seconds
            if data64 & 0xFFFFFFFF00000000 == 0:
                # nanoseconds is zero and seconds < 2**32, so timestamp 32
                data = struct.pack("!L", data64)
            else:
                # timestamp 64
                data = struct.pack("!Q", data64)
        else:
            # timestamp 96
            data = struct.pack("!Iq", self.nanoseconds, self.seconds)
        return data

    @staticmethod
    def from_unix(unix_sec):
        """Create a Timestamp from posix timestamp in seconds.

        :param unix_sec: Posix timestamp in seconds.
        :type unix_sec: int or float
        """
        seconds = int(unix_sec // 1)
        nanoseconds = int((unix_sec % 1) * 10**9)
        return Timestamp(seconds, nanoseconds)

    def to_unix(self):
        """Get the timestamp as a floating-point value.

        :returns: posix timestamp
        :rtype: float
        """
        return self.seconds + self.nanoseconds / 1e9

    @staticmethod
    def from_unix_nano(unix_ns):
        """Create a Timestamp from posix timestamp in nanoseconds.

        :param int unix_ns: Posix timestamp in nanoseconds.
        :rtype: Timestamp
        """
        return Timestamp(*divmod(unix_ns, 10**9))

    def to_unix_nano(self):
        """Get the timestamp as a unixtime in nanoseconds.

        :returns: posix timestamp in nanoseconds
        :rtype: int
        """
        return self.seconds * 10**9 + self.nanoseconds

    def to_datetime(self):
        """Get the timestamp as a UTC datetime.

        :rtype: `datetime.datetime`
        """
        utc = datetime.timezone.utc
        return datetime.datetime.fromtimestamp(0, utc) + datetime.timedelta(
            seconds=self.seconds, microseconds=self.nanoseconds // 1000
        )

    @staticmethod
    def from_datetime(dt):
        """Create a Timestamp from datetime with tzinfo.

        :rtype: Timestamp
        """
        return Timestamp(seconds=int(dt.timestamp()), nanoseconds=dt.microsecond * 1000)
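Round-trip sketch for the conversion helpers above (illustrative values; assumes ``Timestamp`` is imported from the module above):

    ts = Timestamp.from_unix(1700000000.5)
    assert ts.seconds == 1700000000 and ts.nanoseconds == 500_000_000
    assert ts.to_unix() == 1700000000.5
    assert Timestamp.from_datetime(ts.to_datetime()) == ts
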
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/msgpack/fallback.py
ADDED
@@ -0,0 +1,929 @@
"""Fallback pure Python implementation of msgpack"""
|
| 2 |
+
|
| 3 |
+
import struct
|
| 4 |
+
import sys
|
| 5 |
+
from datetime import datetime as _DateTime
|
| 6 |
+
|
| 7 |
+
if hasattr(sys, "pypy_version_info"):
|
| 8 |
+
from __pypy__ import newlist_hint
|
| 9 |
+
from __pypy__.builders import BytesBuilder
|
| 10 |
+
|
| 11 |
+
_USING_STRINGBUILDER = True
|
| 12 |
+
|
| 13 |
+
class BytesIO:
|
| 14 |
+
def __init__(self, s=b""):
|
| 15 |
+
if s:
|
| 16 |
+
self.builder = BytesBuilder(len(s))
|
| 17 |
+
self.builder.append(s)
|
| 18 |
+
else:
|
| 19 |
+
self.builder = BytesBuilder()
|
| 20 |
+
|
| 21 |
+
def write(self, s):
|
| 22 |
+
if isinstance(s, memoryview):
|
| 23 |
+
s = s.tobytes()
|
| 24 |
+
elif isinstance(s, bytearray):
|
| 25 |
+
s = bytes(s)
|
| 26 |
+
self.builder.append(s)
|
| 27 |
+
|
| 28 |
+
def getvalue(self):
|
| 29 |
+
return self.builder.build()
|
| 30 |
+
|
| 31 |
+
else:
|
| 32 |
+
from io import BytesIO
|
| 33 |
+
|
| 34 |
+
_USING_STRINGBUILDER = False
|
| 35 |
+
|
| 36 |
+
def newlist_hint(size):
|
| 37 |
+
return []
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
from .exceptions import BufferFull, ExtraData, FormatError, OutOfData, StackError
|
| 41 |
+
from .ext import ExtType, Timestamp
|
| 42 |
+
|
| 43 |
+
EX_SKIP = 0
|
| 44 |
+
EX_CONSTRUCT = 1
|
| 45 |
+
EX_READ_ARRAY_HEADER = 2
|
| 46 |
+
EX_READ_MAP_HEADER = 3
|
| 47 |
+
|
| 48 |
+
TYPE_IMMEDIATE = 0
|
| 49 |
+
TYPE_ARRAY = 1
|
| 50 |
+
TYPE_MAP = 2
|
| 51 |
+
TYPE_RAW = 3
|
| 52 |
+
TYPE_BIN = 4
|
| 53 |
+
TYPE_EXT = 5
|
| 54 |
+
|
| 55 |
+
DEFAULT_RECURSE_LIMIT = 511
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def _check_type_strict(obj, t, type=type, tuple=tuple):
|
| 59 |
+
if type(t) is tuple:
|
| 60 |
+
return type(obj) in t
|
| 61 |
+
else:
|
| 62 |
+
return type(obj) is t
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def _get_data_from_buffer(obj):
|
| 66 |
+
view = memoryview(obj)
|
| 67 |
+
if view.itemsize != 1:
|
| 68 |
+
raise ValueError("cannot unpack from multi-byte object")
|
| 69 |
+
return view
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def unpackb(packed, **kwargs):
|
| 73 |
+
"""
|
| 74 |
+
Unpack an object from `packed`.
|
| 75 |
+
|
| 76 |
+
Raises ``ExtraData`` when *packed* contains extra bytes.
|
| 77 |
+
Raises ``ValueError`` when *packed* is incomplete.
|
| 78 |
+
Raises ``FormatError`` when *packed* is not valid msgpack.
|
| 79 |
+
Raises ``StackError`` when *packed* contains too nested.
|
| 80 |
+
Other exceptions can be raised during unpacking.
|
| 81 |
+
|
| 82 |
+
See :class:`Unpacker` for options.
|
| 83 |
+
"""
|
| 84 |
+
unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs)
|
| 85 |
+
unpacker.feed(packed)
|
| 86 |
+
try:
|
| 87 |
+
ret = unpacker._unpack()
|
| 88 |
+
except OutOfData:
|
| 89 |
+
raise ValueError("Unpack failed: incomplete input")
|
| 90 |
+
except RecursionError:
|
| 91 |
+
raise StackError
|
| 92 |
+
if unpacker._got_extradata():
|
| 93 |
+
raise ExtraData(ret, unpacker._get_extradata())
|
| 94 |
+
return ret
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
_NO_FORMAT_USED = ""
|
| 98 |
+
_MSGPACK_HEADERS = {
|
| 99 |
+
0xC4: (1, _NO_FORMAT_USED, TYPE_BIN),
|
| 100 |
+
0xC5: (2, ">H", TYPE_BIN),
|
| 101 |
+
0xC6: (4, ">I", TYPE_BIN),
|
| 102 |
+
0xC7: (2, "Bb", TYPE_EXT),
|
| 103 |
+
0xC8: (3, ">Hb", TYPE_EXT),
|
| 104 |
+
0xC9: (5, ">Ib", TYPE_EXT),
|
| 105 |
+
0xCA: (4, ">f"),
|
| 106 |
+
0xCB: (8, ">d"),
|
| 107 |
+
0xCC: (1, _NO_FORMAT_USED),
|
| 108 |
+
0xCD: (2, ">H"),
|
| 109 |
+
0xCE: (4, ">I"),
|
| 110 |
+
0xCF: (8, ">Q"),
|
| 111 |
+
0xD0: (1, "b"),
|
| 112 |
+
0xD1: (2, ">h"),
|
| 113 |
+
0xD2: (4, ">i"),
|
| 114 |
+
0xD3: (8, ">q"),
|
| 115 |
+
0xD4: (1, "b1s", TYPE_EXT),
|
| 116 |
+
0xD5: (2, "b2s", TYPE_EXT),
|
| 117 |
+
0xD6: (4, "b4s", TYPE_EXT),
|
| 118 |
+
0xD7: (8, "b8s", TYPE_EXT),
|
| 119 |
+
0xD8: (16, "b16s", TYPE_EXT),
|
| 120 |
+
0xD9: (1, _NO_FORMAT_USED, TYPE_RAW),
|
| 121 |
+
0xDA: (2, ">H", TYPE_RAW),
|
| 122 |
+
0xDB: (4, ">I", TYPE_RAW),
|
| 123 |
+
0xDC: (2, ">H", TYPE_ARRAY),
|
| 124 |
+
0xDD: (4, ">I", TYPE_ARRAY),
|
| 125 |
+
0xDE: (2, ">H", TYPE_MAP),
|
| 126 |
+
0xDF: (4, ">I", TYPE_MAP),
|
| 127 |
+
}
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
class Unpacker:
    """Streaming unpacker.

    Arguments:

    :param file_like:
        File-like object having `.read(n)` method.
        If specified, unpacker reads serialized data from it and `.feed()` is not usable.

    :param int read_size:
        Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`)

    :param bool use_list:
        If true, unpack msgpack array to Python list.
        Otherwise, unpack to Python tuple. (default: True)

    :param bool raw:
        If true, unpack msgpack raw to Python bytes.
        Otherwise, unpack to Python str by decoding with UTF-8 encoding (default).

    :param int timestamp:
        Control how timestamp type is unpacked:

            0 - Timestamp
            1 - float (Seconds from the EPOCH)
            2 - int (Nanoseconds from the EPOCH)
            3 - datetime.datetime (UTC).

    :param bool strict_map_key:
        If true (default), only str or bytes are accepted for map (dict) keys.

    :param object_hook:
        When specified, it should be callable.
        Unpacker calls it with a dict argument after unpacking msgpack map.
        (See also simplejson)

    :param object_pairs_hook:
        When specified, it should be callable.
        Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
        (See also simplejson)

    :param str unicode_errors:
        The error handler for decoding unicode. (default: 'strict')
        This option should be used only when you have msgpack data which
        contains invalid UTF-8 strings.

    :param int max_buffer_size:
        Limits size of data waiting to be unpacked. 0 means 2**31-1.
        The default value is 100*1024*1024 (100MiB).
        Raises `BufferFull` exception when it is insufficient.
        You should set this parameter when unpacking data from an untrusted source.

    :param int max_str_len:
        Deprecated, use *max_buffer_size* instead.
        Limits max length of str. (default: max_buffer_size)

    :param int max_bin_len:
        Deprecated, use *max_buffer_size* instead.
        Limits max length of bin. (default: max_buffer_size)

    :param int max_array_len:
        Limits max length of array.
        (default: max_buffer_size)

    :param int max_map_len:
        Limits max length of map.
        (default: max_buffer_size//2)

    :param int max_ext_len:
        Deprecated, use *max_buffer_size* instead.
        Limits max size of ext type. (default: max_buffer_size)

    Example of streaming deserialize from file-like object::

        unpacker = Unpacker(file_like)
        for o in unpacker:
            process(o)

    Example of streaming deserialize from socket::

        unpacker = Unpacker()
        while True:
            buf = sock.recv(1024**2)
            if not buf:
                break
            unpacker.feed(buf)
            for o in unpacker:
                process(o)

    Raises ``ExtraData`` when *packed* contains extra bytes.
    Raises ``OutOfData`` when *packed* is incomplete.
    Raises ``FormatError`` when *packed* is not valid msgpack.
    Raises ``StackError`` when *packed* contains too deeply nested data.
    Other exceptions can be raised during unpacking.
    """

    def __init__(
        self,
        file_like=None,
        *,
        read_size=0,
        use_list=True,
        raw=False,
        timestamp=0,
        strict_map_key=True,
        object_hook=None,
        object_pairs_hook=None,
        list_hook=None,
        unicode_errors=None,
        max_buffer_size=100 * 1024 * 1024,
        ext_hook=ExtType,
        max_str_len=-1,
        max_bin_len=-1,
        max_array_len=-1,
        max_map_len=-1,
        max_ext_len=-1,
    ):
        if unicode_errors is None:
            unicode_errors = "strict"

        if file_like is None:
            self._feeding = True
        else:
            if not callable(file_like.read):
                raise TypeError("`file_like.read` must be callable")
            self.file_like = file_like
            self._feeding = False

        #: array of bytes fed.
        self._buffer = bytearray()
        #: Which position we currently read
        self._buff_i = 0

        # When Unpacker is used as an iterable, between the calls to next(),
        # the buffer is not "consumed" completely, for efficiency's sake.
        # Instead, it is done sloppily. To make sure we raise BufferFull at
        # the correct moments, we have to keep track of how sloppy we were.
        # Furthermore, when the buffer is incomplete (that is: in the case
        # we raise an OutOfData) we need to rollback the buffer to the correct
        # state, which _buf_checkpoint records.
        self._buf_checkpoint = 0

        if not max_buffer_size:
            max_buffer_size = 2**31 - 1
        if max_str_len == -1:
            max_str_len = max_buffer_size
        if max_bin_len == -1:
            max_bin_len = max_buffer_size
        if max_array_len == -1:
            max_array_len = max_buffer_size
        if max_map_len == -1:
            max_map_len = max_buffer_size // 2
        if max_ext_len == -1:
            max_ext_len = max_buffer_size

        self._max_buffer_size = max_buffer_size
        if read_size > self._max_buffer_size:
            raise ValueError("read_size must be smaller than max_buffer_size")
        self._read_size = read_size or min(self._max_buffer_size, 16 * 1024)
        self._raw = bool(raw)
        self._strict_map_key = bool(strict_map_key)
        self._unicode_errors = unicode_errors
        self._use_list = use_list
        if not (0 <= timestamp <= 3):
            raise ValueError("timestamp must be 0..3")
        self._timestamp = timestamp
        self._list_hook = list_hook
        self._object_hook = object_hook
        self._object_pairs_hook = object_pairs_hook
        self._ext_hook = ext_hook
        self._max_str_len = max_str_len
        self._max_bin_len = max_bin_len
        self._max_array_len = max_array_len
        self._max_map_len = max_map_len
        self._max_ext_len = max_ext_len
        self._stream_offset = 0

        if list_hook is not None and not callable(list_hook):
            raise TypeError("`list_hook` is not callable")
        if object_hook is not None and not callable(object_hook):
            raise TypeError("`object_hook` is not callable")
        if object_pairs_hook is not None and not callable(object_pairs_hook):
            raise TypeError("`object_pairs_hook` is not callable")
        if object_hook is not None and object_pairs_hook is not None:
            raise TypeError("object_pairs_hook and object_hook are mutually exclusive")
        if not callable(ext_hook):
            raise TypeError("`ext_hook` is not callable")

    def feed(self, next_bytes):
        assert self._feeding
        view = _get_data_from_buffer(next_bytes)
        if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size:
            raise BufferFull

        # Strip buffer before checkpoint before reading file.
        if self._buf_checkpoint > 0:
            del self._buffer[: self._buf_checkpoint]
            self._buff_i -= self._buf_checkpoint
            self._buf_checkpoint = 0

        # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython
        self._buffer.extend(view)
        view.release()

    def _consume(self):
        """Gets rid of the used parts of the buffer."""
        self._stream_offset += self._buff_i - self._buf_checkpoint
        self._buf_checkpoint = self._buff_i

    def _got_extradata(self):
        return self._buff_i < len(self._buffer)

    def _get_extradata(self):
        return self._buffer[self._buff_i :]

    def read_bytes(self, n):
        ret = self._read(n, raise_outofdata=False)
        self._consume()
        return ret

    def _read(self, n, raise_outofdata=True):
        # (int) -> bytearray
        self._reserve(n, raise_outofdata=raise_outofdata)
        i = self._buff_i
        ret = self._buffer[i : i + n]
        self._buff_i = i + len(ret)
        return ret

    def _reserve(self, n, raise_outofdata=True):
        remain_bytes = len(self._buffer) - self._buff_i - n

        # Fast path: buffer has n bytes already
        if remain_bytes >= 0:
            return

        if self._feeding:
            self._buff_i = self._buf_checkpoint
            raise OutOfData

        # Strip buffer before checkpoint before reading file.
        if self._buf_checkpoint > 0:
            del self._buffer[: self._buf_checkpoint]
            self._buff_i -= self._buf_checkpoint
            self._buf_checkpoint = 0

        # Read from file
        remain_bytes = -remain_bytes
        if remain_bytes + len(self._buffer) > self._max_buffer_size:
            raise BufferFull
        while remain_bytes > 0:
            to_read_bytes = max(self._read_size, remain_bytes)
            read_data = self.file_like.read(to_read_bytes)
            if not read_data:
                break
            assert isinstance(read_data, bytes)
            self._buffer += read_data
            remain_bytes -= len(read_data)

        if len(self._buffer) < n + self._buff_i and raise_outofdata:
            self._buff_i = 0  # rollback
            raise OutOfData

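    # Editor's note (illustrative, not part of the vendored file): in feeding
    # mode, _reserve() raises OutOfData when fewer than n bytes are buffered
    # and rolls _buff_i back to _buf_checkpoint, so a partially parsed object
    # is retried cleanly after the next feed(); in file mode it refills the
    # buffer from file_like instead.
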
    def _read_header(self):
        typ = TYPE_IMMEDIATE
        n = 0
        obj = None
        self._reserve(1)
        b = self._buffer[self._buff_i]
        self._buff_i += 1
        if b & 0b10000000 == 0:
            obj = b
        elif b & 0b11100000 == 0b11100000:
            obj = -1 - (b ^ 0xFF)
        elif b & 0b11100000 == 0b10100000:
            n = b & 0b00011111
            typ = TYPE_RAW
            if n > self._max_str_len:
                raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})")
            obj = self._read(n)
        elif b & 0b11110000 == 0b10010000:
            n = b & 0b00001111
            typ = TYPE_ARRAY
            if n > self._max_array_len:
                raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})")
        elif b & 0b11110000 == 0b10000000:
            n = b & 0b00001111
            typ = TYPE_MAP
            if n > self._max_map_len:
                raise ValueError(f"{n} exceeds max_map_len({self._max_map_len})")
        elif b == 0xC0:
            obj = None
        elif b == 0xC2:
            obj = False
        elif b == 0xC3:
            obj = True
        elif 0xC4 <= b <= 0xC6:
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            if len(fmt) > 0:
                n = struct.unpack_from(fmt, self._buffer, self._buff_i)[0]
            else:
                n = self._buffer[self._buff_i]
            self._buff_i += size
            if n > self._max_bin_len:
                raise ValueError(f"{n} exceeds max_bin_len({self._max_bin_len})")
            obj = self._read(n)
        elif 0xC7 <= b <= 0xC9:
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            L, n = struct.unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size
            if L > self._max_ext_len:
                raise ValueError(f"{L} exceeds max_ext_len({self._max_ext_len})")
            obj = self._read(L)
        elif 0xCA <= b <= 0xD3:
            size, fmt = _MSGPACK_HEADERS[b]
            self._reserve(size)
            if len(fmt) > 0:
                obj = struct.unpack_from(fmt, self._buffer, self._buff_i)[0]
            else:
                obj = self._buffer[self._buff_i]
            self._buff_i += size
        elif 0xD4 <= b <= 0xD8:
            size, fmt, typ = _MSGPACK_HEADERS[b]
            if self._max_ext_len < size:
                raise ValueError(f"{size} exceeds max_ext_len({self._max_ext_len})")
            self._reserve(size + 1)
            n, obj = struct.unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size + 1
        elif 0xD9 <= b <= 0xDB:
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            if len(fmt) > 0:
                (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i)
            else:
                n = self._buffer[self._buff_i]
            self._buff_i += size
            if n > self._max_str_len:
                raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})")
            obj = self._read(n)
        elif 0xDC <= b <= 0xDD:
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size
            if n > self._max_array_len:
                raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})")
        elif 0xDE <= b <= 0xDF:
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size
            if n > self._max_map_len:
                raise ValueError(f"{n} exceeds max_map_len({self._max_map_len})")
        else:
            raise FormatError("Unknown header: 0x%x" % b)
        return typ, n, obj

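    # Editor's note (illustrative, not part of the vendored file): given the
    # buffered input b"\xa3foo", _read_header() matches the fixstr branch
    # (0b101xxxxx, length in the low five bits), so it returns
    # (TYPE_RAW, 3, bytearray(b"foo")) and advances _buff_i past the payload.
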
    def _unpack(self, execute=EX_CONSTRUCT):
        typ, n, obj = self._read_header()

        if execute == EX_READ_ARRAY_HEADER:
            if typ != TYPE_ARRAY:
                raise ValueError("Expected array")
            return n
        if execute == EX_READ_MAP_HEADER:
            if typ != TYPE_MAP:
                raise ValueError("Expected map")
            return n
        # TODO should we eliminate the recursion?
        if typ == TYPE_ARRAY:
            if execute == EX_SKIP:
                for i in range(n):
                    # TODO check whether we need to call `list_hook`
                    self._unpack(EX_SKIP)
                return
            ret = newlist_hint(n)
            for i in range(n):
                ret.append(self._unpack(EX_CONSTRUCT))
            if self._list_hook is not None:
                ret = self._list_hook(ret)
            # TODO is the interaction between `list_hook` and `use_list` ok?
            return ret if self._use_list else tuple(ret)
        if typ == TYPE_MAP:
            if execute == EX_SKIP:
                for i in range(n):
                    # TODO check whether we need to call hooks
                    self._unpack(EX_SKIP)
                    self._unpack(EX_SKIP)
                return
            if self._object_pairs_hook is not None:
                ret = self._object_pairs_hook(
                    (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) for _ in range(n)
                )
            else:
                ret = {}
                for _ in range(n):
                    key = self._unpack(EX_CONSTRUCT)
                    if self._strict_map_key and type(key) not in (str, bytes):
                        raise ValueError("%s is not allowed for map key" % str(type(key)))
                    if isinstance(key, str):
                        key = sys.intern(key)
                    ret[key] = self._unpack(EX_CONSTRUCT)
                if self._object_hook is not None:
                    ret = self._object_hook(ret)
            return ret
        if execute == EX_SKIP:
            return
        if typ == TYPE_RAW:
            if self._raw:
                obj = bytes(obj)
            else:
                obj = obj.decode("utf_8", self._unicode_errors)
            return obj
        if typ == TYPE_BIN:
            return bytes(obj)
        if typ == TYPE_EXT:
            if n == -1:  # timestamp
                ts = Timestamp.from_bytes(bytes(obj))
                if self._timestamp == 1:
                    return ts.to_unix()
                elif self._timestamp == 2:
                    return ts.to_unix_nano()
                elif self._timestamp == 3:
                    return ts.to_datetime()
                else:
                    return ts
            else:
                return self._ext_hook(n, bytes(obj))
        assert typ == TYPE_IMMEDIATE
        return obj

    def __iter__(self):
        return self

    def __next__(self):
        try:
            ret = self._unpack(EX_CONSTRUCT)
            self._consume()
            return ret
        except OutOfData:
            self._consume()
            raise StopIteration
        except RecursionError:
            raise StackError

    next = __next__

    def skip(self):
        self._unpack(EX_SKIP)
        self._consume()

    def unpack(self):
        try:
            ret = self._unpack(EX_CONSTRUCT)
        except RecursionError:
            raise StackError
        self._consume()
        return ret

    def read_array_header(self):
        ret = self._unpack(EX_READ_ARRAY_HEADER)
        self._consume()
        return ret

    def read_map_header(self):
        ret = self._unpack(EX_READ_MAP_HEADER)
        self._consume()
        return ret

    def tell(self):
        return self._stream_offset

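# Editor's sketch (not part of the vendored module): the streaming Unpacker
# buffers partial input across feed() calls, so one object may arrive split
# over several chunks (0xCD 0x01 0x00 is the uint16 value 256):
#
#     unpacker = Unpacker()
#     unpacker.feed(b"\xcd\x01")   # first part of a uint16
#     unpacker.feed(b"\x00")       # remainder arrives later
#     assert list(unpacker) == [256]
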
class Packer:
    """
    MessagePack Packer

    Usage::

        packer = Packer()
        astream.write(packer.pack(a))
        astream.write(packer.pack(b))

    Packer's constructor has some keyword arguments:

    :param default:
        When specified, it should be callable.
        Convert user type to builtin type that Packer supports.
        See also simplejson's document.

    :param bool use_single_float:
        Use single precision float type for float. (default: False)

    :param bool autoreset:
        Reset buffer after each pack and return its content as `bytes`. (default: True).
        If set to false, use `bytes()` to get content and `.reset()` to clear buffer.

    :param bool use_bin_type:
        Use bin type introduced in msgpack spec 2.0 for bytes.
        It also enables str8 type for unicode. (default: True)

    :param bool strict_types:
        If set to true, types will be checked to be exact. Derived classes
        of serializable types will not be serialized; they will be
        treated as unsupported types and forwarded to default.
        Additionally, tuples will not be serialized as lists.
        This is useful when trying to implement accurate serialization
        for python types.

    :param bool datetime:
        If set to true, datetime with tzinfo is packed into Timestamp type.
        Note that the tzinfo is stripped in the timestamp.
        You can get UTC datetime with `timestamp=3` option of the Unpacker.

    :param str unicode_errors:
        The error handler for encoding unicode. (default: 'strict')
        DO NOT USE THIS!! This option is kept for very specific usage.

    :param int buf_size:
        Internal buffer size. This option is used only for C implementation.
    """

    def __init__(
        self,
        *,
        default=None,
        use_single_float=False,
        autoreset=True,
        use_bin_type=True,
        strict_types=False,
        datetime=False,
        unicode_errors=None,
        buf_size=None,
    ):
        self._strict_types = strict_types
        self._use_float = use_single_float
        self._autoreset = autoreset
        self._use_bin_type = use_bin_type
        self._buffer = BytesIO()
        self._datetime = bool(datetime)
        self._unicode_errors = unicode_errors or "strict"
        if default is not None and not callable(default):
            raise TypeError("default must be callable")
        self._default = default

    def _pack(
        self,
        obj,
        nest_limit=DEFAULT_RECURSE_LIMIT,
        check=isinstance,
        check_type_strict=_check_type_strict,
    ):
        default_used = False
        if self._strict_types:
            check = check_type_strict
            list_types = list
        else:
            list_types = (list, tuple)
        while True:
            if nest_limit < 0:
                raise ValueError("recursion limit exceeded")
            if obj is None:
                return self._buffer.write(b"\xc0")
            if check(obj, bool):
                if obj:
                    return self._buffer.write(b"\xc3")
                return self._buffer.write(b"\xc2")
            if check(obj, int):
                if 0 <= obj < 0x80:
                    return self._buffer.write(struct.pack("B", obj))
                if -0x20 <= obj < 0:
                    return self._buffer.write(struct.pack("b", obj))
                if 0x80 <= obj <= 0xFF:
                    return self._buffer.write(struct.pack("BB", 0xCC, obj))
                if -0x80 <= obj < 0:
                    return self._buffer.write(struct.pack(">Bb", 0xD0, obj))
                if 0xFF < obj <= 0xFFFF:
                    return self._buffer.write(struct.pack(">BH", 0xCD, obj))
                if -0x8000 <= obj < -0x80:
                    return self._buffer.write(struct.pack(">Bh", 0xD1, obj))
                if 0xFFFF < obj <= 0xFFFFFFFF:
                    return self._buffer.write(struct.pack(">BI", 0xCE, obj))
                if -0x80000000 <= obj < -0x8000:
                    return self._buffer.write(struct.pack(">Bi", 0xD2, obj))
                if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF:
                    return self._buffer.write(struct.pack(">BQ", 0xCF, obj))
                if -0x8000000000000000 <= obj < -0x80000000:
                    return self._buffer.write(struct.pack(">Bq", 0xD3, obj))
                if not default_used and self._default is not None:
                    obj = self._default(obj)
                    default_used = True
                    continue
                raise OverflowError("Integer value out of range")
            if check(obj, (bytes, bytearray)):
                n = len(obj)
                if n >= 2**32:
                    raise ValueError("%s is too large" % type(obj).__name__)
                self._pack_bin_header(n)
                return self._buffer.write(obj)
            if check(obj, str):
                obj = obj.encode("utf-8", self._unicode_errors)
                n = len(obj)
                if n >= 2**32:
                    raise ValueError("String is too large")
                self._pack_raw_header(n)
                return self._buffer.write(obj)
            if check(obj, memoryview):
                n = obj.nbytes
                if n >= 2**32:
                    raise ValueError("Memoryview is too large")
                self._pack_bin_header(n)
                return self._buffer.write(obj)
            if check(obj, float):
                if self._use_float:
                    return self._buffer.write(struct.pack(">Bf", 0xCA, obj))
                return self._buffer.write(struct.pack(">Bd", 0xCB, obj))
            if check(obj, (ExtType, Timestamp)):
                if check(obj, Timestamp):
                    code = -1
                    data = obj.to_bytes()
                else:
                    code = obj.code
                    data = obj.data
                assert isinstance(code, int)
                assert isinstance(data, bytes)
                L = len(data)
                if L == 1:
                    self._buffer.write(b"\xd4")
                elif L == 2:
                    self._buffer.write(b"\xd5")
                elif L == 4:
                    self._buffer.write(b"\xd6")
                elif L == 8:
                    self._buffer.write(b"\xd7")
                elif L == 16:
                    self._buffer.write(b"\xd8")
                elif L <= 0xFF:
                    self._buffer.write(struct.pack(">BB", 0xC7, L))
                elif L <= 0xFFFF:
                    self._buffer.write(struct.pack(">BH", 0xC8, L))
                else:
                    self._buffer.write(struct.pack(">BI", 0xC9, L))
                self._buffer.write(struct.pack("b", code))
                self._buffer.write(data)
                return
            if check(obj, list_types):
                n = len(obj)
                self._pack_array_header(n)
                for i in range(n):
                    self._pack(obj[i], nest_limit - 1)
                return
            if check(obj, dict):
                return self._pack_map_pairs(len(obj), obj.items(), nest_limit - 1)

            if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None:
                obj = Timestamp.from_datetime(obj)
                default_used = 1
                continue

            if not default_used and self._default is not None:
                obj = self._default(obj)
                default_used = 1
                continue

            if self._datetime and check(obj, _DateTime):
                raise ValueError(f"Cannot serialize {obj!r} where tzinfo=None")

            raise TypeError(f"Cannot serialize {obj!r}")

    def pack(self, obj):
        try:
            self._pack(obj)
        except:
            self._buffer = BytesIO()  # force reset
            raise
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = BytesIO()
            return ret

    def pack_map_pairs(self, pairs):
        self._pack_map_pairs(len(pairs), pairs)
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = BytesIO()
            return ret

    def pack_array_header(self, n):
        if n >= 2**32:
            raise ValueError
        self._pack_array_header(n)
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = BytesIO()
            return ret

    def pack_map_header(self, n):
        if n >= 2**32:
            raise ValueError
        self._pack_map_header(n)
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = BytesIO()
            return ret

    def pack_ext_type(self, typecode, data):
        if not isinstance(typecode, int):
            raise TypeError("typecode must have int type.")
        if not 0 <= typecode <= 127:
            raise ValueError("typecode should be 0-127")
        if not isinstance(data, bytes):
            raise TypeError("data must have bytes type")
        L = len(data)
        if L > 0xFFFFFFFF:
            raise ValueError("Too large data")
        if L == 1:
            self._buffer.write(b"\xd4")
        elif L == 2:
            self._buffer.write(b"\xd5")
        elif L == 4:
            self._buffer.write(b"\xd6")
        elif L == 8:
            self._buffer.write(b"\xd7")
        elif L == 16:
            self._buffer.write(b"\xd8")
        elif L <= 0xFF:
            self._buffer.write(b"\xc7" + struct.pack("B", L))
        elif L <= 0xFFFF:
            self._buffer.write(b"\xc8" + struct.pack(">H", L))
        else:
            self._buffer.write(b"\xc9" + struct.pack(">I", L))
        self._buffer.write(struct.pack("B", typecode))
        self._buffer.write(data)

    def _pack_array_header(self, n):
        if n <= 0x0F:
            return self._buffer.write(struct.pack("B", 0x90 + n))
        if n <= 0xFFFF:
            return self._buffer.write(struct.pack(">BH", 0xDC, n))
        if n <= 0xFFFFFFFF:
            return self._buffer.write(struct.pack(">BI", 0xDD, n))
        raise ValueError("Array is too large")

    def _pack_map_header(self, n):
        if n <= 0x0F:
            return self._buffer.write(struct.pack("B", 0x80 + n))
        if n <= 0xFFFF:
            return self._buffer.write(struct.pack(">BH", 0xDE, n))
        if n <= 0xFFFFFFFF:
            return self._buffer.write(struct.pack(">BI", 0xDF, n))
        raise ValueError("Dict is too large")

    def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
        self._pack_map_header(n)
        for k, v in pairs:
            self._pack(k, nest_limit - 1)
            self._pack(v, nest_limit - 1)

    def _pack_raw_header(self, n):
        if n <= 0x1F:
            self._buffer.write(struct.pack("B", 0xA0 + n))
        elif self._use_bin_type and n <= 0xFF:
            self._buffer.write(struct.pack(">BB", 0xD9, n))
        elif n <= 0xFFFF:
            self._buffer.write(struct.pack(">BH", 0xDA, n))
        elif n <= 0xFFFFFFFF:
            self._buffer.write(struct.pack(">BI", 0xDB, n))
        else:
            raise ValueError("Raw is too large")

    def _pack_bin_header(self, n):
        if not self._use_bin_type:
            return self._pack_raw_header(n)
        elif n <= 0xFF:
            return self._buffer.write(struct.pack(">BB", 0xC4, n))
        elif n <= 0xFFFF:
            return self._buffer.write(struct.pack(">BH", 0xC5, n))
        elif n <= 0xFFFFFFFF:
            return self._buffer.write(struct.pack(">BI", 0xC6, n))
        else:
            raise ValueError("Bin is too large")

    def bytes(self):
        """Return internal buffer contents as bytes object"""
        return self._buffer.getvalue()

    def reset(self):
        """Reset internal buffer.

        This method is useful only when autoreset=False.
        """
        self._buffer = BytesIO()

    def getbuffer(self):
        """Return view of internal buffer."""
        if _USING_STRINGBUILDER:
            return memoryview(self.bytes())
        else:
            return self._buffer.getbuffer()

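# Editor's sketch (not part of the vendored module): with the default
# autoreset=True, each pack() call returns the packed bytes directly.
#
#     packer = Packer()
#     assert packer.pack(5) == b"\x05"               # positive fixint
#     assert packer.pack("a") == b"\xa1a"            # fixstr, length 1
#     assert packer.pack([1, 2]) == b"\x92\x01\x02"  # fixarray of two fixints
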
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/__main__.py
ADDED
@@ -0,0 +1,17 @@
"""
pygments.__main__
~~~~~~~~~~~~~~~~~

Main entry point for ``python -m pygments``.

:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

import sys

from pip._vendor.pygments.cmdline import main

try:
    sys.exit(main(sys.argv))
except KeyboardInterrupt:
    sys.exit(1)
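# Editor's note (illustrative, not part of the vendored file): because this
# copy is vendored under pip, the equivalent invocation would be
# `python -m pip._vendor.pygments ...`, which forwards sys.argv to
# cmdline.main and exits with its return code.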
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/formatter.py
ADDED
@@ -0,0 +1,129 @@
"""
pygments.formatter
~~~~~~~~~~~~~~~~~~

Base formatter class.

:copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

import codecs

from pip._vendor.pygments.util import get_bool_opt
from pip._vendor.pygments.styles import get_style_by_name

__all__ = ['Formatter']


def _lookup_style(style):
    if isinstance(style, str):
        return get_style_by_name(style)
    return style


class Formatter:
    """
    Converts a token stream to text.

    Formatters should have attributes to help selecting them. These
    are similar to the corresponding :class:`~pygments.lexer.Lexer`
    attributes.

    .. autoattribute:: name
       :no-value:

    .. autoattribute:: aliases
       :no-value:

    .. autoattribute:: filenames
       :no-value:

    You can pass options as keyword arguments to the constructor.
    All formatters accept these basic options:

    ``style``
        The style to use, can be a string or a Style subclass
        (default: "default"). Not used by e.g. the
        TerminalFormatter.
    ``full``
        Tells the formatter to output a "full" document, i.e.
        a complete self-contained document. This doesn't have
        any effect for some formatters (default: false).
    ``title``
        If ``full`` is true, the title that should be used to
        caption the document (default: '').
    ``encoding``
        If given, must be an encoding name. This will be used to
        convert the Unicode token strings to byte strings in the
        output. If it is "" or None, Unicode strings will be written
        to the output file, which most file-like objects do not
        support (default: None).
    ``outencoding``
        Overrides ``encoding`` if given.

    """

    #: Full name for the formatter, in human-readable form.
    name = None

    #: A list of short, unique identifiers that can be used to look up
    #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`.
    aliases = []

    #: A list of fnmatch patterns that match filenames for which this
    #: formatter can produce output. The patterns in this list should be unique
    #: among all formatters.
    filenames = []

    #: If True, this formatter outputs Unicode strings when no encoding
    #: option is given.
    unicodeoutput = True

    def __init__(self, **options):
        """
        As with lexers, this constructor takes arbitrary optional arguments,
        and if you override it, you should first process your own options, then
        call the base class implementation.
        """
        self.style = _lookup_style(options.get('style', 'default'))
        self.full = get_bool_opt(options, 'full', False)
        self.title = options.get('title', '')
        self.encoding = options.get('encoding', None) or None
        if self.encoding in ('guess', 'chardet'):
            # can happen for e.g. pygmentize -O encoding=guess
            self.encoding = 'utf-8'
        self.encoding = options.get('outencoding') or self.encoding
        self.options = options

    def get_style_defs(self, arg=''):
        """
        This method must return statements or declarations suitable to define
        the current style for subsequent highlighted text (e.g. CSS classes
        in the `HTMLFormatter`).

        The optional argument `arg` can be used to modify the generation and
        is formatter dependent (it is standardized because it can be given on
        the command line).

        This method is called by the ``-S`` :doc:`command-line option <cmdline>`,
        the `arg` is then given by the ``-a`` option.
        """
        return ''

    def format(self, tokensource, outfile):
        """
        This method must format the tokens from the `tokensource` iterable and
        write the formatted version to the file object `outfile`.

        Formatter options can control how exactly the tokens are converted.
        """
        if self.encoding:
            # wrap the outfile in a StreamWriter
            outfile = codecs.lookup(self.encoding)[3](outfile)
        return self.format_unencoded(tokensource, outfile)

    # Allow writing Formatter[str] or Formatter[bytes]. That's equivalent to
    # Formatter. This helps when using third-party type stubs from typeshed.
    def __class_getitem__(cls, name):
        return cls