Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- evalkit_llava/bin/bzip2 +3 -0
- evalkit_llava/lib/python3.10/_sysconfigdata_x86_64_conda_linux_gnu.py +986 -0
- evalkit_llava/lib/python3.10/calendar.py +759 -0
- evalkit_llava/lib/python3.10/collections/__init__.py +1556 -0
- evalkit_llava/lib/python3.10/dataclasses.py +1453 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/bdist.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/bdist_rpm.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/build.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_clib.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_ext.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_py.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_scripts.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/clean.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/config.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/install.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_data.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_egg_info.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_headers.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_lib.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/__pycache__/register.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/command/build_scripts.py +160 -0
- evalkit_llava/lib/python3.10/distutils/command/install.py +679 -0
- evalkit_llava/lib/python3.10/distutils/command/register.py +304 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/support.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_bdist.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_bdist_msi.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_dist.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_filelist.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_msvc9compiler.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_text_file.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_unixccompiler.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_version.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_versionpredicate.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/ensurepip/__pycache__/__main__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/ensurepip/_bundled/__init__.py +0 -0
- evalkit_llava/lib/python3.10/getopt.py +215 -0
- evalkit_llava/lib/python3.10/gzip.py +609 -0
- evalkit_llava/lib/python3.10/json/__init__.py +359 -0
- evalkit_llava/lib/python3.10/json/__pycache__/encoder.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/json/__pycache__/scanner.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/json/scanner.py +73 -0
- evalkit_llava/lib/python3.10/keyword.py +63 -0
- evalkit_llava/lib/python3.10/multiprocessing/__init__.py +37 -0
- evalkit_llava/lib/python3.10/multiprocessing/managers.py +1378 -0
- evalkit_llava/lib/python3.10/multiprocessing/popen_forkserver.py +74 -0
- evalkit_llava/lib/python3.10/multiprocessing/util.py +489 -0
- evalkit_llava/lib/python3.10/reprlib.py +161 -0
- evalkit_llava/lib/python3.10/threading.py +1645 -0
.gitattributes
CHANGED
|
@@ -57,3 +57,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 57 |
# Video files - compressed
|
| 58 |
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 59 |
*.webm filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 57 |
# Video files - compressed
|
| 58 |
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 59 |
*.webm filter=lfs diff=lfs merge=lfs -text
|
| 60 |
+
evalkit_llava/bin/bzip2 filter=lfs diff=lfs merge=lfs -text
|
evalkit_llava/bin/bzip2
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:8a514cce807cb1656a3bcd59794401e7d63c9554267e9acc77097a406092a8ed
|
| 3 |
+
size 299464
|
evalkit_llava/lib/python3.10/_sysconfigdata_x86_64_conda_linux_gnu.py
ADDED
|
@@ -0,0 +1,986 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# system configuration generated and used by the sysconfig module
|
| 2 |
+
build_time_vars = {'ABIFLAGS': '',
|
| 3 |
+
'AC_APPLE_UNIVERSAL_BUILD': 0,
|
| 4 |
+
'AIX_BUILDDATE': 0,
|
| 5 |
+
'AIX_GENUINE_CPLUSPLUS': 0,
|
| 6 |
+
'ALIGNOF_LONG': 8,
|
| 7 |
+
'ALIGNOF_SIZE_T': 8,
|
| 8 |
+
'ALT_SOABI': 0,
|
| 9 |
+
'ANDROID_API_LEVEL': 0,
|
| 10 |
+
'AR': 'x86_64-conda-linux-gnu-ar',
|
| 11 |
+
'ARFLAGS': 'rcs',
|
| 12 |
+
'BASECFLAGS': '-Wno-unused-result -Wsign-compare',
|
| 13 |
+
'BASECPPFLAGS': '-IObjects -IInclude -IPython',
|
| 14 |
+
'BASEMODLIBS': '',
|
| 15 |
+
'BINDIR': '/root/envs/evalkit_llava/bin',
|
| 16 |
+
'BINLIBDEST': '/root/envs/evalkit_llava/lib/python3.10',
|
| 17 |
+
'BLDLIBRARY': 'libpython3.10.a',
|
| 18 |
+
'BLDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 19 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 20 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 21 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 22 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 23 |
+
'-L/root/envs/evalkit_llava/lib '
|
| 24 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 25 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 26 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 27 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 28 |
+
'-L/root/envs/evalkit_llava/lib',
|
| 29 |
+
'BUILDEXE': '',
|
| 30 |
+
'BUILDPYTHON': 'python',
|
| 31 |
+
'BUILD_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 32 |
+
'BYTESTR_DEPS': '\\',
|
| 33 |
+
'CC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 34 |
+
'CCSHARED': '-fPIC',
|
| 35 |
+
'CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 36 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 37 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 38 |
+
'-isystem '
|
| 39 |
+
'/root/envs/evalkit_llava/include '
|
| 40 |
+
' '
|
| 41 |
+
' '
|
| 42 |
+
' '
|
| 43 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 44 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 45 |
+
'-isystem '
|
| 46 |
+
'/root/envs/evalkit_llava/include '
|
| 47 |
+
' '
|
| 48 |
+
' '
|
| 49 |
+
' ',
|
| 50 |
+
'CFLAGSFORSHARED': '',
|
| 51 |
+
'CFLAGS_ALIASING': '',
|
| 52 |
+
'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in '
|
| 53 |
+
'Makefile.pre.in',
|
| 54 |
+
'CONFIGURE_CFLAGS': '-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 55 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 56 |
+
'-ffunction-sections -pipe -isystem '
|
| 57 |
+
'/root/envs/evalkit_llava/include '
|
| 58 |
+
' '
|
| 59 |
+
' '
|
| 60 |
+
' '
|
| 61 |
+
' ',
|
| 62 |
+
'CONFIGURE_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 63 |
+
' '
|
| 64 |
+
' -g -std=c99 -Wextra '
|
| 65 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 66 |
+
'-Wno-missing-field-initializers '
|
| 67 |
+
'-Werror=implicit-function-declaration '
|
| 68 |
+
'-fvisibility=hidden',
|
| 69 |
+
'CONFIGURE_CPPFLAGS': '-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 70 |
+
'/root/envs/evalkit_llava/include '
|
| 71 |
+
'-I/root/envs/evalkit_llava/include',
|
| 72 |
+
'CONFIGURE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 73 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 74 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 75 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 76 |
+
'-L/root/envs/evalkit_llava/lib',
|
| 77 |
+
'CONFIGURE_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 78 |
+
' '
|
| 79 |
+
' -g',
|
| 80 |
+
'CONFIG_ARGS': "'--prefix=/root/envs/evalkit_llava' "
|
| 81 |
+
"'--build=x86_64-conda-linux-gnu' "
|
| 82 |
+
"'--host=x86_64-conda-linux-gnu' '--enable-ipv6' "
|
| 83 |
+
"'--with-ensurepip=no' "
|
| 84 |
+
"'--with-tzpath=/root/envs/evalkit_llava/share/zoneinfo:/root/envs/evalkit_llava/share/tzinfo' "
|
| 85 |
+
"'--with-computed-gotos' '--with-system-ffi' "
|
| 86 |
+
"'--enable-loadable-sqlite-extensions' "
|
| 87 |
+
"'--with-tcltk-includes=-I/root/envs/evalkit_llava/include' "
|
| 88 |
+
"'--with-tcltk-libs=-L/root/envs/evalkit_llava/lib "
|
| 89 |
+
"-ltcl8.6 -ltk8.6' '--with-platlibdir=lib' '--with-lto' "
|
| 90 |
+
"'--enable-optimizations' "
|
| 91 |
+
"'-oldincludedir=/croot/python-split_1733933809325/_build_env/x86_64-conda-linux-gnu/sysroot/usr/include' "
|
| 92 |
+
"'--disable-shared' 'PROFILE_TASK=-m test --pgo' "
|
| 93 |
+
"'build_alias=x86_64-conda-linux-gnu' "
|
| 94 |
+
"'host_alias=x86_64-conda-linux-gnu' 'MACHDEP=linux' "
|
| 95 |
+
"'CC=x86_64-conda-linux-gnu-gcc' 'CFLAGS=-march=nocona "
|
| 96 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 97 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 98 |
+
'-pipe -isystem '
|
| 99 |
+
'/root/envs/evalkit_llava/include '
|
| 100 |
+
' '
|
| 101 |
+
' '
|
| 102 |
+
' '
|
| 103 |
+
"' 'LDFLAGS=-Wl,-O2 -Wl,--sort-common -Wl,--as-needed "
|
| 104 |
+
'-Wl,-z,relro -Wl,-z,now -Wl,--disable-new-dtags '
|
| 105 |
+
'-Wl,--gc-sections '
|
| 106 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 107 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 108 |
+
"-L/root/envs/evalkit_llava/lib' "
|
| 109 |
+
"'CPPFLAGS=-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem "
|
| 110 |
+
'/root/envs/evalkit_llava/include '
|
| 111 |
+
"-I/root/envs/evalkit_llava/include' "
|
| 112 |
+
"'CPP=/croot/python-split_1733933809325/_build_env/bin/x86_64-conda-linux-gnu-cpp' "
|
| 113 |
+
"'PKG_CONFIG_PATH=/root/envs/evalkit_llava/lib/pkgconfig'",
|
| 114 |
+
'CONFINCLUDEDIR': '/root/envs/evalkit_llava/include',
|
| 115 |
+
'CONFINCLUDEPY': '/root/envs/evalkit_llava/include/python3.10',
|
| 116 |
+
'COREPYTHONPATH': '',
|
| 117 |
+
'COVERAGE_INFO': '/croot/python-split_1733933809325/work/build-static/coverage.info',
|
| 118 |
+
'COVERAGE_REPORT': '/croot/python-split_1733933809325/work/build-static/lcov-report',
|
| 119 |
+
'COVERAGE_REPORT_OPTIONS': '--no-branch-coverage --title "CPython lcov '
|
| 120 |
+
'report"',
|
| 121 |
+
'CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 122 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 123 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 124 |
+
'/root/envs/evalkit_llava/include '
|
| 125 |
+
'-I/root/envs/evalkit_llava/include '
|
| 126 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 127 |
+
'/root/envs/evalkit_llava/include '
|
| 128 |
+
'-I/root/envs/evalkit_llava/include',
|
| 129 |
+
'CXX': 'x86_64-conda-linux-gnu-c++ -pthread',
|
| 130 |
+
'DESTDIRS': '/root/envs/evalkit_llava '
|
| 131 |
+
'/root/envs/evalkit_llava/lib '
|
| 132 |
+
'/root/envs/evalkit_llava/lib/python3.10 '
|
| 133 |
+
'/root/envs/evalkit_llava/lib/python3.10/lib-dynload',
|
| 134 |
+
'DESTLIB': '/root/envs/evalkit_llava/lib/python3.10',
|
| 135 |
+
'DESTPATH': '',
|
| 136 |
+
'DESTSHARED': '/root/envs/evalkit_llava/lib/python3.10/lib-dynload',
|
| 137 |
+
'DFLAGS': '',
|
| 138 |
+
'DIRMODE': 755,
|
| 139 |
+
'DIST': 'README.rst ChangeLog configure configure.ac acconfig.h pyconfig.h.in '
|
| 140 |
+
'Makefile.pre.in Include Lib Misc Ext-dummy',
|
| 141 |
+
'DISTDIRS': 'Include Lib Misc Ext-dummy',
|
| 142 |
+
'DISTFILES': 'README.rst ChangeLog configure configure.ac acconfig.h '
|
| 143 |
+
'pyconfig.h.in Makefile.pre.in',
|
| 144 |
+
'DLINCLDIR': '.',
|
| 145 |
+
'DLLLIBRARY': '',
|
| 146 |
+
'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754': 0,
|
| 147 |
+
'DOUBLE_IS_BIG_ENDIAN_IEEE754': 0,
|
| 148 |
+
'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1,
|
| 149 |
+
'DTRACE': '',
|
| 150 |
+
'DTRACE_DEPS': '\\',
|
| 151 |
+
'DTRACE_HEADERS': '',
|
| 152 |
+
'DTRACE_OBJS': '',
|
| 153 |
+
'DYNLOADFILE': 'dynload_shlib.o',
|
| 154 |
+
'ENABLE_IPV6': 1,
|
| 155 |
+
'ENSUREPIP': 'no',
|
| 156 |
+
'EXE': '',
|
| 157 |
+
'EXEMODE': 755,
|
| 158 |
+
'EXPERIMENTAL_ISOLATED_SUBINTERPRETERS': 0,
|
| 159 |
+
'EXPORTSFROM': '',
|
| 160 |
+
'EXPORTSYMS': '',
|
| 161 |
+
'EXTRATESTOPTS': '',
|
| 162 |
+
'EXT_SUFFIX': '.cpython-310-x86_64-linux-gnu.so',
|
| 163 |
+
'FILEMODE': 644,
|
| 164 |
+
'FLOAT_WORDS_BIGENDIAN': 0,
|
| 165 |
+
'FLOCK_NEEDS_LIBBSD': 0,
|
| 166 |
+
'GETPGRP_HAVE_ARG': 0,
|
| 167 |
+
'GITBRANCH': '',
|
| 168 |
+
'GITTAG': '',
|
| 169 |
+
'GITVERSION': '',
|
| 170 |
+
'GNULD': 'no',
|
| 171 |
+
'HAVE_ACCEPT4': 1,
|
| 172 |
+
'HAVE_ACOSH': 1,
|
| 173 |
+
'HAVE_ADDRINFO': 1,
|
| 174 |
+
'HAVE_ALARM': 1,
|
| 175 |
+
'HAVE_ALIGNED_REQUIRED': 0,
|
| 176 |
+
'HAVE_ALLOCA_H': 1,
|
| 177 |
+
'HAVE_ALTZONE': 0,
|
| 178 |
+
'HAVE_ASINH': 1,
|
| 179 |
+
'HAVE_ASM_TYPES_H': 1,
|
| 180 |
+
'HAVE_ATANH': 1,
|
| 181 |
+
'HAVE_BIND_TEXTDOMAIN_CODESET': 1,
|
| 182 |
+
'HAVE_BLUETOOTH_BLUETOOTH_H': 0,
|
| 183 |
+
'HAVE_BLUETOOTH_H': 0,
|
| 184 |
+
'HAVE_BROKEN_MBSTOWCS': 0,
|
| 185 |
+
'HAVE_BROKEN_NICE': 0,
|
| 186 |
+
'HAVE_BROKEN_PIPE_BUF': 0,
|
| 187 |
+
'HAVE_BROKEN_POLL': 0,
|
| 188 |
+
'HAVE_BROKEN_POSIX_SEMAPHORES': 0,
|
| 189 |
+
'HAVE_BROKEN_PTHREAD_SIGMASK': 0,
|
| 190 |
+
'HAVE_BROKEN_SEM_GETVALUE': 0,
|
| 191 |
+
'HAVE_BROKEN_UNSETENV': 0,
|
| 192 |
+
'HAVE_BUILTIN_ATOMIC': 1,
|
| 193 |
+
'HAVE_CHFLAGS': 0,
|
| 194 |
+
'HAVE_CHOWN': 1,
|
| 195 |
+
'HAVE_CHROOT': 1,
|
| 196 |
+
'HAVE_CLOCK': 1,
|
| 197 |
+
'HAVE_CLOCK_GETRES': 1,
|
| 198 |
+
'HAVE_CLOCK_GETTIME': 1,
|
| 199 |
+
'HAVE_CLOCK_SETTIME': 1,
|
| 200 |
+
'HAVE_CLOSE_RANGE': 0,
|
| 201 |
+
'HAVE_COMPUTED_GOTOS': 1,
|
| 202 |
+
'HAVE_CONFSTR': 1,
|
| 203 |
+
'HAVE_CONIO_H': 0,
|
| 204 |
+
'HAVE_COPYSIGN': 1,
|
| 205 |
+
'HAVE_COPY_FILE_RANGE': 0,
|
| 206 |
+
'HAVE_CRYPT_H': 1,
|
| 207 |
+
'HAVE_CRYPT_R': 1,
|
| 208 |
+
'HAVE_CTERMID': 1,
|
| 209 |
+
'HAVE_CTERMID_R': 0,
|
| 210 |
+
'HAVE_CURSES_FILTER': 1,
|
| 211 |
+
'HAVE_CURSES_H': 1,
|
| 212 |
+
'HAVE_CURSES_HAS_KEY': 1,
|
| 213 |
+
'HAVE_CURSES_IMMEDOK': 1,
|
| 214 |
+
'HAVE_CURSES_IS_PAD': 1,
|
| 215 |
+
'HAVE_CURSES_IS_TERM_RESIZED': 1,
|
| 216 |
+
'HAVE_CURSES_RESIZETERM': 1,
|
| 217 |
+
'HAVE_CURSES_RESIZE_TERM': 1,
|
| 218 |
+
'HAVE_CURSES_SYNCOK': 1,
|
| 219 |
+
'HAVE_CURSES_TYPEAHEAD': 1,
|
| 220 |
+
'HAVE_CURSES_USE_ENV': 1,
|
| 221 |
+
'HAVE_CURSES_WCHGAT': 1,
|
| 222 |
+
'HAVE_DECL_ISFINITE': 1,
|
| 223 |
+
'HAVE_DECL_ISINF': 1,
|
| 224 |
+
'HAVE_DECL_ISNAN': 1,
|
| 225 |
+
'HAVE_DECL_RTLD_DEEPBIND': 1,
|
| 226 |
+
'HAVE_DECL_RTLD_GLOBAL': 1,
|
| 227 |
+
'HAVE_DECL_RTLD_LAZY': 1,
|
| 228 |
+
'HAVE_DECL_RTLD_LOCAL': 1,
|
| 229 |
+
'HAVE_DECL_RTLD_MEMBER': 0,
|
| 230 |
+
'HAVE_DECL_RTLD_NODELETE': 1,
|
| 231 |
+
'HAVE_DECL_RTLD_NOLOAD': 1,
|
| 232 |
+
'HAVE_DECL_RTLD_NOW': 1,
|
| 233 |
+
'HAVE_DECL_TZNAME': 0,
|
| 234 |
+
'HAVE_DEVICE_MACROS': 1,
|
| 235 |
+
'HAVE_DEV_PTC': 0,
|
| 236 |
+
'HAVE_DEV_PTMX': 1,
|
| 237 |
+
'HAVE_DIRECT_H': 0,
|
| 238 |
+
'HAVE_DIRENT_D_TYPE': 1,
|
| 239 |
+
'HAVE_DIRENT_H': 1,
|
| 240 |
+
'HAVE_DIRFD': 1,
|
| 241 |
+
'HAVE_DLFCN_H': 1,
|
| 242 |
+
'HAVE_DLOPEN': 1,
|
| 243 |
+
'HAVE_DUP2': 1,
|
| 244 |
+
'HAVE_DUP3': 1,
|
| 245 |
+
'HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH': 0,
|
| 246 |
+
'HAVE_DYNAMIC_LOADING': 1,
|
| 247 |
+
'HAVE_ENDIAN_H': 1,
|
| 248 |
+
'HAVE_EPOLL': 1,
|
| 249 |
+
'HAVE_EPOLL_CREATE1': 1,
|
| 250 |
+
'HAVE_ERF': 1,
|
| 251 |
+
'HAVE_ERFC': 1,
|
| 252 |
+
'HAVE_ERRNO_H': 1,
|
| 253 |
+
'HAVE_EVENTFD': 1,
|
| 254 |
+
'HAVE_EXECV': 1,
|
| 255 |
+
'HAVE_EXPLICIT_BZERO': 0,
|
| 256 |
+
'HAVE_EXPLICIT_MEMSET': 0,
|
| 257 |
+
'HAVE_EXPM1': 1,
|
| 258 |
+
'HAVE_FACCESSAT': 1,
|
| 259 |
+
'HAVE_FCHDIR': 1,
|
| 260 |
+
'HAVE_FCHMOD': 1,
|
| 261 |
+
'HAVE_FCHMODAT': 1,
|
| 262 |
+
'HAVE_FCHOWN': 1,
|
| 263 |
+
'HAVE_FCHOWNAT': 1,
|
| 264 |
+
'HAVE_FCNTL_H': 1,
|
| 265 |
+
'HAVE_FDATASYNC': 1,
|
| 266 |
+
'HAVE_FDOPENDIR': 1,
|
| 267 |
+
'HAVE_FDWALK': 0,
|
| 268 |
+
'HAVE_FEXECVE': 1,
|
| 269 |
+
'HAVE_FINITE': 1,
|
| 270 |
+
'HAVE_FLOCK': 1,
|
| 271 |
+
'HAVE_FORK': 1,
|
| 272 |
+
'HAVE_FORKPTY': 1,
|
| 273 |
+
'HAVE_FPATHCONF': 1,
|
| 274 |
+
'HAVE_FSEEK64': 0,
|
| 275 |
+
'HAVE_FSEEKO': 1,
|
| 276 |
+
'HAVE_FSTATAT': 1,
|
| 277 |
+
'HAVE_FSTATVFS': 1,
|
| 278 |
+
'HAVE_FSYNC': 1,
|
| 279 |
+
'HAVE_FTELL64': 0,
|
| 280 |
+
'HAVE_FTELLO': 1,
|
| 281 |
+
'HAVE_FTIME': 1,
|
| 282 |
+
'HAVE_FTRUNCATE': 1,
|
| 283 |
+
'HAVE_FUTIMENS': 1,
|
| 284 |
+
'HAVE_FUTIMES': 1,
|
| 285 |
+
'HAVE_FUTIMESAT': 1,
|
| 286 |
+
'HAVE_GAI_STRERROR': 1,
|
| 287 |
+
'HAVE_GAMMA': 1,
|
| 288 |
+
'HAVE_GCC_ASM_FOR_MC68881': 0,
|
| 289 |
+
'HAVE_GCC_ASM_FOR_X64': 1,
|
| 290 |
+
'HAVE_GCC_ASM_FOR_X87': 1,
|
| 291 |
+
'HAVE_GCC_UINT128_T': 1,
|
| 292 |
+
'HAVE_GETADDRINFO': 1,
|
| 293 |
+
'HAVE_GETC_UNLOCKED': 1,
|
| 294 |
+
'HAVE_GETENTROPY': 0,
|
| 295 |
+
'HAVE_GETGRGID_R': 1,
|
| 296 |
+
'HAVE_GETGRNAM_R': 1,
|
| 297 |
+
'HAVE_GETGROUPLIST': 1,
|
| 298 |
+
'HAVE_GETGROUPS': 1,
|
| 299 |
+
'HAVE_GETHOSTBYNAME': 0,
|
| 300 |
+
'HAVE_GETHOSTBYNAME_R': 1,
|
| 301 |
+
'HAVE_GETHOSTBYNAME_R_3_ARG': 0,
|
| 302 |
+
'HAVE_GETHOSTBYNAME_R_5_ARG': 0,
|
| 303 |
+
'HAVE_GETHOSTBYNAME_R_6_ARG': 1,
|
| 304 |
+
'HAVE_GETITIMER': 1,
|
| 305 |
+
'HAVE_GETLOADAVG': 1,
|
| 306 |
+
'HAVE_GETLOGIN': 1,
|
| 307 |
+
'HAVE_GETNAMEINFO': 1,
|
| 308 |
+
'HAVE_GETPAGESIZE': 1,
|
| 309 |
+
'HAVE_GETPEERNAME': 1,
|
| 310 |
+
'HAVE_GETPGID': 1,
|
| 311 |
+
'HAVE_GETPGRP': 1,
|
| 312 |
+
'HAVE_GETPID': 1,
|
| 313 |
+
'HAVE_GETPRIORITY': 1,
|
| 314 |
+
'HAVE_GETPWENT': 1,
|
| 315 |
+
'HAVE_GETPWNAM_R': 1,
|
| 316 |
+
'HAVE_GETPWUID_R': 1,
|
| 317 |
+
'HAVE_GETRANDOM': 0,
|
| 318 |
+
'HAVE_GETRANDOM_SYSCALL': 1,
|
| 319 |
+
'HAVE_GETRESGID': 1,
|
| 320 |
+
'HAVE_GETRESUID': 1,
|
| 321 |
+
'HAVE_GETSID': 1,
|
| 322 |
+
'HAVE_GETSPENT': 1,
|
| 323 |
+
'HAVE_GETSPNAM': 1,
|
| 324 |
+
'HAVE_GETWD': 1,
|
| 325 |
+
'HAVE_GLIBC_MEMMOVE_BUG': 0,
|
| 326 |
+
'HAVE_GRP_H': 1,
|
| 327 |
+
'HAVE_HSTRERROR': 1,
|
| 328 |
+
'HAVE_HTOLE64': 1,
|
| 329 |
+
'HAVE_HYPOT': 1,
|
| 330 |
+
'HAVE_IEEEFP_H': 0,
|
| 331 |
+
'HAVE_IF_NAMEINDEX': 1,
|
| 332 |
+
'HAVE_INET_ATON': 1,
|
| 333 |
+
'HAVE_INET_PTON': 1,
|
| 334 |
+
'HAVE_INITGROUPS': 1,
|
| 335 |
+
'HAVE_INTTYPES_H': 1,
|
| 336 |
+
'HAVE_IO_H': 0,
|
| 337 |
+
'HAVE_IPA_PURE_CONST_BUG': 0,
|
| 338 |
+
'HAVE_KILL': 1,
|
| 339 |
+
'HAVE_KILLPG': 1,
|
| 340 |
+
'HAVE_KQUEUE': 0,
|
| 341 |
+
'HAVE_LANGINFO_H': 1,
|
| 342 |
+
'HAVE_LARGEFILE_SUPPORT': 0,
|
| 343 |
+
'HAVE_LCHFLAGS': 0,
|
| 344 |
+
'HAVE_LCHMOD': 0,
|
| 345 |
+
'HAVE_LCHOWN': 1,
|
| 346 |
+
'HAVE_LGAMMA': 1,
|
| 347 |
+
'HAVE_LIBDL': 1,
|
| 348 |
+
'HAVE_LIBDLD': 0,
|
| 349 |
+
'HAVE_LIBIEEE': 0,
|
| 350 |
+
'HAVE_LIBINTL_H': 1,
|
| 351 |
+
'HAVE_LIBREADLINE': 1,
|
| 352 |
+
'HAVE_LIBRESOLV': 0,
|
| 353 |
+
'HAVE_LIBSENDFILE': 0,
|
| 354 |
+
'HAVE_LIBUTIL_H': 0,
|
| 355 |
+
'HAVE_LIBUUID': 1,
|
| 356 |
+
'HAVE_LINK': 1,
|
| 357 |
+
'HAVE_LINKAT': 1,
|
| 358 |
+
'HAVE_LINUX_AUXVEC_H': 1,
|
| 359 |
+
'HAVE_LINUX_CAN_BCM_H': 1,
|
| 360 |
+
'HAVE_LINUX_CAN_H': 1,
|
| 361 |
+
'HAVE_LINUX_CAN_J1939_H': 0,
|
| 362 |
+
'HAVE_LINUX_CAN_RAW_FD_FRAMES': 1,
|
| 363 |
+
'HAVE_LINUX_CAN_RAW_H': 1,
|
| 364 |
+
'HAVE_LINUX_CAN_RAW_JOIN_FILTERS': 1,
|
| 365 |
+
'HAVE_LINUX_MEMFD_H': 1,
|
| 366 |
+
'HAVE_LINUX_NETLINK_H': 1,
|
| 367 |
+
'HAVE_LINUX_QRTR_H': 0,
|
| 368 |
+
'HAVE_LINUX_RANDOM_H': 1,
|
| 369 |
+
'HAVE_LINUX_TIPC_H': 1,
|
| 370 |
+
'HAVE_LINUX_VM_SOCKETS_H': 1,
|
| 371 |
+
'HAVE_LINUX_WAIT_H': 1,
|
| 372 |
+
'HAVE_LOCKF': 1,
|
| 373 |
+
'HAVE_LOG1P': 1,
|
| 374 |
+
'HAVE_LOG2': 1,
|
| 375 |
+
'HAVE_LONG_DOUBLE': 1,
|
| 376 |
+
'HAVE_LSTAT': 1,
|
| 377 |
+
'HAVE_LUTIMES': 1,
|
| 378 |
+
'HAVE_MADVISE': 1,
|
| 379 |
+
'HAVE_MAKEDEV': 1,
|
| 380 |
+
'HAVE_MBRTOWC': 1,
|
| 381 |
+
'HAVE_MEMFD_CREATE': 0,
|
| 382 |
+
'HAVE_MEMORY_H': 1,
|
| 383 |
+
'HAVE_MEMRCHR': 1,
|
| 384 |
+
'HAVE_MKDIRAT': 1,
|
| 385 |
+
'HAVE_MKFIFO': 1,
|
| 386 |
+
'HAVE_MKFIFOAT': 1,
|
| 387 |
+
'HAVE_MKNOD': 1,
|
| 388 |
+
'HAVE_MKNODAT': 1,
|
| 389 |
+
'HAVE_MKTIME': 1,
|
| 390 |
+
'HAVE_MMAP': 1,
|
| 391 |
+
'HAVE_MREMAP': 1,
|
| 392 |
+
'HAVE_NCURSES_H': 1,
|
| 393 |
+
'HAVE_NDIR_H': 0,
|
| 394 |
+
'HAVE_NETPACKET_PACKET_H': 1,
|
| 395 |
+
'HAVE_NET_IF_H': 1,
|
| 396 |
+
'HAVE_NICE': 1,
|
| 397 |
+
'HAVE_NON_UNICODE_WCHAR_T_REPRESENTATION': 0,
|
| 398 |
+
'HAVE_OPENAT': 1,
|
| 399 |
+
'HAVE_OPENPTY': 1,
|
| 400 |
+
'HAVE_PATHCONF': 1,
|
| 401 |
+
'HAVE_PAUSE': 1,
|
| 402 |
+
'HAVE_PIPE2': 1,
|
| 403 |
+
'HAVE_PLOCK': 0,
|
| 404 |
+
'HAVE_POLL': 1,
|
| 405 |
+
'HAVE_POLL_H': 1,
|
| 406 |
+
'HAVE_POSIX_FADVISE': 1,
|
| 407 |
+
'HAVE_POSIX_FALLOCATE': 1,
|
| 408 |
+
'HAVE_POSIX_SPAWN': 1,
|
| 409 |
+
'HAVE_POSIX_SPAWNP': 1,
|
| 410 |
+
'HAVE_PREAD': 1,
|
| 411 |
+
'HAVE_PREADV': 1,
|
| 412 |
+
'HAVE_PREADV2': 0,
|
| 413 |
+
'HAVE_PRLIMIT': 1,
|
| 414 |
+
'HAVE_PROCESS_H': 0,
|
| 415 |
+
'HAVE_PROTOTYPES': 1,
|
| 416 |
+
'HAVE_PTHREAD_CONDATTR_SETCLOCK': 1,
|
| 417 |
+
'HAVE_PTHREAD_DESTRUCTOR': 0,
|
| 418 |
+
'HAVE_PTHREAD_GETCPUCLOCKID': 1,
|
| 419 |
+
'HAVE_PTHREAD_H': 1,
|
| 420 |
+
'HAVE_PTHREAD_INIT': 0,
|
| 421 |
+
'HAVE_PTHREAD_KILL': 1,
|
| 422 |
+
'HAVE_PTHREAD_SIGMASK': 1,
|
| 423 |
+
'HAVE_PTY_H': 1,
|
| 424 |
+
'HAVE_PWRITE': 1,
|
| 425 |
+
'HAVE_PWRITEV': 1,
|
| 426 |
+
'HAVE_PWRITEV2': 0,
|
| 427 |
+
'HAVE_READLINK': 1,
|
| 428 |
+
'HAVE_READLINKAT': 1,
|
| 429 |
+
'HAVE_READV': 1,
|
| 430 |
+
'HAVE_REALPATH': 1,
|
| 431 |
+
'HAVE_RENAMEAT': 1,
|
| 432 |
+
'HAVE_RL_APPEND_HISTORY': 1,
|
| 433 |
+
'HAVE_RL_CATCH_SIGNAL': 1,
|
| 434 |
+
'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1,
|
| 435 |
+
'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1,
|
| 436 |
+
'HAVE_RL_COMPLETION_MATCHES': 1,
|
| 437 |
+
'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1,
|
| 438 |
+
'HAVE_RL_PRE_INPUT_HOOK': 1,
|
| 439 |
+
'HAVE_RL_RESIZE_TERMINAL': 1,
|
| 440 |
+
'HAVE_ROUND': 1,
|
| 441 |
+
'HAVE_RTPSPAWN': 0,
|
| 442 |
+
'HAVE_SCHED_GET_PRIORITY_MAX': 1,
|
| 443 |
+
'HAVE_SCHED_H': 1,
|
| 444 |
+
'HAVE_SCHED_RR_GET_INTERVAL': 1,
|
| 445 |
+
'HAVE_SCHED_SETAFFINITY': 1,
|
| 446 |
+
'HAVE_SCHED_SETPARAM': 1,
|
| 447 |
+
'HAVE_SCHED_SETSCHEDULER': 1,
|
| 448 |
+
'HAVE_SEM_CLOCKWAIT': 0,
|
| 449 |
+
'HAVE_SEM_GETVALUE': 1,
|
| 450 |
+
'HAVE_SEM_OPEN': 1,
|
| 451 |
+
'HAVE_SEM_TIMEDWAIT': 1,
|
| 452 |
+
'HAVE_SEM_UNLINK': 1,
|
| 453 |
+
'HAVE_SENDFILE': 1,
|
| 454 |
+
'HAVE_SETEGID': 1,
|
| 455 |
+
'HAVE_SETEUID': 1,
|
| 456 |
+
'HAVE_SETGID': 1,
|
| 457 |
+
'HAVE_SETGROUPS': 1,
|
| 458 |
+
'HAVE_SETHOSTNAME': 1,
|
| 459 |
+
'HAVE_SETITIMER': 1,
|
| 460 |
+
'HAVE_SETLOCALE': 1,
|
| 461 |
+
'HAVE_SETPGID': 1,
|
| 462 |
+
'HAVE_SETPGRP': 1,
|
| 463 |
+
'HAVE_SETPRIORITY': 1,
|
| 464 |
+
'HAVE_SETREGID': 1,
|
| 465 |
+
'HAVE_SETRESGID': 1,
|
| 466 |
+
'HAVE_SETRESUID': 1,
|
| 467 |
+
'HAVE_SETREUID': 1,
|
| 468 |
+
'HAVE_SETSID': 1,
|
| 469 |
+
'HAVE_SETUID': 1,
|
| 470 |
+
'HAVE_SETVBUF': 1,
|
| 471 |
+
'HAVE_SHADOW_H': 1,
|
| 472 |
+
'HAVE_SHM_OPEN': 1,
|
| 473 |
+
'HAVE_SHM_UNLINK': 1,
|
| 474 |
+
'HAVE_SIGACTION': 1,
|
| 475 |
+
'HAVE_SIGALTSTACK': 1,
|
| 476 |
+
'HAVE_SIGFILLSET': 1,
|
| 477 |
+
'HAVE_SIGINFO_T_SI_BAND': 1,
|
| 478 |
+
'HAVE_SIGINTERRUPT': 1,
|
| 479 |
+
'HAVE_SIGNAL_H': 1,
|
| 480 |
+
'HAVE_SIGPENDING': 1,
|
| 481 |
+
'HAVE_SIGRELSE': 1,
|
| 482 |
+
'HAVE_SIGTIMEDWAIT': 1,
|
| 483 |
+
'HAVE_SIGWAIT': 1,
|
| 484 |
+
'HAVE_SIGWAITINFO': 1,
|
| 485 |
+
'HAVE_SNPRINTF': 1,
|
| 486 |
+
'HAVE_SOCKADDR_ALG': 1,
|
| 487 |
+
'HAVE_SOCKADDR_SA_LEN': 0,
|
| 488 |
+
'HAVE_SOCKADDR_STORAGE': 1,
|
| 489 |
+
'HAVE_SOCKETPAIR': 1,
|
| 490 |
+
'HAVE_SPAWN_H': 1,
|
| 491 |
+
'HAVE_SPLICE': 1,
|
| 492 |
+
'HAVE_SSIZE_T': 1,
|
| 493 |
+
'HAVE_STATVFS': 1,
|
| 494 |
+
'HAVE_STAT_TV_NSEC': 1,
|
| 495 |
+
'HAVE_STAT_TV_NSEC2': 0,
|
| 496 |
+
'HAVE_STDARG_PROTOTYPES': 1,
|
| 497 |
+
'HAVE_STDINT_H': 1,
|
| 498 |
+
'HAVE_STDLIB_H': 1,
|
| 499 |
+
'HAVE_STD_ATOMIC': 1,
|
| 500 |
+
'HAVE_STRFTIME': 1,
|
| 501 |
+
'HAVE_STRINGS_H': 1,
|
| 502 |
+
'HAVE_STRING_H': 1,
|
| 503 |
+
'HAVE_STRLCPY': 0,
|
| 504 |
+
'HAVE_STROPTS_H': 0,
|
| 505 |
+
'HAVE_STRSIGNAL': 1,
|
| 506 |
+
'HAVE_STRUCT_PASSWD_PW_GECOS': 1,
|
| 507 |
+
'HAVE_STRUCT_PASSWD_PW_PASSWD': 1,
|
| 508 |
+
'HAVE_STRUCT_STAT_ST_BIRTHTIME': 0,
|
| 509 |
+
'HAVE_STRUCT_STAT_ST_BLKSIZE': 1,
|
| 510 |
+
'HAVE_STRUCT_STAT_ST_BLOCKS': 1,
|
| 511 |
+
'HAVE_STRUCT_STAT_ST_FLAGS': 0,
|
| 512 |
+
'HAVE_STRUCT_STAT_ST_GEN': 0,
|
| 513 |
+
'HAVE_STRUCT_STAT_ST_RDEV': 1,
|
| 514 |
+
'HAVE_STRUCT_TM_TM_ZONE': 1,
|
| 515 |
+
'HAVE_SYMLINK': 1,
|
| 516 |
+
'HAVE_SYMLINKAT': 1,
|
| 517 |
+
'HAVE_SYNC': 1,
|
| 518 |
+
'HAVE_SYSCONF': 1,
|
| 519 |
+
'HAVE_SYSEXITS_H': 1,
|
| 520 |
+
'HAVE_SYS_AUDIOIO_H': 0,
|
| 521 |
+
'HAVE_SYS_AUXV_H': 1,
|
| 522 |
+
'HAVE_SYS_BSDTTY_H': 0,
|
| 523 |
+
'HAVE_SYS_DEVPOLL_H': 0,
|
| 524 |
+
'HAVE_SYS_DIR_H': 0,
|
| 525 |
+
'HAVE_SYS_ENDIAN_H': 0,
|
| 526 |
+
'HAVE_SYS_EPOLL_H': 1,
|
| 527 |
+
'HAVE_SYS_EVENTFD_H': 1,
|
| 528 |
+
'HAVE_SYS_EVENT_H': 0,
|
| 529 |
+
'HAVE_SYS_FILE_H': 1,
|
| 530 |
+
'HAVE_SYS_IOCTL_H': 1,
|
| 531 |
+
'HAVE_SYS_KERN_CONTROL_H': 0,
|
| 532 |
+
'HAVE_SYS_LOADAVG_H': 0,
|
| 533 |
+
'HAVE_SYS_LOCK_H': 0,
|
| 534 |
+
'HAVE_SYS_MEMFD_H': 0,
|
| 535 |
+
'HAVE_SYS_MKDEV_H': 0,
|
| 536 |
+
'HAVE_SYS_MMAN_H': 1,
|
| 537 |
+
'HAVE_SYS_MODEM_H': 0,
|
| 538 |
+
'HAVE_SYS_NDIR_H': 0,
|
| 539 |
+
'HAVE_SYS_PARAM_H': 1,
|
| 540 |
+
'HAVE_SYS_POLL_H': 1,
|
| 541 |
+
'HAVE_SYS_RANDOM_H': 0,
|
| 542 |
+
'HAVE_SYS_RESOURCE_H': 1,
|
| 543 |
+
'HAVE_SYS_SELECT_H': 1,
|
| 544 |
+
'HAVE_SYS_SENDFILE_H': 1,
|
| 545 |
+
'HAVE_SYS_SOCKET_H': 1,
|
| 546 |
+
'HAVE_SYS_STATVFS_H': 1,
|
| 547 |
+
'HAVE_SYS_STAT_H': 1,
|
| 548 |
+
'HAVE_SYS_SYSCALL_H': 1,
|
| 549 |
+
'HAVE_SYS_SYSMACROS_H': 1,
|
| 550 |
+
'HAVE_SYS_SYS_DOMAIN_H': 0,
|
| 551 |
+
'HAVE_SYS_TERMIO_H': 0,
|
| 552 |
+
'HAVE_SYS_TIMES_H': 1,
|
| 553 |
+
'HAVE_SYS_TIME_H': 1,
|
| 554 |
+
'HAVE_SYS_TYPES_H': 1,
|
| 555 |
+
'HAVE_SYS_UIO_H': 1,
|
| 556 |
+
'HAVE_SYS_UN_H': 1,
|
| 557 |
+
'HAVE_SYS_UTSNAME_H': 1,
|
| 558 |
+
'HAVE_SYS_WAIT_H': 1,
|
| 559 |
+
'HAVE_SYS_XATTR_H': 1,
|
| 560 |
+
'HAVE_TCGETPGRP': 1,
|
| 561 |
+
'HAVE_TCSETPGRP': 1,
|
| 562 |
+
'HAVE_TEMPNAM': 1,
|
| 563 |
+
'HAVE_TERMIOS_H': 1,
|
| 564 |
+
'HAVE_TERM_H': 1,
|
| 565 |
+
'HAVE_TGAMMA': 1,
|
| 566 |
+
'HAVE_TIMEGM': 1,
|
| 567 |
+
'HAVE_TIMES': 1,
|
| 568 |
+
'HAVE_TMPFILE': 1,
|
| 569 |
+
'HAVE_TMPNAM': 1,
|
| 570 |
+
'HAVE_TMPNAM_R': 1,
|
| 571 |
+
'HAVE_TM_ZONE': 1,
|
| 572 |
+
'HAVE_TRUNCATE': 1,
|
| 573 |
+
'HAVE_TZNAME': 0,
|
| 574 |
+
'HAVE_UCS4_TCL': 0,
|
| 575 |
+
'HAVE_UNAME': 1,
|
| 576 |
+
'HAVE_UNISTD_H': 1,
|
| 577 |
+
'HAVE_UNLINKAT': 1,
|
| 578 |
+
'HAVE_USABLE_WCHAR_T': 0,
|
| 579 |
+
'HAVE_UTIL_H': 0,
|
| 580 |
+
'HAVE_UTIMENSAT': 1,
|
| 581 |
+
'HAVE_UTIMES': 1,
|
| 582 |
+
'HAVE_UTIME_H': 1,
|
| 583 |
+
'HAVE_UUID_CREATE': 0,
|
| 584 |
+
'HAVE_UUID_ENC_BE': 0,
|
| 585 |
+
'HAVE_UUID_GENERATE_TIME_SAFE': 1,
|
| 586 |
+
'HAVE_UUID_H': 1,
|
| 587 |
+
'HAVE_UUID_UUID_H': 1,
|
| 588 |
+
'HAVE_VFORK': 1,
|
| 589 |
+
'HAVE_WAIT3': 1,
|
| 590 |
+
'HAVE_WAIT4': 1,
|
| 591 |
+
'HAVE_WAITID': 1,
|
| 592 |
+
'HAVE_WAITPID': 1,
|
| 593 |
+
'HAVE_WCHAR_H': 1,
|
| 594 |
+
'HAVE_WCSCOLL': 1,
|
| 595 |
+
'HAVE_WCSFTIME': 1,
|
| 596 |
+
'HAVE_WCSXFRM': 1,
|
| 597 |
+
'HAVE_WMEMCMP': 1,
|
| 598 |
+
'HAVE_WORKING_TZSET': 1,
|
| 599 |
+
'HAVE_WRITEV': 1,
|
| 600 |
+
'HAVE_ZLIB_COPY': 1,
|
| 601 |
+
'HAVE__GETPTY': 0,
|
| 602 |
+
'HOST_GNU_TYPE': 'x86_64-conda-linux-gnu',
|
| 603 |
+
'INCLDIRSTOMAKE': '/root/envs/evalkit_llava/include '
|
| 604 |
+
'/root/envs/evalkit_llava/include '
|
| 605 |
+
'/root/envs/evalkit_llava/include/python3.10 '
|
| 606 |
+
'/root/envs/evalkit_llava/include/python3.10',
|
| 607 |
+
'INCLUDEDIR': '/root/envs/evalkit_llava/include',
|
| 608 |
+
'INCLUDEPY': '/root/envs/evalkit_llava/include/python3.10',
|
| 609 |
+
'INSTALL': '/usr/bin/install -c',
|
| 610 |
+
'INSTALL_DATA': '/usr/bin/install -c -m 644',
|
| 611 |
+
'INSTALL_PROGRAM': '/usr/bin/install -c',
|
| 612 |
+
'INSTALL_SCRIPT': '/usr/bin/install -c',
|
| 613 |
+
'INSTALL_SHARED': '/usr/bin/install -c -m 755',
|
| 614 |
+
'INSTSONAME': 'libpython3.10.a',
|
| 615 |
+
'IO_H': 'Modules/_io/_iomodule.h',
|
| 616 |
+
'IO_OBJS': '\\',
|
| 617 |
+
'LDCXXSHARED': 'x86_64-conda-linux-gnu-c++ -pthread -shared',
|
| 618 |
+
'LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 619 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 620 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 621 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 622 |
+
'-L/root/envs/evalkit_llava/lib '
|
| 623 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 624 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 625 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 626 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 627 |
+
'-L/root/envs/evalkit_llava/lib',
|
| 628 |
+
'LDLIBRARY': 'libpython3.10.a',
|
| 629 |
+
'LDLIBRARYDIR': '',
|
| 630 |
+
'LDSHARED': 'x86_64-conda-linux-gnu-gcc -pthread -shared -Wl,-O2 '
|
| 631 |
+
'-Wl,--sort-common -Wl,--as-needed -Wl,-z,relro -Wl,-z,now '
|
| 632 |
+
'-Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 633 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 634 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 635 |
+
'-L/root/envs/evalkit_llava/lib '
|
| 636 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 637 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 638 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 639 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 640 |
+
'-L/root/envs/evalkit_llava/lib',
|
| 641 |
+
'LDVERSION': '3.10',
|
| 642 |
+
'LIBC': '',
|
| 643 |
+
'LIBDEST': '/root/envs/evalkit_llava/lib/python3.10',
|
| 644 |
+
'LIBDIR': '/root/envs/evalkit_llava/lib',
|
| 645 |
+
'LIBFFI_INCLUDEDIR': '/root/envs/evalkit_llava/include',
|
| 646 |
+
'LIBM': '-lm',
|
| 647 |
+
'LIBOBJDIR': 'Python/',
|
| 648 |
+
'LIBOBJS': '',
|
| 649 |
+
'LIBPC': '/root/envs/evalkit_llava/lib/pkgconfig',
|
| 650 |
+
'LIBPL': '/root/envs/evalkit_llava/lib/python3.10/config-3.10-x86_64-linux-gnu',
|
| 651 |
+
'LIBPYTHON': '',
|
| 652 |
+
'LIBRARY': 'libpython3.10.a',
|
| 653 |
+
'LIBRARY_DEPS': 'libpython3.10.a',
|
| 654 |
+
'LIBRARY_OBJS': '\\',
|
| 655 |
+
'LIBRARY_OBJS_OMIT_FROZEN': '\\',
|
| 656 |
+
'LIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 657 |
+
'LIBSUBDIRS': 'asyncio \\',
|
| 658 |
+
'LINKCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 659 |
+
'LINKFORSHARED': '-Xlinker -export-dynamic',
|
| 660 |
+
'LIPO_32BIT_FLAGS': '',
|
| 661 |
+
'LIPO_INTEL64_FLAGS': '',
|
| 662 |
+
'LLVM_PROF_ERR': 'no',
|
| 663 |
+
'LLVM_PROF_FILE': '',
|
| 664 |
+
'LLVM_PROF_MERGER': 'true',
|
| 665 |
+
'LN': 'ln',
|
| 666 |
+
'LOCALMODLIBS': '',
|
| 667 |
+
'MACHDEP': 'linux',
|
| 668 |
+
'MACHDEP_OBJS': '',
|
| 669 |
+
'MACHDESTLIB': '/root/envs/evalkit_llava/lib/python3.10',
|
| 670 |
+
'MACOSX_DEPLOYMENT_TARGET': '',
|
| 671 |
+
'MAINCC': 'x86_64-conda-linux-gnu-gcc -pthread',
|
| 672 |
+
'MAJOR_IN_MKDEV': 0,
|
| 673 |
+
'MAJOR_IN_SYSMACROS': 0,
|
| 674 |
+
'MAKESETUP': '/croot/python-split_1733933809325/work/Modules/makesetup',
|
| 675 |
+
'MANDIR': '/root/envs/evalkit_llava/share/man',
|
| 676 |
+
'MKDIR_P': '/usr/bin/mkdir -p',
|
| 677 |
+
'MODBUILT_NAMES': 'posix errno pwd _sre _codecs _weakref _functools '
|
| 678 |
+
'_operator _collections _abc itertools atexit _signal '
|
| 679 |
+
'_stat time _thread _locale _io faulthandler '
|
| 680 |
+
'_tracemalloc _symtable xxsubtype',
|
| 681 |
+
'MODDISABLED_NAMES': '',
|
| 682 |
+
'MODLIBS': '',
|
| 683 |
+
'MODOBJS': 'Modules/posixmodule.o Modules/errnomodule.o '
|
| 684 |
+
'Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o '
|
| 685 |
+
'Modules/_weakref.o Modules/_functoolsmodule.o '
|
| 686 |
+
'Modules/_operator.o Modules/_collectionsmodule.o '
|
| 687 |
+
'Modules/_abc.o Modules/itertoolsmodule.o '
|
| 688 |
+
'Modules/atexitmodule.o Modules/signalmodule.o Modules/_stat.o '
|
| 689 |
+
'Modules/timemodule.o Modules/_threadmodule.o '
|
| 690 |
+
'Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o '
|
| 691 |
+
'Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o '
|
| 692 |
+
'Modules/textio.o Modules/stringio.o Modules/faulthandler.o '
|
| 693 |
+
'Modules/_tracemalloc.o Modules/symtablemodule.o '
|
| 694 |
+
'Modules/xxsubtype.o',
|
| 695 |
+
'MODULE_OBJS': '\\',
|
| 696 |
+
'MULTIARCH': 'x86_64-linux-gnu',
|
| 697 |
+
'MULTIARCH_CPPFLAGS': '-DMULTIARCH=\\"x86_64-linux-gnu\\"',
|
| 698 |
+
'MVWDELCH_IS_EXPRESSION': 1,
|
| 699 |
+
'NO_AS_NEEDED': '-Wl,--no-as-needed',
|
| 700 |
+
'OBJECT_OBJS': '\\',
|
| 701 |
+
'OPENSSL_INCLUDES': '-I/root/envs/evalkit_llava/include',
|
| 702 |
+
'OPENSSL_LDFLAGS': '-L/root/envs/evalkit_llava/lib',
|
| 703 |
+
'OPENSSL_LIBS': '-lssl -lcrypto',
|
| 704 |
+
'OPENSSL_RPATH': '',
|
| 705 |
+
'OPT': '-DNDEBUG -fwrapv -O2 -Wall',
|
| 706 |
+
'OTHER_LIBTOOL_OPT': '',
|
| 707 |
+
'PACKAGE_BUGREPORT': 0,
|
| 708 |
+
'PACKAGE_NAME': 0,
|
| 709 |
+
'PACKAGE_STRING': 0,
|
| 710 |
+
'PACKAGE_TARNAME': 0,
|
| 711 |
+
'PACKAGE_URL': 0,
|
| 712 |
+
'PACKAGE_VERSION': 0,
|
| 713 |
+
'PARSER_HEADERS': '\\',
|
| 714 |
+
'PARSER_OBJS': '\\ \\ Parser/myreadline.o Parser/tokenizer.o',
|
| 715 |
+
'PEGEN_HEADERS': '\\',
|
| 716 |
+
'PEGEN_OBJS': '\\',
|
| 717 |
+
'PGO_PROF_GEN_FLAG': '-fprofile-generate',
|
| 718 |
+
'PGO_PROF_USE_FLAG': ' ',
|
| 719 |
+
'PLATLIBDIR': 'lib',
|
| 720 |
+
'POBJS': '\\',
|
| 721 |
+
'POSIX_SEMAPHORES_NOT_ENABLED': 0,
|
| 722 |
+
'PROFILE_TASK': '-m test --pgo',
|
| 723 |
+
'PTHREAD_KEY_T_IS_COMPATIBLE_WITH_INT': 1,
|
| 724 |
+
'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1,
|
| 725 |
+
'PURIFY': '',
|
| 726 |
+
'PY3LIBRARY': '',
|
| 727 |
+
'PYLONG_BITS_IN_DIGIT': 0,
|
| 728 |
+
'PYTHON': 'python',
|
| 729 |
+
'PYTHONFRAMEWORK': '',
|
| 730 |
+
'PYTHONFRAMEWORKDIR': 'no-framework',
|
| 731 |
+
'PYTHONFRAMEWORKINSTALLDIR': '',
|
| 732 |
+
'PYTHONFRAMEWORKPREFIX': '',
|
| 733 |
+
'PYTHONPATH': '',
|
| 734 |
+
'PYTHON_FOR_BUILD': './python -E',
|
| 735 |
+
'PYTHON_FOR_REGEN': '',
|
| 736 |
+
'PYTHON_HEADERS': '\\',
|
| 737 |
+
'PYTHON_OBJS': '\\',
|
| 738 |
+
'PY_BUILD_ENVIRON': '',
|
| 739 |
+
'PY_BUILTIN_HASHLIB_HASHES': '"md5,sha1,sha256,sha512,sha3,blake2"',
|
| 740 |
+
'PY_BUILTIN_MODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG '
|
| 741 |
+
'-fwrapv -O2 -Wall -march=nocona -mtune=haswell '
|
| 742 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 743 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 744 |
+
'/root/envs/evalkit_llava/include '
|
| 745 |
+
' '
|
| 746 |
+
' '
|
| 747 |
+
' '
|
| 748 |
+
' -march=nocona '
|
| 749 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 750 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 751 |
+
'-ffunction-sections -pipe -isystem '
|
| 752 |
+
'/root/envs/evalkit_llava/include '
|
| 753 |
+
' '
|
| 754 |
+
' '
|
| 755 |
+
' '
|
| 756 |
+
' '
|
| 757 |
+
'-fno-semantic-interposition '
|
| 758 |
+
' '
|
| 759 |
+
' -g -std=c99 -Wextra '
|
| 760 |
+
'-Wno-unused-result -Wno-unused-parameter '
|
| 761 |
+
'-Wno-missing-field-initializers '
|
| 762 |
+
'-Werror=implicit-function-declaration '
|
| 763 |
+
'-fvisibility=hidden '
|
| 764 |
+
' '
|
| 765 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 766 |
+
'-IObjects -IInclude -IPython -I. '
|
| 767 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 768 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 769 |
+
'/root/envs/evalkit_llava/include '
|
| 770 |
+
'-I/root/envs/evalkit_llava/include '
|
| 771 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 772 |
+
'/root/envs/evalkit_llava/include '
|
| 773 |
+
'-I/root/envs/evalkit_llava/include '
|
| 774 |
+
'-DPy_BUILD_CORE_BUILTIN',
|
| 775 |
+
'PY_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 -Wall '
|
| 776 |
+
'-march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 777 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 778 |
+
'-isystem '
|
| 779 |
+
'/root/envs/evalkit_llava/include '
|
| 780 |
+
' '
|
| 781 |
+
' '
|
| 782 |
+
' '
|
| 783 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 784 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections -pipe '
|
| 785 |
+
'-isystem '
|
| 786 |
+
'/root/envs/evalkit_llava/include '
|
| 787 |
+
' '
|
| 788 |
+
' '
|
| 789 |
+
' '
|
| 790 |
+
'',
|
| 791 |
+
'PY_CFLAGS_NODIST': '-fno-semantic-interposition '
|
| 792 |
+
' -g -std=c99 '
|
| 793 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 794 |
+
'-Wno-missing-field-initializers '
|
| 795 |
+
'-Werror=implicit-function-declaration '
|
| 796 |
+
'-fvisibility=hidden '
|
| 797 |
+
'-I/croot/python-split_1733933809325/work/Include/internal',
|
| 798 |
+
'PY_COERCE_C_LOCALE': 1,
|
| 799 |
+
'PY_CORE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv -O2 '
|
| 800 |
+
'-Wall -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 801 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 802 |
+
'-pipe -isystem '
|
| 803 |
+
'/root/envs/evalkit_llava/include '
|
| 804 |
+
' '
|
| 805 |
+
' '
|
| 806 |
+
' '
|
| 807 |
+
' -march=nocona -mtune=haswell -ftree-vectorize -fPIC '
|
| 808 |
+
'-fstack-protector-strong -fno-plt -O2 -ffunction-sections '
|
| 809 |
+
'-pipe -isystem '
|
| 810 |
+
'/root/envs/evalkit_llava/include '
|
| 811 |
+
' '
|
| 812 |
+
' '
|
| 813 |
+
' '
|
| 814 |
+
' -fno-semantic-interposition '
|
| 815 |
+
' '
|
| 816 |
+
'-g -std=c99 -Wextra -Wno-unused-result '
|
| 817 |
+
'-Wno-unused-parameter -Wno-missing-field-initializers '
|
| 818 |
+
'-Werror=implicit-function-declaration -fvisibility=hidden '
|
| 819 |
+
' '
|
| 820 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 821 |
+
'-IObjects -IInclude -IPython -I. '
|
| 822 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 823 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 824 |
+
'/root/envs/evalkit_llava/include '
|
| 825 |
+
'-I/root/envs/evalkit_llava/include '
|
| 826 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 827 |
+
'/root/envs/evalkit_llava/include '
|
| 828 |
+
'-I/root/envs/evalkit_llava/include '
|
| 829 |
+
'-DPy_BUILD_CORE',
|
| 830 |
+
'PY_CORE_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 831 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 832 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 833 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 834 |
+
'-L/root/envs/evalkit_llava/lib '
|
| 835 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 836 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 837 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 838 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 839 |
+
'-L/root/envs/evalkit_llava/lib '
|
| 840 |
+
'-fno-semantic-interposition '
|
| 841 |
+
' -g',
|
| 842 |
+
'PY_CPPFLAGS': '-IObjects -IInclude -IPython -I. '
|
| 843 |
+
'-I/croot/python-split_1733933809325/work/Include -DNDEBUG '
|
| 844 |
+
'-D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 845 |
+
'/root/envs/evalkit_llava/include '
|
| 846 |
+
'-I/root/envs/evalkit_llava/include '
|
| 847 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 848 |
+
'/root/envs/evalkit_llava/include '
|
| 849 |
+
'-I/root/envs/evalkit_llava/include',
|
| 850 |
+
'PY_ENABLE_SHARED': 0,
|
| 851 |
+
'PY_FORMAT_SIZE_T': '"z"',
|
| 852 |
+
'PY_LDFLAGS': '-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 853 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 854 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 855 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 856 |
+
'-L/root/envs/evalkit_llava/lib '
|
| 857 |
+
'-Wl,-O2 -Wl,--sort-common -Wl,--as-needed -Wl,-z,relro '
|
| 858 |
+
'-Wl,-z,now -Wl,--disable-new-dtags -Wl,--gc-sections '
|
| 859 |
+
'-Wl,-rpath,/root/envs/evalkit_llava/lib '
|
| 860 |
+
'-Wl,-rpath-link,/root/envs/evalkit_llava/lib '
|
| 861 |
+
'-L/root/envs/evalkit_llava/lib',
|
| 862 |
+
'PY_LDFLAGS_NODIST': '-fno-semantic-interposition '
|
| 863 |
+
' -g',
|
| 864 |
+
'PY_SSL_DEFAULT_CIPHERS': 1,
|
| 865 |
+
'PY_SSL_DEFAULT_CIPHER_STRING': 0,
|
| 866 |
+
'PY_STDMODULE_CFLAGS': '-Wno-unused-result -Wsign-compare -DNDEBUG -fwrapv '
|
| 867 |
+
'-O2 -Wall -march=nocona -mtune=haswell '
|
| 868 |
+
'-ftree-vectorize -fPIC -fstack-protector-strong '
|
| 869 |
+
'-fno-plt -O2 -ffunction-sections -pipe -isystem '
|
| 870 |
+
'/root/envs/evalkit_llava/include '
|
| 871 |
+
' '
|
| 872 |
+
' '
|
| 873 |
+
' '
|
| 874 |
+
' -march=nocona '
|
| 875 |
+
'-mtune=haswell -ftree-vectorize -fPIC '
|
| 876 |
+
'-fstack-protector-strong -fno-plt -O2 '
|
| 877 |
+
'-ffunction-sections -pipe -isystem '
|
| 878 |
+
'/root/envs/evalkit_llava/include '
|
| 879 |
+
' '
|
| 880 |
+
' '
|
| 881 |
+
' '
|
| 882 |
+
' '
|
| 883 |
+
'-fno-semantic-interposition '
|
| 884 |
+
' -g -std=c99 '
|
| 885 |
+
'-Wextra -Wno-unused-result -Wno-unused-parameter '
|
| 886 |
+
'-Wno-missing-field-initializers '
|
| 887 |
+
'-Werror=implicit-function-declaration '
|
| 888 |
+
'-fvisibility=hidden '
|
| 889 |
+
' '
|
| 890 |
+
'-I/croot/python-split_1733933809325/work/Include/internal '
|
| 891 |
+
'-IObjects -IInclude -IPython -I. '
|
| 892 |
+
'-I/croot/python-split_1733933809325/work/Include '
|
| 893 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 894 |
+
'/root/envs/evalkit_llava/include '
|
| 895 |
+
'-I/root/envs/evalkit_llava/include '
|
| 896 |
+
'-DNDEBUG -D_FORTIFY_SOURCE=2 -O2 -isystem '
|
| 897 |
+
'/root/envs/evalkit_llava/include '
|
| 898 |
+
'-I/root/envs/evalkit_llava/include',
|
| 899 |
+
'Py_DEBUG': 0,
|
| 900 |
+
'Py_ENABLE_SHARED': 0,
|
| 901 |
+
'Py_HASH_ALGORITHM': 0,
|
| 902 |
+
'Py_TRACE_REFS': 0,
|
| 903 |
+
'QUICKTESTOPTS': '-x test_subprocess test_io test_lib2to3 \\',
|
| 904 |
+
'READELF': 'x86_64-conda-linux-gnu-readelf',
|
| 905 |
+
'RESSRCDIR': 'Mac/Resources/framework',
|
| 906 |
+
'RETSIGTYPE': 'void',
|
| 907 |
+
'RUNSHARED': '',
|
| 908 |
+
'SCRIPTDIR': '/root/envs/evalkit_llava/lib',
|
| 909 |
+
'SETPGRP_HAVE_ARG': 0,
|
| 910 |
+
'SHELL': '/bin/sh',
|
| 911 |
+
'SHLIBS': '-lcrypt -lpthread -ldl -lutil -lm',
|
| 912 |
+
'SHLIB_SUFFIX': '.so',
|
| 913 |
+
'SHM_NEEDS_LIBRT': 1,
|
| 914 |
+
'SIGNED_RIGHT_SHIFT_ZERO_FILLS': 0,
|
| 915 |
+
'SITEPATH': '',
|
| 916 |
+
'SIZEOF_DOUBLE': 8,
|
| 917 |
+
'SIZEOF_FLOAT': 4,
|
| 918 |
+
'SIZEOF_FPOS_T': 16,
|
| 919 |
+
'SIZEOF_INT': 4,
|
| 920 |
+
'SIZEOF_LONG': 8,
|
| 921 |
+
'SIZEOF_LONG_DOUBLE': 16,
|
| 922 |
+
'SIZEOF_LONG_LONG': 8,
|
| 923 |
+
'SIZEOF_OFF_T': 8,
|
| 924 |
+
'SIZEOF_PID_T': 4,
|
| 925 |
+
'SIZEOF_PTHREAD_KEY_T': 4,
|
| 926 |
+
'SIZEOF_PTHREAD_T': 8,
|
| 927 |
+
'SIZEOF_SHORT': 2,
|
| 928 |
+
'SIZEOF_SIZE_T': 8,
|
| 929 |
+
'SIZEOF_TIME_T': 8,
|
| 930 |
+
'SIZEOF_UINTPTR_T': 8,
|
| 931 |
+
'SIZEOF_VOID_P': 8,
|
| 932 |
+
'SIZEOF_WCHAR_T': 4,
|
| 933 |
+
'SIZEOF__BOOL': 1,
|
| 934 |
+
'SOABI': 'cpython-310-x86_64-linux-gnu',
|
| 935 |
+
'SRCDIRS': 'Parser Objects Python Modules Modules/_io Programs',
|
| 936 |
+
'SRC_GDB_HOOKS': '/croot/python-split_1733933809325/work/Tools/gdb/libpython.py',
|
| 937 |
+
'STATIC_LIBPYTHON': 1,
|
| 938 |
+
'STDC_HEADERS': 1,
|
| 939 |
+
'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */",
|
| 940 |
+
'STRIPFLAG': '-s',
|
| 941 |
+
'SUBDIRS': '',
|
| 942 |
+
'SUBDIRSTOO': 'Include Lib Misc',
|
| 943 |
+
'SYSLIBS': '-lm',
|
| 944 |
+
'SYS_SELECT_WITH_SYS_TIME': 1,
|
| 945 |
+
'TCLTK_INCLUDES': '-I/root/envs/evalkit_llava/include',
|
| 946 |
+
'TCLTK_LIBS': '-L/root/envs/evalkit_llava/lib '
|
| 947 |
+
'-ltcl8.6 -ltk8.6',
|
| 948 |
+
'TESTOPTS': '',
|
| 949 |
+
'TESTPATH': '',
|
| 950 |
+
'TESTPYTHON': './python',
|
| 951 |
+
'TESTPYTHONOPTS': '',
|
| 952 |
+
'TESTRUNNER': './python '
|
| 953 |
+
'/croot/python-split_1733933809325/work/Tools/scripts/run_tests.py',
|
| 954 |
+
'TESTSUBDIRS': 'ctypes/test \\',
|
| 955 |
+
'TESTTIMEOUT': 1200,
|
| 956 |
+
'TEST_MODULES': 'yes',
|
| 957 |
+
'THREAD_STACK_SIZE': 0,
|
| 958 |
+
'TIMEMODULE_LIB': 0,
|
| 959 |
+
'TIME_WITH_SYS_TIME': 1,
|
| 960 |
+
'TM_IN_SYS_TIME': 0,
|
| 961 |
+
'TZPATH': '/root/envs/evalkit_llava/share/zoneinfo:/root/envs/evalkit_llava/share/tzinfo',
|
| 962 |
+
'UNICODE_DEPS': '\\',
|
| 963 |
+
'UNIVERSALSDK': '',
|
| 964 |
+
'UPDATE_FILE': '/croot/python-split_1733933809325/work/Tools/scripts/update_file.py',
|
| 965 |
+
'USE_COMPUTED_GOTOS': 1,
|
| 966 |
+
'VERSION': '3.10',
|
| 967 |
+
'VPATH': '/croot/python-split_1733933809325/work',
|
| 968 |
+
'WHEEL_PKG_DIR': '',
|
| 969 |
+
'WINDOW_HAS_FLAGS': 1,
|
| 970 |
+
'WITH_DECIMAL_CONTEXTVAR': 1,
|
| 971 |
+
'WITH_DOC_STRINGS': 1,
|
| 972 |
+
'WITH_DTRACE': 0,
|
| 973 |
+
'WITH_DYLD': 0,
|
| 974 |
+
'WITH_EDITLINE': 0,
|
| 975 |
+
'WITH_LIBINTL': 0,
|
| 976 |
+
'WITH_NEXT_FRAMEWORK': 0,
|
| 977 |
+
'WITH_PYMALLOC': 1,
|
| 978 |
+
'WITH_VALGRIND': 0,
|
| 979 |
+
'X87_DOUBLE_ROUNDING': 0,
|
| 980 |
+
'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax',
|
| 981 |
+
'abs_builddir': '/croot/python-split_1733933809325/work/build-static',
|
| 982 |
+
'abs_srcdir': '/croot/python-split_1733933809325/work',
|
| 983 |
+
'datarootdir': '/root/envs/evalkit_llava/share',
|
| 984 |
+
'exec_prefix': '/root/envs/evalkit_llava',
|
| 985 |
+
'prefix': '/root/envs/evalkit_llava',
|
| 986 |
+
'srcdir': '/croot/python-split_1733933809325/work'}
|
evalkit_llava/lib/python3.10/calendar.py
ADDED
|
@@ -0,0 +1,759 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Calendar printing functions
|
| 2 |
+
|
| 3 |
+
Note when comparing these calendars to the ones printed by cal(1): By
|
| 4 |
+
default, these calendars have Monday as the first day of the week, and
|
| 5 |
+
Sunday as the last (the European convention). Use setfirstweekday() to
|
| 6 |
+
set the first day of the week (0=Monday, 6=Sunday)."""
|
| 7 |
+
|
| 8 |
+
import sys
|
| 9 |
+
import datetime
|
| 10 |
+
import locale as _locale
|
| 11 |
+
from itertools import repeat
|
| 12 |
+
|
| 13 |
+
__all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday",
|
| 14 |
+
"firstweekday", "isleap", "leapdays", "weekday", "monthrange",
|
| 15 |
+
"monthcalendar", "prmonth", "month", "prcal", "calendar",
|
| 16 |
+
"timegm", "month_name", "month_abbr", "day_name", "day_abbr",
|
| 17 |
+
"Calendar", "TextCalendar", "HTMLCalendar", "LocaleTextCalendar",
|
| 18 |
+
"LocaleHTMLCalendar", "weekheader",
|
| 19 |
+
"MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY",
|
| 20 |
+
"SATURDAY", "SUNDAY"]
|
| 21 |
+
|
| 22 |
+
# Exception raised for bad input (with string parameter for details)
|
| 23 |
+
error = ValueError
|
| 24 |
+
|
| 25 |
+
# Exceptions raised for bad input
|
| 26 |
+
class IllegalMonthError(ValueError):
|
| 27 |
+
def __init__(self, month):
|
| 28 |
+
self.month = month
|
| 29 |
+
def __str__(self):
|
| 30 |
+
return "bad month number %r; must be 1-12" % self.month
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class IllegalWeekdayError(ValueError):
|
| 34 |
+
def __init__(self, weekday):
|
| 35 |
+
self.weekday = weekday
|
| 36 |
+
def __str__(self):
|
| 37 |
+
return "bad weekday number %r; must be 0 (Monday) to 6 (Sunday)" % self.weekday
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
# Constants for months referenced later
|
| 41 |
+
January = 1
|
| 42 |
+
February = 2
|
| 43 |
+
|
| 44 |
+
# Number of days per month (except for February in leap years)
|
| 45 |
+
mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
|
| 46 |
+
|
| 47 |
+
# This module used to have hard-coded lists of day and month names, as
|
| 48 |
+
# English strings. The classes following emulate a read-only version of
|
| 49 |
+
# that, but supply localized names. Note that the values are computed
|
| 50 |
+
# fresh on each call, in case the user changes locale between calls.
|
| 51 |
+
|
| 52 |
+
class _localized_month:
|
| 53 |
+
|
| 54 |
+
_months = [datetime.date(2001, i+1, 1).strftime for i in range(12)]
|
| 55 |
+
_months.insert(0, lambda x: "")
|
| 56 |
+
|
| 57 |
+
def __init__(self, format):
|
| 58 |
+
self.format = format
|
| 59 |
+
|
| 60 |
+
def __getitem__(self, i):
|
| 61 |
+
funcs = self._months[i]
|
| 62 |
+
if isinstance(i, slice):
|
| 63 |
+
return [f(self.format) for f in funcs]
|
| 64 |
+
else:
|
| 65 |
+
return funcs(self.format)
|
| 66 |
+
|
| 67 |
+
def __len__(self):
|
| 68 |
+
return 13
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class _localized_day:
|
| 72 |
+
|
| 73 |
+
# January 1, 2001, was a Monday.
|
| 74 |
+
_days = [datetime.date(2001, 1, i+1).strftime for i in range(7)]
|
| 75 |
+
|
| 76 |
+
def __init__(self, format):
|
| 77 |
+
self.format = format
|
| 78 |
+
|
| 79 |
+
def __getitem__(self, i):
|
| 80 |
+
funcs = self._days[i]
|
| 81 |
+
if isinstance(i, slice):
|
| 82 |
+
return [f(self.format) for f in funcs]
|
| 83 |
+
else:
|
| 84 |
+
return funcs(self.format)
|
| 85 |
+
|
| 86 |
+
def __len__(self):
|
| 87 |
+
return 7
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
# Full and abbreviated names of weekdays
|
| 91 |
+
day_name = _localized_day('%A')
|
| 92 |
+
day_abbr = _localized_day('%a')
|
| 93 |
+
|
| 94 |
+
# Full and abbreviated names of months (1-based arrays!!!)
|
| 95 |
+
month_name = _localized_month('%B')
|
| 96 |
+
month_abbr = _localized_month('%b')
|
| 97 |
+
|
| 98 |
+
# Constants for weekdays
|
| 99 |
+
(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def isleap(year):
|
| 103 |
+
"""Return True for leap years, False for non-leap years."""
|
| 104 |
+
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def leapdays(y1, y2):
|
| 108 |
+
"""Return number of leap years in range [y1, y2).
|
| 109 |
+
Assume y1 <= y2."""
|
| 110 |
+
y1 -= 1
|
| 111 |
+
y2 -= 1
|
| 112 |
+
return (y2//4 - y1//4) - (y2//100 - y1//100) + (y2//400 - y1//400)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def weekday(year, month, day):
|
| 116 |
+
"""Return weekday (0-6 ~ Mon-Sun) for year, month (1-12), day (1-31)."""
|
| 117 |
+
if not datetime.MINYEAR <= year <= datetime.MAXYEAR:
|
| 118 |
+
year = 2000 + year % 400
|
| 119 |
+
return datetime.date(year, month, day).weekday()
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def monthrange(year, month):
|
| 123 |
+
"""Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for
|
| 124 |
+
year, month."""
|
| 125 |
+
if not 1 <= month <= 12:
|
| 126 |
+
raise IllegalMonthError(month)
|
| 127 |
+
day1 = weekday(year, month, 1)
|
| 128 |
+
ndays = mdays[month] + (month == February and isleap(year))
|
| 129 |
+
return day1, ndays
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def _monthlen(year, month):
|
| 133 |
+
return mdays[month] + (month == February and isleap(year))
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def _prevmonth(year, month):
|
| 137 |
+
if month == 1:
|
| 138 |
+
return year-1, 12
|
| 139 |
+
else:
|
| 140 |
+
return year, month-1
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def _nextmonth(year, month):
|
| 144 |
+
if month == 12:
|
| 145 |
+
return year+1, 1
|
| 146 |
+
else:
|
| 147 |
+
return year, month+1
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class Calendar(object):
    """
    Base calendar class.  It computes calendar data only; rendering is
    left to subclasses such as TextCalendar and HTMLCalendar.
    """

    def __init__(self, firstweekday=0):
        # 0 = Monday ... 6 = Sunday; any int is accepted and reduced
        # modulo 7 when read back through the property below.
        self.firstweekday = firstweekday

    def getfirstweekday(self):
        return self._firstweekday % 7

    def setfirstweekday(self, firstweekday):
        self._firstweekday = firstweekday

    firstweekday = property(getfirstweekday, setfirstweekday)

    def iterweekdays(self):
        """
        Iterate over the weekday numbers of one week, starting with the
        configured first weekday.
        """
        start = self.firstweekday
        for offset in range(7):
            yield (start + offset) % 7

    def itermonthdates(self, year, month):
        """
        Iterate over the datetime.date values of one month, padded to
        complete weeks, so dates of adjacent months are included too.
        """
        for triple in self.itermonthdays3(year, month):
            yield datetime.date(*triple)

    def itermonthdays(self, year, month):
        """
        Like itermonthdates(), but yield plain day numbers; days that
        belong to an adjacent month are yielded as 0.
        """
        day1, ndays = monthrange(year, month)
        pad_before = (day1 - self.firstweekday) % 7
        pad_after = (self.firstweekday - day1 - ndays) % 7
        yield from repeat(0, pad_before)
        yield from range(1, ndays + 1)
        yield from repeat(0, pad_after)

    def itermonthdays2(self, year, month):
        """
        Like itermonthdates(), but yield (day number, weekday number)
        pairs; out-of-month days have day number 0.
        """
        weekday = self.firstweekday
        for day in self.itermonthdays(year, month):
            yield day, weekday % 7
            weekday += 1

    def itermonthdays3(self, year, month):
        """
        Like itermonthdates(), but yield (year, month, day) triples.
        Also usable for dates outside the datetime.date range.
        """
        day1, ndays = monthrange(year, month)
        pad_before = (day1 - self.firstweekday) % 7
        pad_after = (self.firstweekday - day1 - ndays) % 7
        # Trailing days of the previous month fill the first week.
        prev_y, prev_m = _prevmonth(year, month)
        prev_len = _monthlen(prev_y, prev_m)
        for day in range(prev_len - pad_before + 1, prev_len + 1):
            yield prev_y, prev_m, day
        # The month itself.
        for day in range(1, ndays + 1):
            yield year, month, day
        # Leading days of the next month fill the last week.
        next_y, next_m = _nextmonth(year, month)
        for day in range(1, pad_after + 1):
            yield next_y, next_m, day

    def itermonthdays4(self, year, month):
        """
        Like itermonthdates(), but yield (year, month, day, day_of_week)
        tuples.  Also usable for dates outside the datetime.date range.
        """
        start = self.firstweekday
        for pos, (y, m, d) in enumerate(self.itermonthdays3(year, month)):
            yield y, m, d, (start + pos) % 7

    def monthdatescalendar(self, year, month):
        """
        Return a month's calendar as a list of weeks; each week is a
        list of seven datetime.date values.
        """
        return self._split_weeks(list(self.itermonthdates(year, month)))

    def monthdays2calendar(self, year, month):
        """
        Return a month's calendar as weeks of (day number, weekday
        number) pairs; out-of-month days have day number 0.
        """
        return self._split_weeks(list(self.itermonthdays2(year, month)))

    def monthdayscalendar(self, year, month):
        """
        Return a month's calendar as weeks of day numbers; days outside
        the month appear as 0.
        """
        return self._split_weeks(list(self.itermonthdays(year, month)))

    @staticmethod
    def _split_weeks(entries):
        # Chop a flat list of day entries into rows of seven.
        return [entries[pos:pos + 7] for pos in range(0, len(entries), 7)]

    def yeardatescalendar(self, year, width=3):
        """
        Return the year's data ready for formatting: a list of rows,
        each row holding up to `width` month calendars whose weeks
        contain datetime.date values (see monthdatescalendar()).
        """
        monthly = [self.monthdatescalendar(year, m)
                   for m in range(January, January + 12)]
        return [monthly[pos:pos + width]
                for pos in range(0, len(monthly), width)]

    def yeardays2calendar(self, year, width=3):
        """
        Like yeardatescalendar(), but the week entries are (day number,
        weekday number) pairs; out-of-month days have day number 0.
        """
        monthly = [self.monthdays2calendar(year, m)
                   for m in range(January, January + 12)]
        return [monthly[pos:pos + width]
                for pos in range(0, len(monthly), width)]

    def yeardayscalendar(self, year, width=3):
        """
        Like yeardatescalendar(), but the week entries are plain day
        numbers; out-of-month days appear as 0.
        """
        monthly = [self.monthdayscalendar(year, m)
                   for m in range(January, January + 12)]
        return [monthly[pos:pos + width]
                for pos in range(0, len(monthly), width)]
|
| 295 |
+
class TextCalendar(Calendar):
    """
    Subclass of Calendar that outputs a calendar as a simple plain text
    similar to the UNIX program cal.
    """

    def prweek(self, theweek, width):
        """
        Print a single week (no newline).
        """
        print(self.formatweek(theweek, width), end='')

    def formatday(self, day, weekday, width):
        """
        Returns a formatted day.

        Day number 0 (a day belonging to an adjacent month) renders as
        blank padding of the same width.
        """
        if day == 0:
            s = ''
        else:
            s = '%2i' % day             # right-align single-digit days
        return s.center(width)

    def formatweek(self, theweek, width):
        """
        Returns a single week in a string (no newline).

        theweek is a list of (day number, weekday number) pairs as
        produced by monthdays2calendar().
        """
        return ' '.join(self.formatday(d, wd, width) for (d, wd) in theweek)

    def formatweekday(self, day, width):
        """
        Returns a formatted week day name, truncated/centered to width.
        """
        # Full names only fit when at least 9 columns are available.
        if width >= 9:
            names = day_name
        else:
            names = day_abbr
        return names[day][:width].center(width)

    def formatweekheader(self, width):
        """
        Return a header for a week (one name per configured weekday).
        """
        return ' '.join(self.formatweekday(i, width) for i in self.iterweekdays())

    def formatmonthname(self, theyear, themonth, width, withyear=True):
        """
        Return a formatted month name, optionally suffixed with the year.
        """
        s = month_name[themonth]
        if withyear:
            s = "%s %r" % (s, theyear)
        return s.center(width)

    def prmonth(self, theyear, themonth, w=0, l=0):
        """
        Print a month's calendar.

        w is the date column width, l the number of lines per week.
        """
        print(self.formatmonth(theyear, themonth, w, l), end='')

    def formatmonth(self, theyear, themonth, w=0, l=0):
        """
        Return a month's calendar string (multi-line).

        w is the date column width (minimum 2), l the number of lines
        per week (minimum 1).
        """
        w = max(2, w)
        l = max(1, l)
        s = self.formatmonthname(theyear, themonth, 7 * (w + 1) - 1)
        s = s.rstrip()
        s += '\n' * l
        s += self.formatweekheader(w).rstrip()
        s += '\n' * l
        for week in self.monthdays2calendar(theyear, themonth):
            s += self.formatweek(week, w).rstrip()
            s += '\n' * l
        return s

    def formatyear(self, theyear, w=2, l=1, c=6, m=3):
        """
        Returns a year's calendar as a multi-line string.

        w is the date column width, l the number of lines per week,
        c the spacing between month columns, m the months per row.
        """
        w = max(2, w)
        l = max(1, l)
        c = max(2, c)
        colwidth = (w + 1) * 7 - 1      # width of one month column
        v = []
        a = v.append
        a(repr(theyear).center(colwidth*m+c*(m-1)).rstrip())
        a('\n'*l)
        header = self.formatweekheader(w)
        for (i, row) in enumerate(self.yeardays2calendar(theyear, m)):
            # months in this row
            months = range(m*i+1, min(m*(i+1)+1, 13))
            a('\n'*l)
            names = (self.formatmonthname(theyear, k, colwidth, False)
                     for k in months)
            a(formatstring(names, colwidth, c).rstrip())
            a('\n'*l)
            headers = (header for k in months)
            a(formatstring(headers, colwidth, c).rstrip())
            a('\n'*l)
            # max number of weeks for this row
            height = max(len(cal) for cal in row)
            for j in range(height):
                weeks = []
                for cal in row:
                    if j >= len(cal):
                        # This month has fewer weeks; pad with a blank column.
                        weeks.append('')
                    else:
                        weeks.append(self.formatweek(cal[j], w))
                a(formatstring(weeks, colwidth, c).rstrip())
                a('\n' * l)
        return ''.join(v)

    def pryear(self, theyear, w=0, l=0, c=6, m=3):
        """Print a year's calendar."""
        print(self.formatyear(theyear, w, l, c, m), end='')
| 412 |
+
class HTMLCalendar(Calendar):
    """
    Calendar subclass that renders months and years as HTML tables and
    can emit a complete XHTML page for a year.
    """

    # CSS classes for the day <td>s
    cssclasses = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]

    # CSS classes for the day <th>s
    cssclasses_weekday_head = cssclasses

    # CSS class for the days before and after current month
    cssclass_noday = "noday"

    # CSS class for the month's head
    cssclass_month_head = "month"

    # CSS class for the month
    cssclass_month = "month"

    # CSS class for the year's table head
    cssclass_year_head = "year"

    # CSS class for the whole year table
    cssclass_year = "year"

    def formatday(self, day, weekday):
        """
        Return one day as a table cell; day 0 marks a day belonging to
        an adjacent month and renders as an empty "noday" cell.
        """
        if day == 0:
            return '<td class="%s"> </td>' % self.cssclass_noday
        return '<td class="%s">%d</td>' % (self.cssclasses[weekday], day)

    def formatweek(self, theweek):
        """
        Return a complete week as a table row.
        """
        cells = [self.formatday(d, wd) for (d, wd) in theweek]
        return '<tr>%s</tr>' % ''.join(cells)

    def formatweekday(self, day):
        """
        Return a weekday name as a table header cell.
        """
        return '<th class="%s">%s</th>' % (
            self.cssclasses_weekday_head[day], day_abbr[day])

    def formatweekheader(self):
        """
        Return a table row naming the seven weekdays.
        """
        cells = [self.formatweekday(wd) for wd in self.iterweekdays()]
        return '<tr>%s</tr>' % ''.join(cells)

    def formatmonthname(self, theyear, themonth, withyear=True):
        """
        Return the month name (optionally with the year) as a table row.
        """
        if withyear:
            label = '%s %s' % (month_name[themonth], theyear)
        else:
            label = '%s' % month_name[themonth]
        return '<tr><th colspan="7" class="%s">%s</th></tr>' % (
            self.cssclass_month_head, label)

    def formatmonth(self, theyear, themonth, withyear=True):
        """
        Return a formatted month as a table (newline-terminated lines).
        """
        lines = [
            '<table border="0" cellpadding="0" cellspacing="0" class="%s">' % (
                self.cssclass_month),
            self.formatmonthname(theyear, themonth, withyear=withyear),
            self.formatweekheader(),
        ]
        lines.extend(self.formatweek(week)
                     for week in self.monthdays2calendar(theyear, themonth))
        lines.append('</table>')
        # Each line, including the closing tag, is newline-terminated.
        return '\n'.join(lines) + '\n'

    def formatyear(self, theyear, width=3):
        """
        Return a formatted year as a table of month tables, `width`
        months per row.
        """
        width = max(width, 1)
        chunks = [
            '<table border="0" cellpadding="0" cellspacing="0" class="%s">' %
            self.cssclass_year,
            '\n',
            '<tr><th colspan="%d" class="%s">%s</th></tr>' % (
                width, self.cssclass_year_head, theyear),
        ]
        for start in range(January, January + 12, width):
            # Months rendered in this row.
            chunks.append('<tr>')
            for m in range(start, min(start + width, 13)):
                chunks.append('<td>')
                chunks.append(self.formatmonth(theyear, m, withyear=False))
                chunks.append('</td>')
            chunks.append('</tr>')
        chunks.append('</table>')
        return ''.join(chunks)

    def formatyearpage(self, theyear, width=3, css='calendar.css', encoding=None):
        """
        Return a formatted year as a complete XHTML page, encoded as
        bytes (unencodable characters become character references).
        """
        if encoding is None:
            encoding = sys.getdefaultencoding()
        parts = [
            '<?xml version="1.0" encoding="%s"?>\n' % encoding,
            '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n',
            '<html>\n',
            '<head>\n',
            '<meta http-equiv="Content-Type" content="text/html; charset=%s" />\n' % encoding,
        ]
        if css is not None:
            parts.append('<link rel="stylesheet" type="text/css" href="%s" />\n' % css)
        parts.append('<title>Calendar for %d</title>\n' % theyear)
        parts.append('</head>\n')
        parts.append('<body>\n')
        parts.append(self.formatyear(theyear, width))
        parts.append('</body>\n')
        parts.append('</html>\n')
        return ''.join(parts).encode(encoding, "xmlcharrefreplace")
| 548 |
+
class different_locale:
    """Context manager that temporarily switches LC_TIME to another locale."""

    def __init__(self, locale):
        # Target locale; accepts whatever _locale.setlocale() accepts
        # (a string or a (language, encoding) tuple).
        self.locale = locale

    def __enter__(self):
        # Remember the current LC_TIME setting so it can be restored.
        self.oldlocale = _locale.getlocale(_locale.LC_TIME)
        _locale.setlocale(_locale.LC_TIME, self.locale)

    def __exit__(self, *args):
        # Restore the previous LC_TIME locale unconditionally.
        _locale.setlocale(_locale.LC_TIME, self.oldlocale)
| 560 |
+
class LocaleTextCalendar(TextCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """

    def __init__(self, firstweekday=0, locale=None):
        TextCalendar.__init__(self, firstweekday)
        # Fall back to the system default locale when none is supplied.
        self.locale = _locale.getdefaultlocale() if locale is None else locale

    def formatweekday(self, day, width):
        # Render the weekday name under the configured locale.
        with different_locale(self.locale):
            return super().formatweekday(day, width)

    def formatmonthname(self, theyear, themonth, width, withyear=True):
        # Render the month name under the configured locale.
        with different_locale(self.locale):
            return super().formatmonthname(theyear, themonth, width, withyear)
| 583 |
+
class LocaleHTMLCalendar(HTMLCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """

    def __init__(self, firstweekday=0, locale=None):
        HTMLCalendar.__init__(self, firstweekday)
        # Fall back to the system default locale when none is supplied.
        self.locale = _locale.getdefaultlocale() if locale is None else locale

    def formatweekday(self, day):
        # Render the weekday header cell under the configured locale.
        with different_locale(self.locale):
            return super().formatweekday(day)

    def formatmonthname(self, theyear, themonth, withyear=True):
        # Render the month-name row under the configured locale.
        with different_locale(self.locale):
            return super().formatmonthname(theyear, themonth, withyear)
+
# Support for old module level interface
c = TextCalendar()  # shared instance backing the legacy module-level API

firstweekday = c.getfirstweekday


def setfirstweekday(firstweekday):
    # Legacy setter: validates the range before delegating to the
    # shared TextCalendar instance.
    if not MONDAY <= firstweekday <= SUNDAY:
        raise IllegalWeekdayError(firstweekday)
    c.firstweekday = firstweekday


# Legacy aliases delegating to the shared instance's bound methods.
monthcalendar = c.monthdayscalendar
prweek = c.prweek
week = c.formatweek
weekheader = c.formatweekheader
prmonth = c.prmonth
month = c.formatmonth
calendar = c.formatyear
prcal = c.pryear
|
| 624 |
+
# Spacing of month columns for multi-column year calendar
_colwidth = 7*3 - 1         # Amount printed by prweek()
_spacing = 6                # Number of spaces between columns


def format(cols, colwidth=_colwidth, spacing=_spacing):
    """Prints multi-column formatting for year calendars"""
    print(formatstring(cols, colwidth, spacing))


def formatstring(cols, colwidth=_colwidth, spacing=_spacing):
    """Returns a string formatted from n strings, centered within n columns."""
    gap = ' ' * spacing
    return gap.join(col.center(colwidth) for col in cols)
+
EPOCH = 1970
# Ordinal of 1970-01-01, the Unix epoch.
_EPOCH_ORD = datetime.date(EPOCH, 1, 1).toordinal()


def timegm(tuple):
    """Unrelated but handy function to calculate Unix timestamp from GMT."""
    year, month, day, hour, minute, second = tuple[:6]
    # Whole days since the epoch, then fold in the time-of-day fields.
    day_count = datetime.date(year, month, 1).toordinal() - _EPOCH_ORD + day - 1
    return ((day_count * 24 + hour) * 60 + minute) * 60 + second
+
|
| 654 |
+
def main(args):
    """
    Command-line entry point: print a text or HTML calendar for a year
    (or a single month, text output only).

    args is an argv-style list; args[0] is treated as the program name
    and ignored.  Exits via SystemExit on argument errors.
    """
    import argparse
    parser = argparse.ArgumentParser()
    textgroup = parser.add_argument_group('text only arguments')
    htmlgroup = parser.add_argument_group('html only arguments')
    textgroup.add_argument(
        "-w", "--width",
        type=int, default=2,
        help="width of date column (default 2)"
    )
    textgroup.add_argument(
        "-l", "--lines",
        type=int, default=1,
        help="number of lines for each week (default 1)"
    )
    textgroup.add_argument(
        "-s", "--spacing",
        type=int, default=6,
        help="spacing between months (default 6)"
    )
    textgroup.add_argument(
        "-m", "--months",
        type=int, default=3,
        help="months per row (default 3)"
    )
    htmlgroup.add_argument(
        "-c", "--css",
        default="calendar.css",
        help="CSS to use for page"
    )
    parser.add_argument(
        "-L", "--locale",
        default=None,
        help="locale to be used for month and weekday names"
    )
    parser.add_argument(
        "-e", "--encoding",
        default=None,
        help="encoding to use for output"
    )
    parser.add_argument(
        "-t", "--type",
        default="text",
        choices=("text", "html"),
        help="output type (text or html)"
    )
    parser.add_argument(
        "year",
        nargs='?', type=int,
        help="year number (1-9999)"
    )
    parser.add_argument(
        "month",
        nargs='?', type=int,
        help="month number (1-12, text only)"
    )

    options = parser.parse_args(args[1:])

    if options.locale and not options.encoding:
        # parser.error() prints the message and raises SystemExit itself;
        # the old explicit sys.exit(1) after it was unreachable.
        parser.error("if --locale is specified --encoding is required")

    locale = options.locale, options.encoding

    if options.type == "html":
        if options.locale:
            cal = LocaleHTMLCalendar(locale=locale)
        else:
            cal = HTMLCalendar()
        encoding = options.encoding
        if encoding is None:
            encoding = sys.getdefaultencoding()
        optdict = dict(encoding=encoding, css=options.css)
        write = sys.stdout.buffer.write
        if options.year is None:
            write(cal.formatyearpage(datetime.date.today().year, **optdict))
        elif options.month is None:
            write(cal.formatyearpage(options.year, **optdict))
        else:
            # HTML output supports whole years only, not single months.
            parser.error("incorrect number of arguments")
    else:
        if options.locale:
            cal = LocaleTextCalendar(locale=locale)
        else:
            cal = TextCalendar()
        optdict = dict(w=options.width, l=options.lines)
        if options.month is None:
            # Spacing/months-per-row only apply to full-year output.
            optdict["c"] = options.spacing
            optdict["m"] = options.months
        if options.year is None:
            result = cal.formatyear(datetime.date.today().year, **optdict)
        elif options.month is None:
            result = cal.formatyear(options.year, **optdict)
        else:
            result = cal.formatmonth(options.year, options.month, **optdict)
        write = sys.stdout.write
        if options.encoding:
            result = result.encode(options.encoding)
            write = sys.stdout.buffer.write
        write(result)
+
|
| 758 |
+
# Run the command-line interface when executed as a script.
if __name__ == "__main__":
    main(sys.argv)
|
evalkit_llava/lib/python3.10/collections/__init__.py
ADDED
|
@@ -0,0 +1,1556 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''This module implements specialized container datatypes providing
|
| 2 |
+
alternatives to Python's general purpose built-in containers, dict,
|
| 3 |
+
list, set, and tuple.
|
| 4 |
+
|
| 5 |
+
* namedtuple factory function for creating tuple subclasses with named fields
|
| 6 |
+
* deque list-like container with fast appends and pops on either end
|
| 7 |
+
* ChainMap dict-like class for creating a single view of multiple mappings
|
| 8 |
+
* Counter dict subclass for counting hashable objects
|
| 9 |
+
* OrderedDict dict subclass that remembers the order entries were added
|
| 10 |
+
* defaultdict dict subclass that calls a factory function to supply missing values
|
| 11 |
+
* UserDict wrapper around dictionary objects for easier dict subclassing
|
| 12 |
+
* UserList wrapper around list objects for easier list subclassing
|
| 13 |
+
* UserString wrapper around string objects for easier string subclassing
|
| 14 |
+
|
| 15 |
+
'''
|
| 16 |
+
|
| 17 |
+
# Public API of the collections package.
__all__ = [
    'ChainMap',
    'Counter',
    'OrderedDict',
    'UserDict',
    'UserList',
    'UserString',
    'defaultdict',
    'deque',
    'namedtuple',
]
| 29 |
+
import _collections_abc
|
| 30 |
+
import sys as _sys
|
| 31 |
+
|
| 32 |
+
from itertools import chain as _chain
|
| 33 |
+
from itertools import repeat as _repeat
|
| 34 |
+
from itertools import starmap as _starmap
|
| 35 |
+
from keyword import iskeyword as _iskeyword
|
| 36 |
+
from operator import eq as _eq
|
| 37 |
+
from operator import itemgetter as _itemgetter
|
| 38 |
+
from reprlib import recursive_repr as _recursive_repr
|
| 39 |
+
from _weakref import proxy as _proxy
|
| 40 |
+
|
| 41 |
+
try:
|
| 42 |
+
from _collections import deque
|
| 43 |
+
except ImportError:
|
| 44 |
+
pass
|
| 45 |
+
else:
|
| 46 |
+
_collections_abc.MutableSequence.register(deque)
|
| 47 |
+
|
| 48 |
+
try:
|
| 49 |
+
from _collections import defaultdict
|
| 50 |
+
except ImportError:
|
| 51 |
+
pass
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
################################################################################
|
| 55 |
+
### OrderedDict
|
| 56 |
+
################################################################################
|
| 57 |
+
|
| 58 |
+
class _OrderedDictKeysView(_collections_abc.KeysView):
    # Keys view whose reverse iteration mirrors the mapping's own
    # reverse order (insertion order for an OrderedDict).

    def __reversed__(self):
        # The mapping knows how to iterate backwards; delegate to it.
        for key in reversed(self._mapping):
            yield key
|
| 62 |
+
|
| 63 |
+
class _OrderedDictItemsView(_collections_abc.ItemsView):
    # Items view with reverse-iteration support for OrderedDict.

    def __reversed__(self):
        mapping = self._mapping
        for key in reversed(mapping):
            yield (key, mapping[key])
|
| 68 |
+
|
| 69 |
+
class _OrderedDictValuesView(_collections_abc.ValuesView):
    # Values view with reverse-iteration support for OrderedDict.

    def __reversed__(self):
        mapping = self._mapping
        for key in reversed(mapping):
            yield mapping[key]
|
| 74 |
+
|
| 75 |
+
class _Link(object):
    # Node of the circular doubly linked list that OrderedDict uses to
    # track insertion order.  __weakref__ is required because 'prev'
    # pointers are stored as weakref proxies to avoid reference cycles.
    __slots__ = 'prev', 'next', 'key', '__weakref__'
|
| 77 |
+
|
| 78 |
+
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.

    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
    # The prev links are weakref proxies (to prevent circular references).
    # Individual links are kept alive by the hard reference in self.__map.
    # Those hard references disappear when a key is deleted from an OrderedDict.

    def __init__(self, other=(), /, **kwds):
        '''Initialize an ordered dictionary.  The signature is the same as
        regular dictionaries.  Keyword argument order is preserved.
        '''
        try:
            self.__root
        except AttributeError:
            # First initialization: build the sentinel link and the
            # key -> link map.  Re-running __init__ merely updates.
            self.__hardroot = _Link()
            self.__root = root = _proxy(self.__hardroot)
            root.prev = root.next = root
            self.__map = {}
        self.__update(other, **kwds)

    def __setitem__(self, key, value,
                    dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        # (dict_setitem/proxy/Link are bound as defaults for speed and to be
        # immune to later monkeypatching.)
        if key not in self:
            self.__map[key] = link = Link()
            root = self.__root
            last = root.prev
            link.prev, link.next, link.key = last, root, key
            last.next = link
            root.prev = proxy(link)
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link = self.__map.pop(key)
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev
        # Clear the removed link's pointers to help garbage collection.
        link.prev = None
        link.next = None

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        root = self.__root
        curr = root.next
        while curr is not root:
            yield curr.key
            curr = curr.next

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        root = self.__root
        curr = root.prev
        while curr is not root:
            yield curr.key
            curr = curr.prev

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        root = self.__root
        root.prev = root.next = root
        self.__map.clear()
        dict.clear(self)

    def popitem(self, last=True):
        '''Remove and return a (key, value) pair from the dictionary.

        Pairs are returned in LIFO order if last is true or FIFO order if false.
        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the last link (the one just before the sentinel).
            link = root.prev
            link_prev = link.prev
            link_prev.next = root
            root.prev = link_prev
        else:
            # Unlink the first link (the one just after the sentinel).
            link = root.next
            link_next = link.next
            root.next = link_next
            link_next.prev = root
        key = link.key
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    def move_to_end(self, key, last=True):
        '''Move an existing element to the end (or beginning if last is false).

        Raise KeyError if the element does not exist.
        '''
        link = self.__map[key]
        link_prev = link.prev
        link_next = link.next
        # soft_link is the weakref proxy to 'link' held by its successor;
        # it is reused below so that prev pointers stay weak references.
        soft_link = link_next.prev
        link_prev.next = link_next
        link_next.prev = link_prev
        root = self.__root
        if last:
            last = root.prev
            link.prev = last
            link.next = root
            root.prev = soft_link
            last.next = link
        else:
            first = root.next
            link.prev = root
            link.next = first
            first.prev = soft_link
            root.next = link

    def __sizeof__(self):
        # Estimate total memory: instance dict, the two dicts, and one
        # link plus one proxy per entry (plus the sentinel).
        sizeof = _sys.getsizeof
        n = len(self) + 1                       # number of links including root
        size = sizeof(self.__dict__)            # instance dictionary
        size += sizeof(self.__map) * 2          # internal dict and inherited dict
        size += sizeof(self.__hardroot) * n     # link objects
        size += sizeof(self.__root) * n         # proxy objects
        return size

    # __update is a private alias so __init__ keeps working even if a
    # subclass overrides update().
    update = __update = _collections_abc.MutableMapping.update

    def keys(self):
        "D.keys() -> a set-like object providing a view on D's keys"
        return _OrderedDictKeysView(self)

    def items(self):
        "D.items() -> a set-like object providing a view on D's items"
        return _OrderedDictItemsView(self)

    def values(self):
        "D.values() -> an object providing a view on D's values"
        return _OrderedDictValuesView(self)

    __ne__ = _collections_abc.MutableMapping.__ne__

    # Sentinel distinguishing "no default supplied" from default=None.
    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value.  If key is not found, d is returned if given, otherwise KeyError
        is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        '''Insert key with a value of default if key is not in the dictionary.

        Return the value for key if key is in the dictionary, else default.
        '''
        if key in self:
            return self[key]
        self[key] = default
        return default

    @_recursive_repr()
    def __repr__(self):
        'od.__repr__() <==> repr(od)'
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self.items()))

    def __reduce__(self):
        'Return state information for pickling'
        # Strip attributes that every empty OrderedDict has (the linked-list
        # machinery); they are rebuilt by __init__ on unpickling.
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        return self.__class__, (), inst_dict or None, None, iter(self.items())

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''Create a new ordered dictionary with keys from iterable and values set to value.
        '''
        self = cls()
        for key in iterable:
            self[key] = value
        return self

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            # Equal contents AND equal key order (pairwise key comparison).
            return dict.__eq__(self, other) and all(map(_eq, self, other))
        return dict.__eq__(self, other)

    def __ior__(self, other):
        # In-place merge: od |= other
        self.update(other)
        return self

    def __or__(self, other):
        # Merge: od | other, keeping self's order first.
        if not isinstance(other, dict):
            return NotImplemented
        new = self.__class__(self)
        new.update(other)
        return new

    def __ror__(self, other):
        # Reflected merge: other | od
        if not isinstance(other, dict):
            return NotImplemented
        new = self.__class__(other)
        new.update(self)
        return new
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
try:
|
| 313 |
+
from _collections import OrderedDict
|
| 314 |
+
except ImportError:
|
| 315 |
+
# Leave the pure Python version in place.
|
| 316 |
+
pass
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
################################################################################
|
| 320 |
+
### namedtuple
|
| 321 |
+
################################################################################
|
| 322 |
+
|
| 323 |
+
try:
|
| 324 |
+
from _collections import _tuplegetter
|
| 325 |
+
except ImportError:
|
| 326 |
+
_tuplegetter = lambda index, doc: property(_itemgetter(index), doc=doc)
|
| 327 |
+
|
| 328 |
+
def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
    """Returns a new subclass of tuple with named fields.

    >>> Point = namedtuple('Point', ['x', 'y'])
    >>> Point.__doc__                   # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
    >>> p[0] + p[1]                     # indexable like a plain tuple
    33
    >>> x, y = p                        # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y                       # fields also accessible by name
    33
    >>> d = p._asdict()                 # convert to a dictionary
    >>> d['x']
    11
    >>> Point(**d)                      # convert from a dictionary
    Point(x=11, y=22)
    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)

    """

    # Validate the field names.  At the user's option, either generate an error
    # message or automatically replace the field name with a valid name.
    if isinstance(field_names, str):
        field_names = field_names.replace(',', ' ').split()
    field_names = list(map(str, field_names))
    typename = _sys.intern(str(typename))

    if rename:
        # Replace any invalid or duplicate name with a positional alias _N.
        seen = set()
        for index, name in enumerate(field_names):
            if (not name.isidentifier()
                or _iskeyword(name)
                or name.startswith('_')
                or name in seen):
                field_names[index] = f'_{index}'
            seen.add(name)

    for name in [typename] + field_names:
        if type(name) is not str:
            raise TypeError('Type names and field names must be strings')
        if not name.isidentifier():
            raise ValueError('Type names and field names must be valid '
                             f'identifiers: {name!r}')
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             f'keyword: {name!r}')

    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             f'{name!r}')
        if name in seen:
            raise ValueError(f'Encountered duplicate field name: {name!r}')
        seen.add(name)

    field_defaults = {}
    if defaults is not None:
        defaults = tuple(defaults)
        if len(defaults) > len(field_names):
            raise TypeError('Got more default values than field names')
        # Right-align the defaults against the trailing field names.
        field_defaults = dict(reversed(list(zip(reversed(field_names),
                                                reversed(defaults)))))

    # Variables used in the methods and docstrings
    field_names = tuple(map(_sys.intern, field_names))
    num_fields = len(field_names)
    arg_list = ', '.join(field_names)
    if num_fields == 1:
        # Trailing comma so the generated source stays a tuple expression.
        arg_list += ','
    repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
    tuple_new = tuple.__new__
    _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip

    # Create all the named tuple methods to be added to the class namespace

    # An empty __builtins__ keeps the eval'ed __new__ from reaching any
    # global names.
    namespace = {
        '_tuple_new': tuple_new,
        '__builtins__': {},
        '__name__': f'namedtuple_{typename}',
    }
    code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
    __new__ = eval(code, namespace)
    __new__.__name__ = '__new__'
    __new__.__doc__ = f'Create new instance of {typename}({arg_list})'
    if defaults is not None:
        __new__.__defaults__ = defaults

    @classmethod
    def _make(cls, iterable):
        result = tuple_new(cls, iterable)
        if _len(result) != num_fields:
            raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')
        return result

    _make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
                              'or iterable')

    def _replace(self, /, **kwds):
        # kwds.pop pulls replacement values; untouched fields fall through.
        result = self._make(_map(kwds.pop, field_names, self))
        if kwds:
            raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
        return result

    _replace.__doc__ = (f'Return a new {typename} object replacing specified '
                        'fields with new values')

    def __repr__(self):
        'Return a nicely formatted representation string'
        return self.__class__.__name__ + repr_fmt % self

    def _asdict(self):
        'Return a new dict which maps field names to their values.'
        return _dict(_zip(self._fields, self))

    def __getnewargs__(self):
        'Return self as a plain tuple.  Used by copy and pickle.'
        return _tuple(self)

    # Modify function metadata to help with introspection and debugging
    for method in (
        __new__,
        _make.__func__,
        _replace,
        __repr__,
        _asdict,
        __getnewargs__,
    ):
        method.__qualname__ = f'{typename}.{method.__name__}'

    # Build-up the class namespace dictionary
    # and use type() to build the result class
    class_namespace = {
        '__doc__': f'{typename}({arg_list})',
        '__slots__': (),
        '_fields': field_names,
        '_field_defaults': field_defaults,
        '__new__': __new__,
        '_make': _make,
        '_replace': _replace,
        '__repr__': __repr__,
        '_asdict': _asdict,
        '__getnewargs__': __getnewargs__,
        '__match_args__': field_names,
    }
    for index, name in enumerate(field_names):
        doc = _sys.intern(f'Alias for field number {index}')
        class_namespace[name] = _tuplegetter(index, doc)

    result = type(typename, (tuple,), class_namespace)

    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created.  Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython), or where the user has
    # specified a particular module.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
    if module is not None:
        result.__module__ = module

    return result
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
########################################################################
|
| 500 |
+
### Counter
|
| 501 |
+
########################################################################
|
| 502 |
+
|
| 503 |
+
def _count_elements(mapping, iterable):
    'Tally elements from the iterable.'
    # Bind the bound method once so the hot loop avoids repeated lookups.
    get_count = mapping.get
    for element in iterable:
        mapping[element] = get_count(element, 0) + 1
|
| 508 |
+
|
| 509 |
+
try: # Load C helper function if available
|
| 510 |
+
from _collections import _count_elements
|
| 511 |
+
except ImportError:
|
| 512 |
+
pass
|
| 513 |
+
|
| 514 |
+
class Counter(dict):
|
| 515 |
+
'''Dict subclass for counting hashable items. Sometimes called a bag
|
| 516 |
+
or multiset. Elements are stored as dictionary keys and their counts
|
| 517 |
+
are stored as dictionary values.
|
| 518 |
+
|
| 519 |
+
>>> c = Counter('abcdeabcdabcaba') # count elements from a string
|
| 520 |
+
|
| 521 |
+
>>> c.most_common(3) # three most common elements
|
| 522 |
+
[('a', 5), ('b', 4), ('c', 3)]
|
| 523 |
+
>>> sorted(c) # list all unique elements
|
| 524 |
+
['a', 'b', 'c', 'd', 'e']
|
| 525 |
+
>>> ''.join(sorted(c.elements())) # list elements with repetitions
|
| 526 |
+
'aaaaabbbbcccdde'
|
| 527 |
+
>>> sum(c.values()) # total of all counts
|
| 528 |
+
15
|
| 529 |
+
|
| 530 |
+
>>> c['a'] # count of letter 'a'
|
| 531 |
+
5
|
| 532 |
+
>>> for elem in 'shazam': # update counts from an iterable
|
| 533 |
+
... c[elem] += 1 # by adding 1 to each element's count
|
| 534 |
+
>>> c['a'] # now there are seven 'a'
|
| 535 |
+
7
|
| 536 |
+
>>> del c['b'] # remove all 'b'
|
| 537 |
+
>>> c['b'] # now there are zero 'b'
|
| 538 |
+
0
|
| 539 |
+
|
| 540 |
+
>>> d = Counter('simsalabim') # make another counter
|
| 541 |
+
>>> c.update(d) # add in the second counter
|
| 542 |
+
>>> c['a'] # now there are nine 'a'
|
| 543 |
+
9
|
| 544 |
+
|
| 545 |
+
>>> c.clear() # empty the counter
|
| 546 |
+
>>> c
|
| 547 |
+
Counter()
|
| 548 |
+
|
| 549 |
+
Note: If a count is set to zero or reduced to zero, it will remain
|
| 550 |
+
in the counter until the entry is deleted or the counter is cleared:
|
| 551 |
+
|
| 552 |
+
>>> c = Counter('aaabbc')
|
| 553 |
+
>>> c['b'] -= 2 # reduce the count of 'b' by two
|
| 554 |
+
>>> c.most_common() # 'b' is still in, but its count is zero
|
| 555 |
+
[('a', 3), ('c', 1), ('b', 0)]
|
| 556 |
+
|
| 557 |
+
'''
|
| 558 |
+
# References:
|
| 559 |
+
# http://en.wikipedia.org/wiki/Multiset
|
| 560 |
+
# http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
|
| 561 |
+
# http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
|
| 562 |
+
# http://code.activestate.com/recipes/259174/
|
| 563 |
+
# Knuth, TAOCP Vol. II section 4.6.3
|
| 564 |
+
|
| 565 |
+
    def __init__(self, iterable=None, /, **kwds):
        '''Create a new, empty Counter object.  And if given, count elements
        from an input iterable.  Or, initialize the count from another mapping
        of elements to their counts.

        >>> c = Counter()                           # a new, empty counter
        >>> c = Counter('gallahad')                 # a new counter from an iterable
        >>> c = Counter({'a': 4, 'b': 2})           # a new counter from a mapping
        >>> c = Counter(a=4, b=2)                   # a new counter from keyword args

        '''
        super().__init__()
        # All counting logic (iterable vs. mapping vs. keyword args) lives
        # in update(), which adds rather than replaces counts.
        self.update(iterable, **kwds)
|
| 578 |
+
|
| 579 |
+
    def __missing__(self, key):
        'The count of elements not in the Counter is zero.'
        # Needed so that self[missing_item] does not raise KeyError
        # Note: unlike defaultdict, the lookup does NOT insert the key.
        return 0
|
| 583 |
+
|
| 584 |
+
def total(self):
|
| 585 |
+
'Sum of the counts'
|
| 586 |
+
return sum(self.values())
|
| 587 |
+
|
| 588 |
+
    def most_common(self, n=None):
        '''List the n most common elements and their counts from the most
        common to the least.  If n is None, then list all element counts.

        >>> Counter('abracadabra').most_common(3)
        [('a', 5), ('b', 2), ('r', 2)]

        '''
        # Emulate Bag.sortedByCount from Smalltalk
        if n is None:
            # Full listing: one sort over all items, descending by count.
            return sorted(self.items(), key=_itemgetter(1), reverse=True)

        # Lazy import to speedup Python startup time
        import heapq
        # heapq.nlargest is O(len(self) * log n), cheaper than a full sort
        # when n is small.
        return heapq.nlargest(n, self.items(), key=_itemgetter(1))
|
| 603 |
+
|
| 604 |
+
    def elements(self):
        '''Iterator over elements repeating each as many times as its count.

        >>> c = Counter('ABCABC')
        >>> sorted(c.elements())
        ['A', 'A', 'B', 'B', 'C', 'C']

        # Knuth's example for prime factors of 1836:  2**2 * 3**3 * 17**1
        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
        >>> product = 1
        >>> for factor in prime_factors.elements():     # loop over factors
        ...     product *= factor                       # and multiply them
        >>> product
        1836

        Note, if an element's count has been set to zero or is a negative
        number, elements() will ignore it.

        '''
        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
        # starmap(repeat, items) yields one repeat(elem, count) iterator per
        # entry; chain.from_iterable flattens them.  repeat() with a count
        # <= 0 yields nothing, which is how non-positive counts are skipped.
        return _chain.from_iterable(_starmap(_repeat, self.items()))
|
| 625 |
+
|
| 626 |
+
# Override dict methods where necessary
|
| 627 |
+
|
| 628 |
+
    @classmethod
    def fromkeys(cls, iterable, v=None):
        '''Disabled: always raises NotImplementedError (see comment below).'''
        # There is no equivalent method for counters because the semantics
        # would be ambiguous in cases such as Counter.fromkeys('aaabbc', v=2).
        # Initializing counters to zero values isn't necessary because zero
        # is already the default value for counter lookups.  Initializing
        # to one is easily accomplished with Counter(set(iterable)).  For
        # more exotic cases, create a dictionary first using a dictionary
        # comprehension or dict.fromkeys().
        raise NotImplementedError(
            'Counter.fromkeys() is undefined.  Use Counter(iterable) instead.')
|
| 639 |
+
|
| 640 |
+
    def update(self, iterable=None, /, **kwds):
        '''Like dict.update() but add counts instead of replacing them.

        Source can be an iterable, a dictionary, or another Counter instance.

        >>> c = Counter('which')
        >>> c.update('witch')           # add elements from another iterable
        >>> d = Counter('watch')
        >>> c.update(d)                 # add elements from another counter
        >>> c['h']                      # four 'h' in which, witch, and watch
        4

        '''
        # The regular dict.update() operation makes no sense here because the
        # replace behavior results in the some of original untouched counts
        # being mixed-in with all of the other counts for a mismash that
        # doesn't have a straight-forward interpretation in most counting
        # contexts.  Instead, we implement straight-addition.  Both the inputs
        # and outputs are allowed to contain zero and negative counts.

        if iterable is not None:
            if isinstance(iterable, _collections_abc.Mapping):
                if self:
                    # Add each mapped count onto the existing one.
                    self_get = self.get
                    for elem, count in iterable.items():
                        self[elem] = count + self_get(elem, 0)
                else:
                    # fast path when counter is empty
                    super().update(iterable)
            else:
                # Plain iterable: tally each element (C helper if available).
                _count_elements(self, iterable)
        if kwds:
            # Keyword args are folded in recursively as a mapping.
            self.update(kwds)
|
| 673 |
+
|
| 674 |
+
    def subtract(self, iterable=None, /, **kwds):
        '''Like dict.update() but subtracts counts instead of replacing them.
        Counts can be reduced below zero.  Both the inputs and outputs are
        allowed to contain zero and negative counts.

        Source can be an iterable, a dictionary, or another Counter instance.

        >>> c = Counter('which')
        >>> c.subtract('witch')             # subtract elements from another iterable
        >>> c.subtract(Counter('watch'))    # subtract elements from another counter
        >>> c['h']                          # 2 in which, minus 1 in witch, minus 1 in watch
        0
        >>> c['w']                          # 1 in which, minus 1 in witch, minus 1 in watch
        -1

        '''
        if iterable is not None:
            self_get = self.get
            if isinstance(iterable, _collections_abc.Mapping):
                # Mapping source: subtract each mapped count.
                for elem, count in iterable.items():
                    self[elem] = self_get(elem, 0) - count
            else:
                # Plain iterable: each occurrence subtracts one.
                for elem in iterable:
                    self[elem] = self_get(elem, 0) - 1
        if kwds:
            # Keyword args are folded in recursively as a mapping.
            self.subtract(kwds)
|
| 700 |
+
|
| 701 |
+
def copy(self):
|
| 702 |
+
'Return a shallow copy.'
|
| 703 |
+
return self.__class__(self)
|
| 704 |
+
|
| 705 |
+
def __reduce__(self):
|
| 706 |
+
return self.__class__, (dict(self),)
|
| 707 |
+
|
| 708 |
+
def __delitem__(self, elem):
|
| 709 |
+
'Like dict.__delitem__() but does not raise KeyError for missing values.'
|
| 710 |
+
if elem in self:
|
| 711 |
+
super().__delitem__(elem)
|
| 712 |
+
|
| 713 |
+
def __eq__(self, other):
|
| 714 |
+
'True if all counts agree. Missing counts are treated as zero.'
|
| 715 |
+
if not isinstance(other, Counter):
|
| 716 |
+
return NotImplemented
|
| 717 |
+
return all(self[e] == other[e] for c in (self, other) for e in c)
|
| 718 |
+
|
| 719 |
+
def __ne__(self, other):
|
| 720 |
+
'True if any counts disagree. Missing counts are treated as zero.'
|
| 721 |
+
if not isinstance(other, Counter):
|
| 722 |
+
return NotImplemented
|
| 723 |
+
return not self == other
|
| 724 |
+
|
| 725 |
+
def __le__(self, other):
|
| 726 |
+
'True if all counts in self are a subset of those in other.'
|
| 727 |
+
if not isinstance(other, Counter):
|
| 728 |
+
return NotImplemented
|
| 729 |
+
return all(self[e] <= other[e] for c in (self, other) for e in c)
|
| 730 |
+
|
| 731 |
+
def __lt__(self, other):
|
| 732 |
+
'True if all counts in self are a proper subset of those in other.'
|
| 733 |
+
if not isinstance(other, Counter):
|
| 734 |
+
return NotImplemented
|
| 735 |
+
return self <= other and self != other
|
| 736 |
+
|
| 737 |
+
def __ge__(self, other):
|
| 738 |
+
'True if all counts in self are a superset of those in other.'
|
| 739 |
+
if not isinstance(other, Counter):
|
| 740 |
+
return NotImplemented
|
| 741 |
+
return all(self[e] >= other[e] for c in (self, other) for e in c)
|
| 742 |
+
|
| 743 |
+
def __gt__(self, other):
|
| 744 |
+
'True if all counts in self are a proper superset of those in other.'
|
| 745 |
+
if not isinstance(other, Counter):
|
| 746 |
+
return NotImplemented
|
| 747 |
+
return self >= other and self != other
|
| 748 |
+
|
| 749 |
+
    def __repr__(self):
        # Render as e.g. Counter({'a': 3, 'b': 1}) with entries ordered
        # most-common first when the counts are orderable.
        if not self:
            return f'{self.__class__.__name__}()'
        try:
            # dict() preserves the ordering returned by most_common()
            d = dict(self.most_common())
        except TypeError:
            # handle case where values are not orderable
            d = dict(self)
        return f'{self.__class__.__name__}({d!r})'
|
| 759 |
+
|
| 760 |
+
# Multiset-style mathematical operations discussed in:
|
| 761 |
+
# Knuth TAOCP Volume II section 4.6.3 exercise 19
|
| 762 |
+
# and at http://en.wikipedia.org/wiki/Multiset
|
| 763 |
+
#
|
| 764 |
+
# Outputs guaranteed to only include positive counts.
|
| 765 |
+
#
|
| 766 |
+
# To strip negative and zero counts, add-in an empty counter:
|
| 767 |
+
# c += Counter()
|
| 768 |
+
#
|
| 769 |
+
# Results are ordered according to when an element is first
|
| 770 |
+
# encountered in the left operand and then by the order
|
| 771 |
+
# encountered in the right operand.
|
| 772 |
+
#
|
| 773 |
+
# When the multiplicities are all zero or one, multiset operations
|
| 774 |
+
# are guaranteed to be equivalent to the corresponding operations
|
| 775 |
+
# for regular sets.
|
| 776 |
+
# Given counter multisets such as:
|
| 777 |
+
# cp = Counter(a=1, b=0, c=1)
|
| 778 |
+
# cq = Counter(c=1, d=0, e=1)
|
| 779 |
+
# The corresponding regular sets would be:
|
| 780 |
+
# sp = {'a', 'c'}
|
| 781 |
+
# sq = {'c', 'e'}
|
| 782 |
+
# All of the following relations would hold:
|
| 783 |
+
# set(cp + cq) == sp | sq
|
| 784 |
+
# set(cp - cq) == sp - sq
|
| 785 |
+
# set(cp | cq) == sp | sq
|
| 786 |
+
# set(cp & cq) == sp & sq
|
| 787 |
+
# (cp == cq) == (sp == sq)
|
| 788 |
+
# (cp != cq) == (sp != sq)
|
| 789 |
+
# (cp <= cq) == (sp <= sq)
|
| 790 |
+
# (cp < cq) == (sp < sq)
|
| 791 |
+
# (cp >= cq) == (sp >= sq)
|
| 792 |
+
# (cp > cq) == (sp > sq)
|
| 793 |
+
|
| 794 |
+
def __add__(self, other):
|
| 795 |
+
'''Add counts from two counters.
|
| 796 |
+
|
| 797 |
+
>>> Counter('abbb') + Counter('bcc')
|
| 798 |
+
Counter({'b': 4, 'c': 2, 'a': 1})
|
| 799 |
+
|
| 800 |
+
'''
|
| 801 |
+
if not isinstance(other, Counter):
|
| 802 |
+
return NotImplemented
|
| 803 |
+
result = Counter()
|
| 804 |
+
for elem, count in self.items():
|
| 805 |
+
newcount = count + other[elem]
|
| 806 |
+
if newcount > 0:
|
| 807 |
+
result[elem] = newcount
|
| 808 |
+
for elem, count in other.items():
|
| 809 |
+
if elem not in self and count > 0:
|
| 810 |
+
result[elem] = count
|
| 811 |
+
return result
|
| 812 |
+
|
| 813 |
+
def __sub__(self, other):
|
| 814 |
+
''' Subtract count, but keep only results with positive counts.
|
| 815 |
+
|
| 816 |
+
>>> Counter('abbbc') - Counter('bccd')
|
| 817 |
+
Counter({'b': 2, 'a': 1})
|
| 818 |
+
|
| 819 |
+
'''
|
| 820 |
+
if not isinstance(other, Counter):
|
| 821 |
+
return NotImplemented
|
| 822 |
+
result = Counter()
|
| 823 |
+
for elem, count in self.items():
|
| 824 |
+
newcount = count - other[elem]
|
| 825 |
+
if newcount > 0:
|
| 826 |
+
result[elem] = newcount
|
| 827 |
+
for elem, count in other.items():
|
| 828 |
+
if elem not in self and count < 0:
|
| 829 |
+
result[elem] = 0 - count
|
| 830 |
+
return result
|
| 831 |
+
|
| 832 |
+
def __or__(self, other):
|
| 833 |
+
'''Union is the maximum of value in either of the input counters.
|
| 834 |
+
|
| 835 |
+
>>> Counter('abbb') | Counter('bcc')
|
| 836 |
+
Counter({'b': 3, 'c': 2, 'a': 1})
|
| 837 |
+
|
| 838 |
+
'''
|
| 839 |
+
if not isinstance(other, Counter):
|
| 840 |
+
return NotImplemented
|
| 841 |
+
result = Counter()
|
| 842 |
+
for elem, count in self.items():
|
| 843 |
+
other_count = other[elem]
|
| 844 |
+
newcount = other_count if count < other_count else count
|
| 845 |
+
if newcount > 0:
|
| 846 |
+
result[elem] = newcount
|
| 847 |
+
for elem, count in other.items():
|
| 848 |
+
if elem not in self and count > 0:
|
| 849 |
+
result[elem] = count
|
| 850 |
+
return result
|
| 851 |
+
|
| 852 |
+
def __and__(self, other):
|
| 853 |
+
''' Intersection is the minimum of corresponding counts.
|
| 854 |
+
|
| 855 |
+
>>> Counter('abbb') & Counter('bcc')
|
| 856 |
+
Counter({'b': 1})
|
| 857 |
+
|
| 858 |
+
'''
|
| 859 |
+
if not isinstance(other, Counter):
|
| 860 |
+
return NotImplemented
|
| 861 |
+
result = Counter()
|
| 862 |
+
for elem, count in self.items():
|
| 863 |
+
other_count = other[elem]
|
| 864 |
+
newcount = count if count < other_count else other_count
|
| 865 |
+
if newcount > 0:
|
| 866 |
+
result[elem] = newcount
|
| 867 |
+
return result
|
| 868 |
+
|
| 869 |
+
def __pos__(self):
|
| 870 |
+
'Adds an empty counter, effectively stripping negative and zero counts'
|
| 871 |
+
result = Counter()
|
| 872 |
+
for elem, count in self.items():
|
| 873 |
+
if count > 0:
|
| 874 |
+
result[elem] = count
|
| 875 |
+
return result
|
| 876 |
+
|
| 877 |
+
def __neg__(self):
|
| 878 |
+
'''Subtracts from an empty counter. Strips positive and zero counts,
|
| 879 |
+
and flips the sign on negative counts.
|
| 880 |
+
|
| 881 |
+
'''
|
| 882 |
+
result = Counter()
|
| 883 |
+
for elem, count in self.items():
|
| 884 |
+
if count < 0:
|
| 885 |
+
result[elem] = 0 - count
|
| 886 |
+
return result
|
| 887 |
+
|
| 888 |
+
def _keep_positive(self):
|
| 889 |
+
'''Internal method to strip elements with a negative or zero count'''
|
| 890 |
+
nonpositive = [elem for elem, count in self.items() if not count > 0]
|
| 891 |
+
for elem in nonpositive:
|
| 892 |
+
del self[elem]
|
| 893 |
+
return self
|
| 894 |
+
|
| 895 |
+
def __iadd__(self, other):
|
| 896 |
+
'''Inplace add from another counter, keeping only positive counts.
|
| 897 |
+
|
| 898 |
+
>>> c = Counter('abbb')
|
| 899 |
+
>>> c += Counter('bcc')
|
| 900 |
+
>>> c
|
| 901 |
+
Counter({'b': 4, 'c': 2, 'a': 1})
|
| 902 |
+
|
| 903 |
+
'''
|
| 904 |
+
for elem, count in other.items():
|
| 905 |
+
self[elem] += count
|
| 906 |
+
return self._keep_positive()
|
| 907 |
+
|
| 908 |
+
def __isub__(self, other):
|
| 909 |
+
'''Inplace subtract counter, but keep only results with positive counts.
|
| 910 |
+
|
| 911 |
+
>>> c = Counter('abbbc')
|
| 912 |
+
>>> c -= Counter('bccd')
|
| 913 |
+
>>> c
|
| 914 |
+
Counter({'b': 2, 'a': 1})
|
| 915 |
+
|
| 916 |
+
'''
|
| 917 |
+
for elem, count in other.items():
|
| 918 |
+
self[elem] -= count
|
| 919 |
+
return self._keep_positive()
|
| 920 |
+
|
| 921 |
+
def __ior__(self, other):
|
| 922 |
+
'''Inplace union is the maximum of value from either counter.
|
| 923 |
+
|
| 924 |
+
>>> c = Counter('abbb')
|
| 925 |
+
>>> c |= Counter('bcc')
|
| 926 |
+
>>> c
|
| 927 |
+
Counter({'b': 3, 'c': 2, 'a': 1})
|
| 928 |
+
|
| 929 |
+
'''
|
| 930 |
+
for elem, other_count in other.items():
|
| 931 |
+
count = self[elem]
|
| 932 |
+
if other_count > count:
|
| 933 |
+
self[elem] = other_count
|
| 934 |
+
return self._keep_positive()
|
| 935 |
+
|
| 936 |
+
def __iand__(self, other):
|
| 937 |
+
'''Inplace intersection is the minimum of corresponding counts.
|
| 938 |
+
|
| 939 |
+
>>> c = Counter('abbb')
|
| 940 |
+
>>> c &= Counter('bcc')
|
| 941 |
+
>>> c
|
| 942 |
+
Counter({'b': 1})
|
| 943 |
+
|
| 944 |
+
'''
|
| 945 |
+
for elem, count in self.items():
|
| 946 |
+
other_count = other[elem]
|
| 947 |
+
if other_count < count:
|
| 948 |
+
self[elem] = other_count
|
| 949 |
+
return self._keep_positive()
|
| 950 |
+
|
| 951 |
+
|
| 952 |
+
########################################################################
|
| 953 |
+
### ChainMap
|
| 954 |
+
########################################################################
|
| 955 |
+
|
| 956 |
+
class ChainMap(_collections_abc.MutableMapping):
    ''' A ChainMap groups multiple dicts (or other mappings) together
    to create a single, updateable view.

    The underlying mappings are stored in a list.  That list is public and can
    be accessed or updated using the *maps* attribute.  There is no other
    state.

    Lookups search the underlying mappings successively until a key is found.
    In contrast, writes, updates, and deletions only operate on the first
    mapping.

    '''

    def __init__(self, *maps):
        '''Initialize a ChainMap by setting *maps* to the given mappings.
        If no mappings are provided, a single empty dictionary is used.

        '''
        self.maps = list(maps) or [{}]          # always at least one map

    def __missing__(self, key):
        # Hook for subclasses; __getitem__ calls this after every mapping
        # has been searched without success.
        raise KeyError(key)

    def __getitem__(self, key):
        # Search the mappings front-to-back; first hit wins.
        for mapping in self.maps:
            try:
                return mapping[key]             # can't use 'key in mapping' with defaultdict
            except KeyError:
                pass
        return self.__missing__(key)            # support subclasses that define __missing__

    def get(self, key, default=None):
        return self[key] if key in self else default

    def __len__(self):
        return len(set().union(*self.maps))     # reuses stored hash values if possible

    def __iter__(self):
        # Build a key-order dict back-to-front so that earlier (higher
        # priority) maps determine the final ordering of duplicate keys.
        d = {}
        for mapping in reversed(self.maps):
            d.update(dict.fromkeys(mapping))    # reuses stored hash values if possible
        return iter(d)

    def __contains__(self, key):
        return any(key in m for m in self.maps)

    def __bool__(self):
        return any(self.maps)

    @_recursive_repr()
    def __repr__(self):
        return f'{self.__class__.__name__}({", ".join(map(repr, self.maps))})'

    @classmethod
    def fromkeys(cls, iterable, *args):
        'Create a ChainMap with a single dict created from the iterable.'
        return cls(dict.fromkeys(iterable, *args))

    def copy(self):
        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
        return self.__class__(self.maps[0].copy(), *self.maps[1:])

    __copy__ = copy

    def new_child(self, m=None, **kwargs):      # like Django's Context.push()
        '''New ChainMap with a new map followed by all previous maps.
        If no map is provided, an empty dict is used.
        Keyword arguments update the map or new empty dict.
        '''
        if m is None:
            m = kwargs
        elif kwargs:
            m.update(kwargs)
        return self.__class__(m, *self.maps)

    @property
    def parents(self):                          # like Django's Context.pop()
        'New ChainMap from maps[1:].'
        return self.__class__(*self.maps[1:])

    def __setitem__(self, key, value):
        self.maps[0][key] = value

    def __delitem__(self, key):
        try:
            del self.maps[0][key]
        except KeyError:
            raise KeyError(f'Key not found in the first mapping: {key!r}')

    def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
        try:
            return self.maps[0].popitem()
        except KeyError:
            raise KeyError('No keys found in the first mapping.')

    def pop(self, key, *args):
        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
        try:
            return self.maps[0].pop(key, *args)
        except KeyError:
            raise KeyError(f'Key not found in the first mapping: {key!r}')

    def clear(self):
        'Clear maps[0], leaving maps[1:] intact.'
        self.maps[0].clear()

    def __ior__(self, other):
        # In-place |= updates only the first mapping, like other writes.
        self.maps[0].update(other)
        return self

    def __or__(self, other):
        if not isinstance(other, _collections_abc.Mapping):
            return NotImplemented
        m = self.copy()
        m.maps[0].update(other)
        return m

    def __ror__(self, other):
        if not isinstance(other, _collections_abc.Mapping):
            return NotImplemented
        # Flatten self on top of *other* into a single dict; the result is
        # a one-map ChainMap, matching dict | semantics for the left operand.
        m = dict(other)
        for child in reversed(self.maps):
            m.update(child)
        return self.__class__(m)
|
| 1082 |
+
|
| 1083 |
+
|
| 1084 |
+
################################################################################
|
| 1085 |
+
### UserDict
|
| 1086 |
+
################################################################################
|
| 1087 |
+
|
| 1088 |
+
class UserDict(_collections_abc.MutableMapping):
    """A dict-like wrapper class; the real dictionary is kept in ``self.data``.

    Subclass this (instead of ``dict``) when overridden methods must be
    honored by the built-in operations, since every operation here is
    routed through the regular methods.
    """

    # Start by filling-out the abstract methods
    def __init__(self, dict=None, /, **kwargs):
        self.data = {}
        if dict is not None:
            self.update(dict)
        if kwargs:
            self.update(kwargs)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, key):
        if key in self.data:
            return self.data[key]
        # Defer to a subclass-defined __missing__ hook, like dict does.
        if hasattr(self.__class__, "__missing__"):
            return self.__class__.__missing__(self, key)
        raise KeyError(key)

    def __setitem__(self, key, item):
        self.data[key] = item

    def __delitem__(self, key):
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    # Modify __contains__ to work correctly when __missing__ is present
    def __contains__(self, key):
        return key in self.data

    # Now, add the methods in dicts but not in MutableMapping
    def __repr__(self):
        return repr(self.data)

    def __or__(self, other):
        if isinstance(other, UserDict):
            return self.__class__(self.data | other.data)
        if isinstance(other, dict):
            return self.__class__(self.data | other)
        return NotImplemented

    def __ror__(self, other):
        if isinstance(other, UserDict):
            return self.__class__(other.data | self.data)
        if isinstance(other, dict):
            return self.__class__(other | self.data)
        return NotImplemented

    def __ior__(self, other):
        if isinstance(other, UserDict):
            self.data |= other.data
        else:
            self.data |= other
        return self

    def __copy__(self):
        inst = self.__class__.__new__(self.__class__)
        inst.__dict__.update(self.__dict__)
        # Create a copy and avoid triggering descriptors
        inst.__dict__["data"] = self.__dict__["data"].copy()
        return inst

    def copy(self):
        if self.__class__ is UserDict:
            return UserDict(self.data.copy())
        import copy
        # Temporarily empty self.data so copy.copy() does not duplicate it;
        # the items are re-added through update() afterwards so that any
        # subclass __setitem__ logic is honored.
        data = self.data
        try:
            self.data = {}
            c = copy.copy(self)
        finally:
            self.data = data
        c.update(self)
        return c

    @classmethod
    def fromkeys(cls, iterable, value=None):
        # Build through __setitem__ so subclass overrides take effect.
        d = cls()
        for key in iterable:
            d[key] = value
        return d
|
| 1172 |
+
|
| 1173 |
+
|
| 1174 |
+
################################################################################
|
| 1175 |
+
### UserList
|
| 1176 |
+
################################################################################
|
| 1177 |
+
|
| 1178 |
+
class UserList(_collections_abc.MutableSequence):
    """A more or less complete user-defined wrapper around list objects."""

    def __init__(self, initlist=None):
        self.data = []
        if initlist is not None:
            # XXX should this accept an arbitrary sequence?
            if type(initlist) == type(self.data):
                self.data[:] = initlist
            elif isinstance(initlist, UserList):
                self.data[:] = initlist.data[:]
            else:
                self.data = list(initlist)

    def __repr__(self):
        return repr(self.data)

    def __lt__(self, other):
        return self.data < self.__cast(other)

    def __le__(self, other):
        return self.data <= self.__cast(other)

    def __eq__(self, other):
        return self.data == self.__cast(other)

    def __gt__(self, other):
        return self.data > self.__cast(other)

    def __ge__(self, other):
        return self.data >= self.__cast(other)

    def __cast(self, other):
        # Unwrap other UserLists so comparisons operate list-to-list.
        return other.data if isinstance(other, UserList) else other

    def __contains__(self, item):
        return item in self.data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        # Slicing returns an instance of the (sub)class, matching list semantics.
        if isinstance(i, slice):
            return self.__class__(self.data[i])
        else:
            return self.data[i]

    def __setitem__(self, i, item):
        self.data[i] = item

    def __delitem__(self, i):
        del self.data[i]

    def __add__(self, other):
        if isinstance(other, UserList):
            return self.__class__(self.data + other.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(self.data + other)
        return self.__class__(self.data + list(other))

    def __radd__(self, other):
        if isinstance(other, UserList):
            return self.__class__(other.data + self.data)
        elif isinstance(other, type(self.data)):
            return self.__class__(other + self.data)
        return self.__class__(list(other) + self.data)

    def __iadd__(self, other):
        if isinstance(other, UserList):
            self.data += other.data
        elif isinstance(other, type(self.data)):
            self.data += other
        else:
            self.data += list(other)
        return self

    def __mul__(self, n):
        return self.__class__(self.data * n)

    __rmul__ = __mul__

    def __imul__(self, n):
        self.data *= n
        return self

    def __copy__(self):
        inst = self.__class__.__new__(self.__class__)
        inst.__dict__.update(self.__dict__)
        # Create a copy and avoid triggering descriptors
        inst.__dict__["data"] = self.__dict__["data"][:]
        return inst

    def append(self, item):
        self.data.append(item)

    def insert(self, i, item):
        self.data.insert(i, item)

    def pop(self, i=-1):
        return self.data.pop(i)

    def remove(self, item):
        self.data.remove(item)

    def clear(self):
        self.data.clear()

    def copy(self):
        return self.__class__(self)

    def count(self, item):
        return self.data.count(item)

    def index(self, item, *args):
        return self.data.index(item, *args)

    def reverse(self):
        self.data.reverse()

    def sort(self, /, *args, **kwds):
        self.data.sort(*args, **kwds)

    def extend(self, other):
        if isinstance(other, UserList):
            self.data.extend(other.data)
        else:
            self.data.extend(other)
|
| 1305 |
+
|
| 1306 |
+
|
| 1307 |
+
################################################################################
|
| 1308 |
+
### UserString
|
| 1309 |
+
################################################################################
|
| 1310 |
+
|
| 1311 |
+
class UserString(_collections_abc.Sequence):
    """A user-defined wrapper around string objects; the real str is kept
    in ``self.data``.  Methods that produce a string return an instance of
    the (sub)class, while predicates and searches return plain values.
    """

    def __init__(self, seq):
        if isinstance(seq, str):
            self.data = seq
        elif isinstance(seq, UserString):
            self.data = seq.data[:]
        else:
            # Fall back to str() for arbitrary objects.
            self.data = str(seq)

    def __str__(self):
        return str(self.data)

    def __repr__(self):
        return repr(self.data)

    def __int__(self):
        return int(self.data)

    def __float__(self):
        return float(self.data)

    def __complex__(self):
        return complex(self.data)

    def __hash__(self):
        return hash(self.data)

    def __getnewargs__(self):
        return (self.data[:],)

    # Rich comparisons unwrap other UserStrings so str comparison applies.
    def __eq__(self, string):
        if isinstance(string, UserString):
            return self.data == string.data
        return self.data == string

    def __lt__(self, string):
        if isinstance(string, UserString):
            return self.data < string.data
        return self.data < string

    def __le__(self, string):
        if isinstance(string, UserString):
            return self.data <= string.data
        return self.data <= string

    def __gt__(self, string):
        if isinstance(string, UserString):
            return self.data > string.data
        return self.data > string

    def __ge__(self, string):
        if isinstance(string, UserString):
            return self.data >= string.data
        return self.data >= string

    def __contains__(self, char):
        if isinstance(char, UserString):
            char = char.data
        return char in self.data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        return self.__class__(self.data[index])

    def __add__(self, other):
        if isinstance(other, UserString):
            return self.__class__(self.data + other.data)
        elif isinstance(other, str):
            return self.__class__(self.data + other)
        return self.__class__(self.data + str(other))

    def __radd__(self, other):
        if isinstance(other, str):
            return self.__class__(other + self.data)
        return self.__class__(str(other) + self.data)

    def __mul__(self, n):
        return self.__class__(self.data * n)

    __rmul__ = __mul__

    def __mod__(self, args):
        return self.__class__(self.data % args)

    def __rmod__(self, template):
        return self.__class__(str(template) % self)

    # the following methods are defined in alphabetical order:
    def capitalize(self):
        return self.__class__(self.data.capitalize())

    def casefold(self):
        return self.__class__(self.data.casefold())

    def center(self, width, *args):
        return self.__class__(self.data.center(width, *args))

    def count(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.count(sub, start, end)

    def removeprefix(self, prefix, /):
        if isinstance(prefix, UserString):
            prefix = prefix.data
        return self.__class__(self.data.removeprefix(prefix))

    def removesuffix(self, suffix, /):
        if isinstance(suffix, UserString):
            suffix = suffix.data
        return self.__class__(self.data.removesuffix(suffix))

    def encode(self, encoding='utf-8', errors='strict'):
        # Normalize explicit None arguments to the documented defaults.
        encoding = 'utf-8' if encoding is None else encoding
        errors = 'strict' if errors is None else errors
        return self.data.encode(encoding, errors)

    def endswith(self, suffix, start=0, end=_sys.maxsize):
        return self.data.endswith(suffix, start, end)

    def expandtabs(self, tabsize=8):
        return self.__class__(self.data.expandtabs(tabsize))

    def find(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.find(sub, start, end)

    def format(self, /, *args, **kwds):
        return self.data.format(*args, **kwds)

    def format_map(self, mapping):
        return self.data.format_map(mapping)

    def index(self, sub, start=0, end=_sys.maxsize):
        return self.data.index(sub, start, end)

    def isalpha(self):
        return self.data.isalpha()

    def isalnum(self):
        return self.data.isalnum()

    def isascii(self):
        return self.data.isascii()

    def isdecimal(self):
        return self.data.isdecimal()

    def isdigit(self):
        return self.data.isdigit()

    def isidentifier(self):
        return self.data.isidentifier()

    def islower(self):
        return self.data.islower()

    def isnumeric(self):
        return self.data.isnumeric()

    def isprintable(self):
        return self.data.isprintable()

    def isspace(self):
        return self.data.isspace()

    def istitle(self):
        return self.data.istitle()

    def isupper(self):
        return self.data.isupper()

    def join(self, seq):
        return self.data.join(seq)

    def ljust(self, width, *args):
        return self.__class__(self.data.ljust(width, *args))

    def lower(self):
        return self.__class__(self.data.lower())

    def lstrip(self, chars=None):
        return self.__class__(self.data.lstrip(chars))

    maketrans = str.maketrans

    def partition(self, sep):
        return self.data.partition(sep)

    def replace(self, old, new, maxsplit=-1):
        # NOTE: the third parameter is historically named *maxsplit* here,
        # although str.replace() calls it *count*; kept for compatibility.
        if isinstance(old, UserString):
            old = old.data
        if isinstance(new, UserString):
            new = new.data
        return self.__class__(self.data.replace(old, new, maxsplit))

    def rfind(self, sub, start=0, end=_sys.maxsize):
        if isinstance(sub, UserString):
            sub = sub.data
        return self.data.rfind(sub, start, end)

    def rindex(self, sub, start=0, end=_sys.maxsize):
        return self.data.rindex(sub, start, end)

    def rjust(self, width, *args):
        return self.__class__(self.data.rjust(width, *args))

    def rpartition(self, sep):
        return self.data.rpartition(sep)

    def rstrip(self, chars=None):
        return self.__class__(self.data.rstrip(chars))

    def split(self, sep=None, maxsplit=-1):
        return self.data.split(sep, maxsplit)

    def rsplit(self, sep=None, maxsplit=-1):
        return self.data.rsplit(sep, maxsplit)

    def splitlines(self, keepends=False):
        return self.data.splitlines(keepends)

    def startswith(self, prefix, start=0, end=_sys.maxsize):
        return self.data.startswith(prefix, start, end)

    def strip(self, chars=None):
        return self.__class__(self.data.strip(chars))

    def swapcase(self):
        return self.__class__(self.data.swapcase())

    def title(self):
        return self.__class__(self.data.title())

    def translate(self, *args):
        return self.__class__(self.data.translate(*args))

    def upper(self):
        return self.__class__(self.data.upper())

    def zfill(self, width):
        return self.__class__(self.data.zfill(width))
|
evalkit_llava/lib/python3.10/dataclasses.py
ADDED
|
@@ -0,0 +1,1453 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
import sys
|
| 3 |
+
import copy
|
| 4 |
+
import types
|
| 5 |
+
import inspect
|
| 6 |
+
import keyword
|
| 7 |
+
import builtins
|
| 8 |
+
import functools
|
| 9 |
+
import abc
|
| 10 |
+
import _thread
|
| 11 |
+
from types import FunctionType, GenericAlias
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
__all__ = ['dataclass',
|
| 15 |
+
'field',
|
| 16 |
+
'Field',
|
| 17 |
+
'FrozenInstanceError',
|
| 18 |
+
'InitVar',
|
| 19 |
+
'KW_ONLY',
|
| 20 |
+
'MISSING',
|
| 21 |
+
|
| 22 |
+
# Helper functions.
|
| 23 |
+
'fields',
|
| 24 |
+
'asdict',
|
| 25 |
+
'astuple',
|
| 26 |
+
'make_dataclass',
|
| 27 |
+
'replace',
|
| 28 |
+
'is_dataclass',
|
| 29 |
+
]
|
| 30 |
+
|
| 31 |
+
# Conditions for adding methods. The boxes indicate what action the
|
| 32 |
+
# dataclass decorator takes. For all of these tables, when I talk
|
| 33 |
+
# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm
|
| 34 |
+
# referring to the arguments to the @dataclass decorator. When
|
| 35 |
+
# checking if a dunder method already exists, I mean check for an
|
| 36 |
+
# entry in the class's __dict__. I never check to see if an attribute
|
| 37 |
+
# is defined in a base class.
|
| 38 |
+
|
| 39 |
+
# Key:
|
| 40 |
+
# +=========+=========================================+
|
| 41 |
+
# + Value | Meaning |
|
| 42 |
+
# +=========+=========================================+
|
| 43 |
+
# | <blank> | No action: no method is added. |
|
| 44 |
+
# +---------+-----------------------------------------+
|
| 45 |
+
# | add | Generated method is added. |
|
| 46 |
+
# +---------+-----------------------------------------+
|
| 47 |
+
# | raise | TypeError is raised. |
|
| 48 |
+
# +---------+-----------------------------------------+
|
| 49 |
+
# | None | Attribute is set to None. |
|
| 50 |
+
# +=========+=========================================+
|
| 51 |
+
|
| 52 |
+
# __init__
|
| 53 |
+
#
|
| 54 |
+
# +--- init= parameter
|
| 55 |
+
# |
|
| 56 |
+
# v | | |
|
| 57 |
+
# | no | yes | <--- class has __init__ in __dict__?
|
| 58 |
+
# +=======+=======+=======+
|
| 59 |
+
# | False | | |
|
| 60 |
+
# +-------+-------+-------+
|
| 61 |
+
# | True | add | | <- the default
|
| 62 |
+
# +=======+=======+=======+
|
| 63 |
+
|
| 64 |
+
# __repr__
|
| 65 |
+
#
|
| 66 |
+
# +--- repr= parameter
|
| 67 |
+
# |
|
| 68 |
+
# v | | |
|
| 69 |
+
# | no | yes | <--- class has __repr__ in __dict__?
|
| 70 |
+
# +=======+=======+=======+
|
| 71 |
+
# | False | | |
|
| 72 |
+
# +-------+-------+-------+
|
| 73 |
+
# | True | add | | <- the default
|
| 74 |
+
# +=======+=======+=======+
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
# __setattr__
|
| 78 |
+
# __delattr__
|
| 79 |
+
#
|
| 80 |
+
# +--- frozen= parameter
|
| 81 |
+
# |
|
| 82 |
+
# v | | |
|
| 83 |
+
# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__?
|
| 84 |
+
# +=======+=======+=======+
|
| 85 |
+
# | False | | | <- the default
|
| 86 |
+
# +-------+-------+-------+
|
| 87 |
+
# | True | add | raise |
|
| 88 |
+
# +=======+=======+=======+
|
| 89 |
+
# Raise because not adding these methods would break the "frozen-ness"
|
| 90 |
+
# of the class.
|
| 91 |
+
|
| 92 |
+
# __eq__
|
| 93 |
+
#
|
| 94 |
+
# +--- eq= parameter
|
| 95 |
+
# |
|
| 96 |
+
# v | | |
|
| 97 |
+
# | no | yes | <--- class has __eq__ in __dict__?
|
| 98 |
+
# +=======+=======+=======+
|
| 99 |
+
# | False | | |
|
| 100 |
+
# +-------+-------+-------+
|
| 101 |
+
# | True | add | | <- the default
|
| 102 |
+
# +=======+=======+=======+
|
| 103 |
+
|
| 104 |
+
# __lt__
|
| 105 |
+
# __le__
|
| 106 |
+
# __gt__
|
| 107 |
+
# __ge__
|
| 108 |
+
#
|
| 109 |
+
# +--- order= parameter
|
| 110 |
+
# |
|
| 111 |
+
# v | | |
|
| 112 |
+
# | no | yes | <--- class has any comparison method in __dict__?
|
| 113 |
+
# +=======+=======+=======+
|
| 114 |
+
# | False | | | <- the default
|
| 115 |
+
# +-------+-------+-------+
|
| 116 |
+
# | True | add | raise |
|
| 117 |
+
# +=======+=======+=======+
|
| 118 |
+
# Raise because to allow this case would interfere with using
|
| 119 |
+
# functools.total_ordering.
|
| 120 |
+
|
| 121 |
+
# __hash__
|
| 122 |
+
|
| 123 |
+
# +------------------- unsafe_hash= parameter
|
| 124 |
+
# | +----------- eq= parameter
|
| 125 |
+
# | | +--- frozen= parameter
|
| 126 |
+
# | | |
|
| 127 |
+
# v v v | | |
|
| 128 |
+
# | no | yes | <--- class has explicitly defined __hash__
|
| 129 |
+
# +=======+=======+=======+========+========+
|
| 130 |
+
# | False | False | False | | | No __eq__, use the base class __hash__
|
| 131 |
+
# +-------+-------+-------+--------+--------+
|
| 132 |
+
# | False | False | True | | | No __eq__, use the base class __hash__
|
| 133 |
+
# +-------+-------+-------+--------+--------+
|
| 134 |
+
# | False | True | False | None | | <-- the default, not hashable
|
| 135 |
+
# +-------+-------+-------+--------+--------+
|
| 136 |
+
# | False | True | True | add | | Frozen, so hashable, allows override
|
| 137 |
+
# +-------+-------+-------+--------+--------+
|
| 138 |
+
# | True | False | False | add | raise | Has no __eq__, but hashable
|
| 139 |
+
# +-------+-------+-------+--------+--------+
|
| 140 |
+
# | True | False | True | add | raise | Has no __eq__, but hashable
|
| 141 |
+
# +-------+-------+-------+--------+--------+
|
| 142 |
+
# | True | True | False | add | raise | Not frozen, but hashable
|
| 143 |
+
# +-------+-------+-------+--------+--------+
|
| 144 |
+
# | True | True | True | add | raise | Frozen, so hashable
|
| 145 |
+
# +=======+=======+=======+========+========+
|
| 146 |
+
# For boxes that are blank, __hash__ is untouched and therefore
|
| 147 |
+
# inherited from the base class. If the base is object, then
|
| 148 |
+
# id-based hashing is used.
|
| 149 |
+
#
|
| 150 |
+
# Note that a class may already have __hash__=None if it specified an
|
| 151 |
+
# __eq__ method in the class body (not one that was created by
|
| 152 |
+
# @dataclass).
|
| 153 |
+
#
|
| 154 |
+
# See _hash_action (below) for a coded version of this table.
|
| 155 |
+
|
| 156 |
+
# __match_args__
|
| 157 |
+
#
|
| 158 |
+
# +--- match_args= parameter
|
| 159 |
+
# |
|
| 160 |
+
# v | | |
|
| 161 |
+
# | no | yes | <--- class has __match_args__ in __dict__?
|
| 162 |
+
# +=======+=======+=======+
|
| 163 |
+
# | False | | |
|
| 164 |
+
# +-------+-------+-------+
|
| 165 |
+
# | True | add | | <- the default
|
| 166 |
+
# +=======+=======+=======+
|
| 167 |
+
# __match_args__ is always added unless the class already defines it. It is a
|
| 168 |
+
# tuple of __init__ parameter names; non-init fields must be matched by keyword.
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
# Raised when an attempt is made to modify a frozen class.
|
| 172 |
+
class FrozenInstanceError(AttributeError): pass
|
| 173 |
+
|
| 174 |
+
# A sentinel object for default values to signal that a default
|
| 175 |
+
# factory will be used. This is given a nice repr() which will appear
|
| 176 |
+
# in the function signature of dataclasses' constructors.
|
| 177 |
+
class _HAS_DEFAULT_FACTORY_CLASS:
|
| 178 |
+
def __repr__(self):
|
| 179 |
+
return '<factory>'
|
| 180 |
+
_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()
|
| 181 |
+
|
| 182 |
+
# A sentinel object to detect if a parameter is supplied or not. Use
|
| 183 |
+
# a class to give it a better repr.
|
| 184 |
+
class _MISSING_TYPE:
|
| 185 |
+
pass
|
| 186 |
+
MISSING = _MISSING_TYPE()
|
| 187 |
+
|
| 188 |
+
# A sentinel object to indicate that following fields are keyword-only by
|
| 189 |
+
# default. Use a class to give it a better repr.
|
| 190 |
+
class _KW_ONLY_TYPE:
|
| 191 |
+
pass
|
| 192 |
+
KW_ONLY = _KW_ONLY_TYPE()
|
| 193 |
+
|
| 194 |
+
# Since most per-field metadata will be unused, create an empty
|
| 195 |
+
# read-only proxy that can be shared among all fields.
|
| 196 |
+
_EMPTY_METADATA = types.MappingProxyType({})
|
| 197 |
+
|
| 198 |
+
# Markers for the various kinds of fields and pseudo-fields.
|
| 199 |
+
class _FIELD_BASE:
|
| 200 |
+
def __init__(self, name):
|
| 201 |
+
self.name = name
|
| 202 |
+
def __repr__(self):
|
| 203 |
+
return self.name
|
| 204 |
+
_FIELD = _FIELD_BASE('_FIELD')
|
| 205 |
+
_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR')
|
| 206 |
+
_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR')
|
| 207 |
+
|
| 208 |
+
# The name of an attribute on the class where we store the Field
|
| 209 |
+
# objects. Also used to check if a class is a Data Class.
|
| 210 |
+
_FIELDS = '__dataclass_fields__'
|
| 211 |
+
|
| 212 |
+
# The name of an attribute on the class that stores the parameters to
|
| 213 |
+
# @dataclass.
|
| 214 |
+
_PARAMS = '__dataclass_params__'
|
| 215 |
+
|
| 216 |
+
# The name of the function, that if it exists, is called at the end of
|
| 217 |
+
# __init__.
|
| 218 |
+
_POST_INIT_NAME = '__post_init__'
|
| 219 |
+
|
| 220 |
+
# String regex that string annotations for ClassVar or InitVar must match.
|
| 221 |
+
# Allows "identifier.identifier[" or "identifier[".
|
| 222 |
+
# https://bugs.python.org/issue33453 for details.
|
| 223 |
+
_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
|
| 224 |
+
|
| 225 |
+
# This function's logic is copied from "recursive_repr" function in
|
| 226 |
+
# reprlib module to avoid dependency.
|
| 227 |
+
def _recursive_repr(user_function):
|
| 228 |
+
# Decorator to make a repr function return "..." for a recursive
|
| 229 |
+
# call.
|
| 230 |
+
repr_running = set()
|
| 231 |
+
|
| 232 |
+
@functools.wraps(user_function)
|
| 233 |
+
def wrapper(self):
|
| 234 |
+
key = id(self), _thread.get_ident()
|
| 235 |
+
if key in repr_running:
|
| 236 |
+
return '...'
|
| 237 |
+
repr_running.add(key)
|
| 238 |
+
try:
|
| 239 |
+
result = user_function(self)
|
| 240 |
+
finally:
|
| 241 |
+
repr_running.discard(key)
|
| 242 |
+
return result
|
| 243 |
+
return wrapper
|
| 244 |
+
|
| 245 |
+
class InitVar:
|
| 246 |
+
__slots__ = ('type', )
|
| 247 |
+
|
| 248 |
+
def __init__(self, type):
|
| 249 |
+
self.type = type
|
| 250 |
+
|
| 251 |
+
def __repr__(self):
|
| 252 |
+
if isinstance(self.type, type) and not isinstance(self.type, GenericAlias):
|
| 253 |
+
type_name = self.type.__name__
|
| 254 |
+
else:
|
| 255 |
+
# typing objects, e.g. List[int]
|
| 256 |
+
type_name = repr(self.type)
|
| 257 |
+
return f'dataclasses.InitVar[{type_name}]'
|
| 258 |
+
|
| 259 |
+
def __class_getitem__(cls, type):
|
| 260 |
+
return InitVar(type)
|
| 261 |
+
|
| 262 |
+
# Instances of Field are only ever created from within this module,
|
| 263 |
+
# and only from the field() function, although Field instances are
|
| 264 |
+
# exposed externally as (conceptually) read-only objects.
|
| 265 |
+
#
|
| 266 |
+
# name and type are filled in after the fact, not in __init__.
|
| 267 |
+
# They're not known at the time this class is instantiated, but it's
|
| 268 |
+
# convenient if they're available later.
|
| 269 |
+
#
|
| 270 |
+
# When cls._FIELDS is filled in with a list of Field objects, the name
|
| 271 |
+
# and type fields will have been populated.
|
| 272 |
+
class Field:
|
| 273 |
+
__slots__ = ('name',
|
| 274 |
+
'type',
|
| 275 |
+
'default',
|
| 276 |
+
'default_factory',
|
| 277 |
+
'repr',
|
| 278 |
+
'hash',
|
| 279 |
+
'init',
|
| 280 |
+
'compare',
|
| 281 |
+
'metadata',
|
| 282 |
+
'kw_only',
|
| 283 |
+
'_field_type', # Private: not to be used by user code.
|
| 284 |
+
)
|
| 285 |
+
|
| 286 |
+
def __init__(self, default, default_factory, init, repr, hash, compare,
|
| 287 |
+
metadata, kw_only):
|
| 288 |
+
self.name = None
|
| 289 |
+
self.type = None
|
| 290 |
+
self.default = default
|
| 291 |
+
self.default_factory = default_factory
|
| 292 |
+
self.init = init
|
| 293 |
+
self.repr = repr
|
| 294 |
+
self.hash = hash
|
| 295 |
+
self.compare = compare
|
| 296 |
+
self.metadata = (_EMPTY_METADATA
|
| 297 |
+
if metadata is None else
|
| 298 |
+
types.MappingProxyType(metadata))
|
| 299 |
+
self.kw_only = kw_only
|
| 300 |
+
self._field_type = None
|
| 301 |
+
|
| 302 |
+
@_recursive_repr
|
| 303 |
+
def __repr__(self):
|
| 304 |
+
return ('Field('
|
| 305 |
+
f'name={self.name!r},'
|
| 306 |
+
f'type={self.type!r},'
|
| 307 |
+
f'default={self.default!r},'
|
| 308 |
+
f'default_factory={self.default_factory!r},'
|
| 309 |
+
f'init={self.init!r},'
|
| 310 |
+
f'repr={self.repr!r},'
|
| 311 |
+
f'hash={self.hash!r},'
|
| 312 |
+
f'compare={self.compare!r},'
|
| 313 |
+
f'metadata={self.metadata!r},'
|
| 314 |
+
f'kw_only={self.kw_only!r},'
|
| 315 |
+
f'_field_type={self._field_type}'
|
| 316 |
+
')')
|
| 317 |
+
|
| 318 |
+
# This is used to support the PEP 487 __set_name__ protocol in the
|
| 319 |
+
# case where we're using a field that contains a descriptor as a
|
| 320 |
+
# default value. For details on __set_name__, see
|
| 321 |
+
# https://www.python.org/dev/peps/pep-0487/#implementation-details.
|
| 322 |
+
#
|
| 323 |
+
# Note that in _process_class, this Field object is overwritten
|
| 324 |
+
# with the default value, so the end result is a descriptor that
|
| 325 |
+
# had __set_name__ called on it at the right time.
|
| 326 |
+
def __set_name__(self, owner, name):
|
| 327 |
+
func = getattr(type(self.default), '__set_name__', None)
|
| 328 |
+
if func:
|
| 329 |
+
# There is a __set_name__ method on the descriptor, call
|
| 330 |
+
# it.
|
| 331 |
+
func(self.default, owner, name)
|
| 332 |
+
|
| 333 |
+
__class_getitem__ = classmethod(GenericAlias)
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
class _DataclassParams:
|
| 337 |
+
__slots__ = ('init',
|
| 338 |
+
'repr',
|
| 339 |
+
'eq',
|
| 340 |
+
'order',
|
| 341 |
+
'unsafe_hash',
|
| 342 |
+
'frozen',
|
| 343 |
+
)
|
| 344 |
+
|
| 345 |
+
def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
|
| 346 |
+
self.init = init
|
| 347 |
+
self.repr = repr
|
| 348 |
+
self.eq = eq
|
| 349 |
+
self.order = order
|
| 350 |
+
self.unsafe_hash = unsafe_hash
|
| 351 |
+
self.frozen = frozen
|
| 352 |
+
|
| 353 |
+
def __repr__(self):
|
| 354 |
+
return ('_DataclassParams('
|
| 355 |
+
f'init={self.init!r},'
|
| 356 |
+
f'repr={self.repr!r},'
|
| 357 |
+
f'eq={self.eq!r},'
|
| 358 |
+
f'order={self.order!r},'
|
| 359 |
+
f'unsafe_hash={self.unsafe_hash!r},'
|
| 360 |
+
f'frozen={self.frozen!r}'
|
| 361 |
+
')')
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
# This function is used instead of exposing Field creation directly,
|
| 365 |
+
# so that a type checker can be told (via overloads) that this is a
|
| 366 |
+
# function whose type depends on its parameters.
|
| 367 |
+
def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True,
|
| 368 |
+
hash=None, compare=True, metadata=None, kw_only=MISSING):
|
| 369 |
+
"""Return an object to identify dataclass fields.
|
| 370 |
+
|
| 371 |
+
default is the default value of the field. default_factory is a
|
| 372 |
+
0-argument function called to initialize a field's value. If init
|
| 373 |
+
is true, the field will be a parameter to the class's __init__()
|
| 374 |
+
function. If repr is true, the field will be included in the
|
| 375 |
+
object's repr(). If hash is true, the field will be included in the
|
| 376 |
+
object's hash(). If compare is true, the field will be used in
|
| 377 |
+
comparison functions. metadata, if specified, must be a mapping
|
| 378 |
+
which is stored but not otherwise examined by dataclass. If kw_only
|
| 379 |
+
is true, the field will become a keyword-only parameter to
|
| 380 |
+
__init__().
|
| 381 |
+
|
| 382 |
+
It is an error to specify both default and default_factory.
|
| 383 |
+
"""
|
| 384 |
+
|
| 385 |
+
if default is not MISSING and default_factory is not MISSING:
|
| 386 |
+
raise ValueError('cannot specify both default and default_factory')
|
| 387 |
+
return Field(default, default_factory, init, repr, hash, compare,
|
| 388 |
+
metadata, kw_only)
|
| 389 |
+
|
| 390 |
+
|
| 391 |
+
def _fields_in_init_order(fields):
|
| 392 |
+
# Returns the fields as __init__ will output them. It returns 2 tuples:
|
| 393 |
+
# the first for normal args, and the second for keyword args.
|
| 394 |
+
|
| 395 |
+
return (tuple(f for f in fields if f.init and not f.kw_only),
|
| 396 |
+
tuple(f for f in fields if f.init and f.kw_only)
|
| 397 |
+
)
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
def _tuple_str(obj_name, fields):
|
| 401 |
+
# Return a string representing each field of obj_name as a tuple
|
| 402 |
+
# member. So, if fields is ['x', 'y'] and obj_name is "self",
|
| 403 |
+
# return "(self.x,self.y)".
|
| 404 |
+
|
| 405 |
+
# Special case for the 0-tuple.
|
| 406 |
+
if not fields:
|
| 407 |
+
return '()'
|
| 408 |
+
# Note the trailing comma, needed if this turns out to be a 1-tuple.
|
| 409 |
+
return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'
|
| 410 |
+
|
| 411 |
+
|
| 412 |
+
def _create_fn(name, args, body, *, globals=None, locals=None,
|
| 413 |
+
return_type=MISSING):
|
| 414 |
+
# Note that we may mutate locals. Callers beware!
|
| 415 |
+
# The only callers are internal to this module, so no
|
| 416 |
+
# worries about external callers.
|
| 417 |
+
if locals is None:
|
| 418 |
+
locals = {}
|
| 419 |
+
return_annotation = ''
|
| 420 |
+
if return_type is not MISSING:
|
| 421 |
+
locals['_return_type'] = return_type
|
| 422 |
+
return_annotation = '->_return_type'
|
| 423 |
+
args = ','.join(args)
|
| 424 |
+
body = '\n'.join(f' {b}' for b in body)
|
| 425 |
+
|
| 426 |
+
# Compute the text of the entire function.
|
| 427 |
+
txt = f' def {name}({args}){return_annotation}:\n{body}'
|
| 428 |
+
|
| 429 |
+
local_vars = ', '.join(locals.keys())
|
| 430 |
+
txt = f"def __create_fn__({local_vars}):\n{txt}\n return {name}"
|
| 431 |
+
ns = {}
|
| 432 |
+
exec(txt, globals, ns)
|
| 433 |
+
return ns['__create_fn__'](**locals)
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
def _field_assign(frozen, name, value, self_name):
|
| 437 |
+
# If we're a frozen class, then assign to our fields in __init__
|
| 438 |
+
# via object.__setattr__. Otherwise, just use a simple
|
| 439 |
+
# assignment.
|
| 440 |
+
#
|
| 441 |
+
# self_name is what "self" is called in this function: don't
|
| 442 |
+
# hard-code "self", since that might be a field name.
|
| 443 |
+
if frozen:
|
| 444 |
+
return f'__dataclass_builtins_object__.__setattr__({self_name},{name!r},{value})'
|
| 445 |
+
return f'{self_name}.{name}={value}'
|
| 446 |
+
|
| 447 |
+
|
| 448 |
+
def _field_init(f, frozen, globals, self_name, slots):
|
| 449 |
+
# Return the text of the line in the body of __init__ that will
|
| 450 |
+
# initialize this field.
|
| 451 |
+
|
| 452 |
+
default_name = f'_dflt_{f.name}'
|
| 453 |
+
if f.default_factory is not MISSING:
|
| 454 |
+
if f.init:
|
| 455 |
+
# This field has a default factory. If a parameter is
|
| 456 |
+
# given, use it. If not, call the factory.
|
| 457 |
+
globals[default_name] = f.default_factory
|
| 458 |
+
value = (f'{default_name}() '
|
| 459 |
+
f'if {f.name} is _HAS_DEFAULT_FACTORY '
|
| 460 |
+
f'else {f.name}')
|
| 461 |
+
else:
|
| 462 |
+
# This is a field that's not in the __init__ params, but
|
| 463 |
+
# has a default factory function. It needs to be
|
| 464 |
+
# initialized here by calling the factory function,
|
| 465 |
+
# because there's no other way to initialize it.
|
| 466 |
+
|
| 467 |
+
# For a field initialized with a default=defaultvalue, the
|
| 468 |
+
# class dict just has the default value
|
| 469 |
+
# (cls.fieldname=defaultvalue). But that won't work for a
|
| 470 |
+
# default factory, the factory must be called in __init__
|
| 471 |
+
# and we must assign that to self.fieldname. We can't
|
| 472 |
+
# fall back to the class dict's value, both because it's
|
| 473 |
+
# not set, and because it might be different per-class
|
| 474 |
+
# (which, after all, is why we have a factory function!).
|
| 475 |
+
|
| 476 |
+
globals[default_name] = f.default_factory
|
| 477 |
+
value = f'{default_name}()'
|
| 478 |
+
else:
|
| 479 |
+
# No default factory.
|
| 480 |
+
if f.init:
|
| 481 |
+
if f.default is MISSING:
|
| 482 |
+
# There's no default, just do an assignment.
|
| 483 |
+
value = f.name
|
| 484 |
+
elif f.default is not MISSING:
|
| 485 |
+
globals[default_name] = f.default
|
| 486 |
+
value = f.name
|
| 487 |
+
else:
|
| 488 |
+
# If the class has slots, then initialize this field.
|
| 489 |
+
if slots and f.default is not MISSING:
|
| 490 |
+
globals[default_name] = f.default
|
| 491 |
+
value = default_name
|
| 492 |
+
else:
|
| 493 |
+
# This field does not need initialization: reading from it will
|
| 494 |
+
# just use the class attribute that contains the default.
|
| 495 |
+
# Signify that to the caller by returning None.
|
| 496 |
+
return None
|
| 497 |
+
|
| 498 |
+
# Only test this now, so that we can create variables for the
|
| 499 |
+
# default. However, return None to signify that we're not going
|
| 500 |
+
# to actually do the assignment statement for InitVars.
|
| 501 |
+
if f._field_type is _FIELD_INITVAR:
|
| 502 |
+
return None
|
| 503 |
+
|
| 504 |
+
# Now, actually generate the field assignment.
|
| 505 |
+
return _field_assign(frozen, f.name, value, self_name)
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
def _init_param(f):
|
| 509 |
+
# Return the __init__ parameter string for this field. For
|
| 510 |
+
# example, the equivalent of 'x:int=3' (except instead of 'int',
|
| 511 |
+
# reference a variable set to int, and instead of '3', reference a
|
| 512 |
+
# variable set to 3).
|
| 513 |
+
if f.default is MISSING and f.default_factory is MISSING:
|
| 514 |
+
# There's no default, and no default_factory, just output the
|
| 515 |
+
# variable name and type.
|
| 516 |
+
default = ''
|
| 517 |
+
elif f.default is not MISSING:
|
| 518 |
+
# There's a default, this will be the name that's used to look
|
| 519 |
+
# it up.
|
| 520 |
+
default = f'=_dflt_{f.name}'
|
| 521 |
+
elif f.default_factory is not MISSING:
|
| 522 |
+
# There's a factory function. Set a marker.
|
| 523 |
+
default = '=_HAS_DEFAULT_FACTORY'
|
| 524 |
+
return f'{f.name}:_type_{f.name}{default}'
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init,
|
| 528 |
+
self_name, globals, slots):
|
| 529 |
+
# fields contains both real fields and InitVar pseudo-fields.
|
| 530 |
+
|
| 531 |
+
# Make sure we don't have fields without defaults following fields
|
| 532 |
+
# with defaults. This actually would be caught when exec-ing the
|
| 533 |
+
# function source code, but catching it here gives a better error
|
| 534 |
+
# message, and future-proofs us in case we build up the function
|
| 535 |
+
# using ast.
|
| 536 |
+
|
| 537 |
+
seen_default = False
|
| 538 |
+
for f in std_fields:
|
| 539 |
+
# Only consider the non-kw-only fields in the __init__ call.
|
| 540 |
+
if f.init:
|
| 541 |
+
if not (f.default is MISSING and f.default_factory is MISSING):
|
| 542 |
+
seen_default = True
|
| 543 |
+
elif seen_default:
|
| 544 |
+
raise TypeError(f'non-default argument {f.name!r} '
|
| 545 |
+
'follows default argument')
|
| 546 |
+
|
| 547 |
+
locals = {f'_type_{f.name}': f.type for f in fields}
|
| 548 |
+
locals.update({
|
| 549 |
+
'MISSING': MISSING,
|
| 550 |
+
'_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY,
|
| 551 |
+
'__dataclass_builtins_object__': object,
|
| 552 |
+
})
|
| 553 |
+
|
| 554 |
+
body_lines = []
|
| 555 |
+
for f in fields:
|
| 556 |
+
line = _field_init(f, frozen, locals, self_name, slots)
|
| 557 |
+
# line is None means that this field doesn't require
|
| 558 |
+
# initialization (it's a pseudo-field). Just skip it.
|
| 559 |
+
if line:
|
| 560 |
+
body_lines.append(line)
|
| 561 |
+
|
| 562 |
+
# Does this class have a post-init function?
|
| 563 |
+
if has_post_init:
|
| 564 |
+
params_str = ','.join(f.name for f in fields
|
| 565 |
+
if f._field_type is _FIELD_INITVAR)
|
| 566 |
+
body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})')
|
| 567 |
+
|
| 568 |
+
# If no body lines, use 'pass'.
|
| 569 |
+
if not body_lines:
|
| 570 |
+
body_lines = ['pass']
|
| 571 |
+
|
| 572 |
+
_init_params = [_init_param(f) for f in std_fields]
|
| 573 |
+
if kw_only_fields:
|
| 574 |
+
# Add the keyword-only args. Because the * can only be added if
|
| 575 |
+
# there's at least one keyword-only arg, there needs to be a test here
|
| 576 |
+
# (instead of just concatenting the lists together).
|
| 577 |
+
_init_params += ['*']
|
| 578 |
+
_init_params += [_init_param(f) for f in kw_only_fields]
|
| 579 |
+
return _create_fn('__init__',
|
| 580 |
+
[self_name] + _init_params,
|
| 581 |
+
body_lines,
|
| 582 |
+
locals=locals,
|
| 583 |
+
globals=globals,
|
| 584 |
+
return_type=None)
|
| 585 |
+
|
| 586 |
+
|
| 587 |
+
def _repr_fn(fields, globals):
|
| 588 |
+
fn = _create_fn('__repr__',
|
| 589 |
+
('self',),
|
| 590 |
+
['return self.__class__.__qualname__ + f"(' +
|
| 591 |
+
', '.join([f"{f.name}={{self.{f.name}!r}}"
|
| 592 |
+
for f in fields]) +
|
| 593 |
+
')"'],
|
| 594 |
+
globals=globals)
|
| 595 |
+
return _recursive_repr(fn)
|
| 596 |
+
|
| 597 |
+
|
| 598 |
+
def _frozen_get_del_attr(cls, fields, globals):
|
| 599 |
+
locals = {'cls': cls,
|
| 600 |
+
'FrozenInstanceError': FrozenInstanceError}
|
| 601 |
+
if fields:
|
| 602 |
+
fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
|
| 603 |
+
else:
|
| 604 |
+
# Special case for the zero-length tuple.
|
| 605 |
+
fields_str = '()'
|
| 606 |
+
return (_create_fn('__setattr__',
|
| 607 |
+
('self', 'name', 'value'),
|
| 608 |
+
(f'if type(self) is cls or name in {fields_str}:',
|
| 609 |
+
' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
|
| 610 |
+
f'super(cls, self).__setattr__(name, value)'),
|
| 611 |
+
locals=locals,
|
| 612 |
+
globals=globals),
|
| 613 |
+
_create_fn('__delattr__',
|
| 614 |
+
('self', 'name'),
|
| 615 |
+
(f'if type(self) is cls or name in {fields_str}:',
|
| 616 |
+
' raise FrozenInstanceError(f"cannot delete field {name!r}")',
|
| 617 |
+
f'super(cls, self).__delattr__(name)'),
|
| 618 |
+
locals=locals,
|
| 619 |
+
globals=globals),
|
| 620 |
+
)
|
| 621 |
+
|
| 622 |
+
|
| 623 |
+
def _cmp_fn(name, op, self_tuple, other_tuple, globals):
|
| 624 |
+
# Create a comparison function. If the fields in the object are
|
| 625 |
+
# named 'x' and 'y', then self_tuple is the string
|
| 626 |
+
# '(self.x,self.y)' and other_tuple is the string
|
| 627 |
+
# '(other.x,other.y)'.
|
| 628 |
+
|
| 629 |
+
return _create_fn(name,
|
| 630 |
+
('self', 'other'),
|
| 631 |
+
[ 'if other.__class__ is self.__class__:',
|
| 632 |
+
f' return {self_tuple}{op}{other_tuple}',
|
| 633 |
+
'return NotImplemented'],
|
| 634 |
+
globals=globals)
|
| 635 |
+
|
| 636 |
+
|
| 637 |
+
def _hash_fn(fields, globals):
|
| 638 |
+
self_tuple = _tuple_str('self', fields)
|
| 639 |
+
return _create_fn('__hash__',
|
| 640 |
+
('self',),
|
| 641 |
+
[f'return hash({self_tuple})'],
|
| 642 |
+
globals=globals)
|
| 643 |
+
|
| 644 |
+
|
| 645 |
+
def _is_classvar(a_type, typing):
|
| 646 |
+
# This test uses a typing internal class, but it's the best way to
|
| 647 |
+
# test if this is a ClassVar.
|
| 648 |
+
return (a_type is typing.ClassVar
|
| 649 |
+
or (type(a_type) is typing._GenericAlias
|
| 650 |
+
and a_type.__origin__ is typing.ClassVar))
|
| 651 |
+
|
| 652 |
+
|
| 653 |
+
def _is_initvar(a_type, dataclasses):
|
| 654 |
+
# The module we're checking against is the module we're
|
| 655 |
+
# currently in (dataclasses.py).
|
| 656 |
+
return (a_type is dataclasses.InitVar
|
| 657 |
+
or type(a_type) is dataclasses.InitVar)
|
| 658 |
+
|
| 659 |
+
def _is_kw_only(a_type, dataclasses):
|
| 660 |
+
return a_type is dataclasses.KW_ONLY
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
def _is_type(annotation, cls, a_module, a_type, is_type_predicate):
    # Given a type annotation string, does it refer to a_type in
    # a_module?  For example, when checking that annotation denotes a
    # ClassVar, then a_module is typing, and a_type is
    # typing.ClassVar.

    # It's possible to look up a_module given a_type, but it involves
    # looking in sys.modules (again!), and seems like a waste since
    # the caller already knows a_module.

    # - annotation is a string type annotation
    # - cls is the class that this annotation was found in
    # - a_module is the module we want to match
    # - a_type is the type in that module we want to match
    # - is_type_predicate is a function called with (obj, a_module)
    #   that determines if obj is of the desired type.

    # Since this test does not do a local namespace lookup (and
    # instead only a module (global) lookup), there are some things it
    # gets wrong.

    # With string annotations, cv0 will be detected as a ClassVar:
    #   CV = ClassVar
    #   @dataclass
    #   class C0:
    #     cv0: CV

    # But in this example cv1 will not be detected as a ClassVar:
    #   @dataclass
    #   class C1:
    #     CV = ClassVar
    #     cv1: CV

    # In C1, the code in this function (_is_type) will look up "CV" in
    # the module and not find it, so it will not consider cv1 as a
    # ClassVar.  This is a fairly obscure corner case, and the best
    # way to fix it would be to eval() the string "CV" with the
    # correct global and local namespaces.  However that would involve
    # a eval() penalty for every single field of every dataclass
    # that's defined.  It was judged not worth it.

    # _MODULE_IDENTIFIER_RE splits an annotation like "typing.ClassVar"
    # into an optional module prefix (group 1) and the bare name
    # (group 2); an annotation with no dot leaves group 1 empty.
    match = _MODULE_IDENTIFIER_RE.match(annotation)
    if match:
        ns = None
        module_name = match.group(1)
        if not module_name:
            # No module name, assume the class's module did
            # "from dataclasses import InitVar".
            ns = sys.modules.get(cls.__module__).__dict__
        else:
            # Look up module_name in the class's module.
            module = sys.modules.get(cls.__module__)
            if module and module.__dict__.get(module_name) is a_module:
                ns = sys.modules.get(a_type.__module__).__dict__
        if ns and is_type_predicate(ns.get(match.group(2)), a_module):
            return True
    return False
|
| 720 |
+
|
| 721 |
+
|
| 722 |
+
def _get_field(cls, a_name, a_type, default_kw_only):
    # Return a Field object for this field name and type.  ClassVars and
    # InitVars are also returned, but marked as such (see f._field_type).
    # default_kw_only is the value of kw_only to use if there isn't a field()
    # that defines it.

    # If the default value isn't derived from Field, then it's only a
    # normal default value.  Convert it to a Field().
    default = getattr(cls, a_name, MISSING)
    if isinstance(default, Field):
        f = default
    else:
        if isinstance(default, types.MemberDescriptorType):
            # This is a field in __slots__, so it has no default value.
            default = MISSING
        f = field(default=default)

    # Only at this point do we know the name and the type.  Set them.
    f.name = a_name
    f.type = a_type

    # Assume it's a normal field until proven otherwise.  We're next
    # going to decide if it's a ClassVar or InitVar, everything else
    # is just a normal field.
    f._field_type = _FIELD

    # In addition to checking for actual types here, also check for
    # string annotations.  get_type_hints() won't always work for us
    # (see https://github.com/python/typing/issues/508 for example),
    # plus it's expensive and would require an eval for every string
    # annotation.  So, make a best effort to see if this is a ClassVar
    # or InitVar using regex's and checking that the thing referenced
    # is actually of the correct type.

    # For the complete discussion, see https://bugs.python.org/issue33453

    # If typing has not been imported, then it's impossible for any
    # annotation to be a ClassVar.  So, only look for ClassVar if
    # typing has been imported by any module (not necessarily cls's
    # module).
    typing = sys.modules.get('typing')
    if typing:
        if (_is_classvar(a_type, typing)
            or (isinstance(f.type, str)
                and _is_type(f.type, cls, typing, typing.ClassVar,
                             _is_classvar))):
            f._field_type = _FIELD_CLASSVAR

    # If the type is InitVar, or if it's a matching string annotation,
    # then it's an InitVar.
    if f._field_type is _FIELD:
        # The module we're checking against is the module we're
        # currently in (dataclasses.py).
        dataclasses = sys.modules[__name__]
        if (_is_initvar(a_type, dataclasses)
            or (isinstance(f.type, str)
                and _is_type(f.type, cls, dataclasses, dataclasses.InitVar,
                             _is_initvar))):
            f._field_type = _FIELD_INITVAR

    # Validations for individual fields.  This is delayed until now,
    # instead of in the Field() constructor, since only here do we
    # know the field name, which allows for better error reporting.

    # Special restrictions for ClassVar and InitVar.
    if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
        if f.default_factory is not MISSING:
            raise TypeError(f'field {f.name} cannot have a '
                            'default factory')
        # Should I check for other field settings? default_factory
        # seems the most serious to check for.  Maybe add others.  For
        # example, how about init=False (or really,
        # init=<not-the-default-init-value>)?  It makes no sense for
        # ClassVar and InitVar to specify init=<anything>.

    # kw_only validation and assignment.
    if f._field_type in (_FIELD, _FIELD_INITVAR):
        # For real and InitVar fields, if kw_only wasn't specified use the
        # default value.
        if f.kw_only is MISSING:
            f.kw_only = default_kw_only
    else:
        # Make sure kw_only isn't set for ClassVars
        assert f._field_type is _FIELD_CLASSVAR
        if f.kw_only is not MISSING:
            raise TypeError(f'field {f.name} is a ClassVar but specifies '
                            'kw_only')

    # For real fields, disallow mutable defaults for known types.
    if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
        raise ValueError(f'mutable default {type(f.default)} for field '
                         f'{f.name} is not allowed: use default_factory')

    return f
|
| 816 |
+
|
| 817 |
+
def _set_qualname(cls, value):
|
| 818 |
+
# Ensure that the functions returned from _create_fn uses the proper
|
| 819 |
+
# __qualname__ (the class they belong to).
|
| 820 |
+
if isinstance(value, FunctionType):
|
| 821 |
+
value.__qualname__ = f"{cls.__qualname__}.{value.__name__}"
|
| 822 |
+
return value
|
| 823 |
+
|
| 824 |
+
def _set_new_attribute(cls, name, value):
    # Install value on cls only if the attribute does not already
    # exist.  Returns True when the attribute was already present
    # (and nothing was changed), False when it was installed.
    if name not in cls.__dict__:
        _set_qualname(cls, value)
        setattr(cls, name, value)
        return False
    return True
|
| 832 |
+
|
| 833 |
+
|
| 834 |
+
# Decide if/how we're going to create a hash function. Key is
|
| 835 |
+
# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to
|
| 836 |
+
# take. The common case is to do nothing, so instead of providing a
|
| 837 |
+
# function that is a no-op, use None to signify that.
|
| 838 |
+
|
| 839 |
+
def _hash_set_none(cls, fields, globals):
|
| 840 |
+
return None
|
| 841 |
+
|
| 842 |
+
def _hash_add(cls, fields, globals):
    # Action: synthesize a __hash__ over fields whose hash flag is
    # true; an unset hash flag (None) falls back to the compare flag.
    hashed = [f for f in fields
              if (f.hash if f.hash is not None else f.compare)]
    return _set_qualname(cls, _hash_fn(hashed, globals))
|
| 845 |
+
|
| 846 |
+
def _hash_exception(cls, fields, globals):
|
| 847 |
+
# Raise an exception.
|
| 848 |
+
raise TypeError(f'Cannot overwrite attribute __hash__ '
|
| 849 |
+
f'in class {cls.__name__}')
|
| 850 |
+
|
| 851 |
+
# Lookup table mapping the four boolean decorator inputs to the hash
# action to perform (None means "leave __hash__ alone").
#
#                          +-------------------------------------- unsafe_hash?
#                          |      +------------------------------- eq?
#                          |      |      +------------------------ frozen?
#                          |      |      |      +----------------  has-explicit-hash?
#                          |      |      |      |
#                          |      |      |      |        +-------  action
#                          |      |      |      |        |
#                          v      v      v      v        v
_hash_action = {(False, False, False, False): None,
                (False, False, False, True ): None,
                (False, False, True,  False): None,
                (False, False, True,  True ): None,
                (False, True,  False, False): _hash_set_none,
                (False, True,  False, True ): None,
                (False, True,  True,  False): _hash_add,
                (False, True,  True,  True ): None,
                (True,  False, False, False): _hash_add,
                (True,  False, False, True ): _hash_exception,
                (True,  False, True,  False): _hash_add,
                (True,  False, True,  True ): _hash_exception,
                (True,  True,  False, False): _hash_add,
                (True,  True,  False, True ): _hash_exception,
                (True,  True,  True,  False): _hash_add,
                (True,  True,  True,  True ): _hash_exception,
                }
# See https://bugs.python.org/issue32929#msg312829 for an if-statement
# version of this table.
|
| 879 |
+
|
| 880 |
+
|
| 881 |
+
def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
                   match_args, kw_only, slots):
    # The workhorse behind @dataclass: collects fields (including
    # inherited ones), validates them, and installs the requested
    # generated methods on cls.

    # Now that dicts retain insertion order, there's no reason to use
    # an ordered dict.  I am leveraging that ordering here, because
    # derived class fields overwrite base class fields, but the order
    # is defined by the base class, which is found first.
    fields = {}

    if cls.__module__ in sys.modules:
        globals = sys.modules[cls.__module__].__dict__
    else:
        # Theoretically this can happen if someone writes
        # a custom string to cls.__module__.  In which case
        # such dataclass won't be fully introspectable
        # (w.r.t. typing.get_type_hints) but will still function
        # correctly.
        globals = {}

    setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
                                           unsafe_hash, frozen))

    # Find our base classes in reverse MRO order, and exclude
    # ourselves.  In reversed order so that more derived classes
    # override earlier field definitions in base classes.  As long as
    # we're iterating over them, see if any are frozen.
    any_frozen_base = False
    has_dataclass_bases = False
    for b in cls.__mro__[-1:0:-1]:
        # Only process classes that have been processed by our
        # decorator.  That is, they have a _FIELDS attribute.
        base_fields = getattr(b, _FIELDS, None)
        if base_fields is not None:
            has_dataclass_bases = True
            for f in base_fields.values():
                fields[f.name] = f
            if getattr(b, _PARAMS).frozen:
                any_frozen_base = True

    # Annotations that are defined in this class (not in base
    # classes).  If __annotations__ isn't present, then this class
    # adds no new annotations.  We use this to compute fields that are
    # added by this class.
    #
    # Fields are found from cls_annotations, which is guaranteed to be
    # ordered.  Default values are from class attributes, if a field
    # has a default.  If the default value is a Field(), then it
    # contains additional info beyond (and possibly including) the
    # actual default value.  Pseudo-fields ClassVars and InitVars are
    # included, despite the fact that they're not real fields.  That's
    # dealt with later.
    cls_annotations = cls.__dict__.get('__annotations__', {})

    # Now find fields in our class.  While doing so, validate some
    # things, and set the default values (as class attributes) where
    # we can.
    cls_fields = []
    # Get a reference to this module for the _is_kw_only() test.
    KW_ONLY_seen = False
    dataclasses = sys.modules[__name__]
    # NOTE: the loop variable 'type' intentionally shadows the builtin
    # here (it matches the annotation terminology).
    for name, type in cls_annotations.items():
        # See if this is a marker to change the value of kw_only.
        if (_is_kw_only(type, dataclasses)
            or (isinstance(type, str)
                and _is_type(type, cls, dataclasses, dataclasses.KW_ONLY,
                             _is_kw_only))):
            # Switch the default to kw_only=True, and ignore this
            # annotation: it's not a real field.
            if KW_ONLY_seen:
                raise TypeError(f'{name!r} is KW_ONLY, but KW_ONLY '
                                'has already been specified')
            KW_ONLY_seen = True
            kw_only = True
        else:
            # Otherwise it's a field of some type.
            cls_fields.append(_get_field(cls, name, type, kw_only))

    for f in cls_fields:
        fields[f.name] = f

        # If the class attribute (which is the default value for this
        # field) exists and is of type 'Field', replace it with the
        # real default.  This is so that normal class introspection
        # sees a real default value, not a Field.
        if isinstance(getattr(cls, f.name, None), Field):
            if f.default is MISSING:
                # If there's no default, delete the class attribute.
                # This happens if we specify field(repr=False), for
                # example (that is, we specified a field object, but
                # no default value).  Also if we're using a default
                # factory.  The class attribute should not be set at
                # all in the post-processed class.
                delattr(cls, f.name)
            else:
                setattr(cls, f.name, f.default)

    # Do we have any Field members that don't also have annotations?
    for name, value in cls.__dict__.items():
        if isinstance(value, Field) and not name in cls_annotations:
            raise TypeError(f'{name!r} is a field but has no type annotation')

    # Check rules that apply if we are derived from any dataclasses.
    if has_dataclass_bases:
        # Raise an exception if any of our bases are frozen, but we're not.
        if any_frozen_base and not frozen:
            raise TypeError('cannot inherit non-frozen dataclass from a '
                            'frozen one')

        # Raise an exception if we're frozen, but none of our bases are.
        if not any_frozen_base and frozen:
            raise TypeError('cannot inherit frozen dataclass from a '
                            'non-frozen one')

    # Remember all of the fields on our class (including bases).  This
    # also marks this class as being a dataclass.
    setattr(cls, _FIELDS, fields)

    # Was this class defined with an explicit __hash__?  Note that if
    # __eq__ is defined in this class, then python will automatically
    # set __hash__ to None.  This is a heuristic, as it's possible
    # that such a __hash__ == None was not auto-generated, but it
    # close enough.
    class_hash = cls.__dict__.get('__hash__', MISSING)
    has_explicit_hash = not (class_hash is MISSING or
                             (class_hash is None and '__eq__' in cls.__dict__))

    # If we're generating ordering methods, we must be generating the
    # eq methods.
    if order and not eq:
        raise ValueError('eq must be true if order is true')

    # Include InitVars and regular fields (so, not ClassVars).  This is
    # initialized here, outside of the "if init:" test, because std_init_fields
    # is used with match_args, below.
    all_init_fields = [f for f in fields.values()
                       if f._field_type in (_FIELD, _FIELD_INITVAR)]
    (std_init_fields,
     kw_only_init_fields) = _fields_in_init_order(all_init_fields)

    if init:
        # Does this class have a post-init function?
        has_post_init = hasattr(cls, _POST_INIT_NAME)

        _set_new_attribute(cls, '__init__',
                           _init_fn(all_init_fields,
                                    std_init_fields,
                                    kw_only_init_fields,
                                    frozen,
                                    has_post_init,
                                    # The name to use for the "self"
                                    # param in __init__.  Use "self"
                                    # if possible.
                                    '__dataclass_self__' if 'self' in fields
                                            else 'self',
                                    globals,
                                    slots,
                          ))

    # Get the fields as a list, and include only real fields.  This is
    # used in all of the following methods.
    field_list = [f for f in fields.values() if f._field_type is _FIELD]

    if repr:
        flds = [f for f in field_list if f.repr]
        _set_new_attribute(cls, '__repr__', _repr_fn(flds, globals))

    if eq:
        # Create __eq__ method.  There's no need for a __ne__ method,
        # since python will call __eq__ and negate it.
        flds = [f for f in field_list if f.compare]
        self_tuple = _tuple_str('self', flds)
        other_tuple = _tuple_str('other', flds)
        _set_new_attribute(cls, '__eq__',
                           _cmp_fn('__eq__', '==',
                                   self_tuple, other_tuple,
                                   globals=globals))

    if order:
        # Create and set the ordering methods.
        flds = [f for f in field_list if f.compare]
        self_tuple = _tuple_str('self', flds)
        other_tuple = _tuple_str('other', flds)
        for name, op in [('__lt__', '<'),
                         ('__le__', '<='),
                         ('__gt__', '>'),
                         ('__ge__', '>='),
                         ]:
            if _set_new_attribute(cls, name,
                                  _cmp_fn(name, op, self_tuple, other_tuple,
                                          globals=globals)):
                raise TypeError(f'Cannot overwrite attribute {name} '
                                f'in class {cls.__name__}. Consider using '
                                'functools.total_ordering')

    if frozen:
        for fn in _frozen_get_del_attr(cls, field_list, globals):
            if _set_new_attribute(cls, fn.__name__, fn):
                raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
                                f'in class {cls.__name__}')

    # Decide if/how we're going to create a hash function.
    hash_action = _hash_action[bool(unsafe_hash),
                               bool(eq),
                               bool(frozen),
                               has_explicit_hash]
    if hash_action:
        # No need to call _set_new_attribute here, since by the time
        # we're here the overwriting is unconditional.
        cls.__hash__ = hash_action(cls, field_list, globals)

    if not getattr(cls, '__doc__'):
        # Create a class doc-string.
        cls.__doc__ = (cls.__name__ +
                       str(inspect.signature(cls)).replace(' -> None', ''))

    if match_args:
        # I could probably compute this once
        _set_new_attribute(cls, '__match_args__',
                           tuple(f.name for f in std_init_fields))

    if slots:
        cls = _add_slots(cls, frozen)

    abc.update_abstractmethods(cls)

    return cls
|
| 1106 |
+
|
| 1107 |
+
|
| 1108 |
+
# _dataclass_getstate and _dataclass_setstate are needed for pickling frozen
|
| 1109 |
+
# classes with slots. These could be slighly more performant if we generated
|
| 1110 |
+
# the code instead of iterating over fields. But that can be a project for
|
| 1111 |
+
# another day, if performance becomes an issue.
|
| 1112 |
+
def _dataclass_getstate(self):
|
| 1113 |
+
return [getattr(self, f.name) for f in fields(self)]
|
| 1114 |
+
|
| 1115 |
+
|
| 1116 |
+
def _dataclass_setstate(self, state):
|
| 1117 |
+
for field, value in zip(fields(self), state):
|
| 1118 |
+
# use setattr because dataclass may be frozen
|
| 1119 |
+
object.__setattr__(self, field.name, value)
|
| 1120 |
+
|
| 1121 |
+
|
| 1122 |
+
def _add_slots(cls, is_frozen):
    # Rebuild cls with __slots__ populated from its dataclass fields.
    # Need to create a new class, since we can't set __slots__
    # after a class has been created.

    # Make sure __slots__ isn't already set.
    if '__slots__' in cls.__dict__:
        raise TypeError(f'{cls.__name__} already specifies __slots__')

    # Create a new dict for our new class.
    cls_dict = dict(cls.__dict__)
    field_names = tuple(f.name for f in fields(cls))
    cls_dict['__slots__'] = field_names
    for field_name in field_names:
        # Remove our attributes, if present.  They'll still be
        # available in _MARKER.
        cls_dict.pop(field_name, None)

    # Remove __dict__ itself.
    cls_dict.pop('__dict__', None)

    # And finally create the class.
    qualname = getattr(cls, '__qualname__', None)
    cls = type(cls)(cls.__name__, cls.__bases__, cls_dict)
    if qualname is not None:
        cls.__qualname__ = qualname

    if is_frozen:
        # Need this for pickling frozen classes with slots.
        cls.__getstate__ = _dataclass_getstate
        cls.__setstate__ = _dataclass_setstate

    return cls
|
| 1154 |
+
|
| 1155 |
+
|
| 1156 |
+
def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False,
              unsafe_hash=False, frozen=False, match_args=True,
              kw_only=False, slots=False):
    """Returns the same class as was passed in, with dunder methods
    added based on the fields defined in the class.

    Examines PEP 526 __annotations__ to determine fields.

    If init is true, an __init__() method is added to the class. If
    repr is true, a __repr__() method is added. If order is true, rich
    comparison dunder methods are added. If unsafe_hash is true, a
    __hash__() method function is added. If frozen is true, fields may
    not be assigned to after instance creation. If match_args is true,
    the __match_args__ tuple is added. If kw_only is true, then by
    default all fields are keyword-only. If slots is true, an
    __slots__ attribute is added.
    """

    def wrap(cls):
        return _process_class(cls, init, repr, eq, order, unsafe_hash,
                              frozen, match_args, kw_only, slots)

    if cls is not None:
        # Called as @dataclass without parens: decorate immediately.
        return wrap(cls)

    # Called as @dataclass(...): hand back the decorator itself.
    return wrap
|
| 1185 |
+
|
| 1186 |
+
|
| 1187 |
+
def fields(class_or_instance):
    """Return a tuple describing the fields of this dataclass.

    Accepts a dataclass or an instance of one. Tuple elements are of
    type Field.
    """

    # Might it be worth caching this, per class?  The _FIELDS mapping
    # only exists on classes processed by @dataclass.
    fields_map = getattr(class_or_instance, _FIELDS, None)
    if fields_map is None:
        raise TypeError('must be called with a dataclass type or instance') from None

    # Exclude pseudo-fields (ClassVar/InitVar).  The mapping preserves
    # insertion order, so the tuple matches field definition order.
    return tuple(f for f in fields_map.values() if f._field_type is _FIELD)
|
| 1203 |
+
|
| 1204 |
+
|
| 1205 |
+
def _is_dataclass_instance(obj):
    """Returns True if obj is an instance of a dataclass."""
    # Look on the type, not the instance, so plain instances of
    # non-dataclasses holding a _FIELDS attribute don't match.
    klass = type(obj)
    return hasattr(klass, _FIELDS)
|
| 1208 |
+
|
| 1209 |
+
|
| 1210 |
+
def is_dataclass(obj):
    """Returns True if obj is a dataclass or an instance of a
    dataclass."""
    # A GenericAlias (e.g. SomeDataclass[int]) is technically a type,
    # but we want to inspect the underlying instance type instead.
    if isinstance(obj, type) and not isinstance(obj, GenericAlias):
        cls = obj
    else:
        cls = type(obj)
    return hasattr(cls, _FIELDS)
|
| 1215 |
+
|
| 1216 |
+
|
| 1217 |
+
def asdict(obj, *, dict_factory=dict):
    """Return the fields of a dataclass instance as a new dictionary mapping
    field names to field values.

    Example usage:

      @dataclass
      class C:
          x: int
          y: int

      c = C(1, 2)
      assert asdict(c) == {'x': 1, 'y': 2}

    If given, 'dict_factory' will be used instead of built-in dict.
    The function applies recursively to field values that are
    dataclass instances. This will also look into built-in containers:
    tuples, lists, and dicts.
    """
    # Reject anything that isn't a dataclass instance up front; the
    # recursive helper assumes a valid starting point.
    if not _is_dataclass_instance(obj):
        raise TypeError("asdict() should be called on dataclass instances")
    return _asdict_inner(obj, dict_factory)
|
| 1239 |
+
|
| 1240 |
+
|
| 1241 |
+
def _asdict_inner(obj, dict_factory):
    # Recursive worker for asdict().  Handles dataclass instances,
    # namedtuples, list/tuple subclasses, dicts, and (via deepcopy)
    # everything else.
    if _is_dataclass_instance(obj):
        # Dataclass instance: recurse into each field value.
        pairs = [(f.name, _asdict_inner(getattr(obj, f.name), dict_factory))
                 for f in fields(obj)]
        return dict_factory(pairs)

    if isinstance(obj, tuple) and hasattr(obj, '_fields'):
        # obj is a namedtuple.  Recurse into it, but the returned
        # object is another namedtuple of the same type.  This is
        # similar to how other list- or tuple-derived classes are
        # treated (see below), but we just need to create them
        # differently because a namedtuple's __init__ needs to be
        # called differently (see bpo-34363).

        # I'm not using namedtuple's _asdict()
        # method, because:
        # - it does not recurse in to the namedtuple fields and
        #   convert them to dicts (using dict_factory).
        # - I don't actually want to return a dict here.  The main
        #   use case here is json.dumps, and it handles converting
        #   namedtuples to lists.  Admittedly we're losing some
        #   information here when we produce a json list instead of a
        #   dict.  Note that if we returned dicts here instead of
        #   namedtuples, we could no longer call asdict() on a data
        #   structure where a namedtuple was used as a dict key.
        return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj])

    if isinstance(obj, (list, tuple)):
        # Assume we can create an object of this type by passing in a
        # generator (which is not true for namedtuples, handled
        # above).
        return type(obj)(_asdict_inner(v, dict_factory) for v in obj)

    if isinstance(obj, dict):
        # Recurse into both keys and values.
        return type(obj)((_asdict_inner(k, dict_factory),
                          _asdict_inner(v, dict_factory))
                         for k, v in obj.items())

    # Leaf value: deep-copy so the result shares no mutable state
    # with the original.
    return copy.deepcopy(obj)
|
| 1280 |
+
|
| 1281 |
+
|
| 1282 |
+
def astuple(obj, *, tuple_factory=tuple):
    """Return the fields of a dataclass instance as a new tuple of field values.

    Example usage::

      @dataclass
      class C:
          x: int
          y: int

      c = C(1, 2)
      assert astuple(c) == (1, 2)

    If given, 'tuple_factory' will be used instead of built-in tuple.
    The function applies recursively to field values that are
    dataclass instances. This will also look into built-in containers:
    tuples, lists, and dicts.
    """
    # Reject anything that isn't a dataclass instance up front; the
    # recursive helper assumes a valid starting point.
    if not _is_dataclass_instance(obj):
        raise TypeError("astuple() should be called on dataclass instances")
    return _astuple_inner(obj, tuple_factory)
|
| 1304 |
+
|
| 1305 |
+
|
| 1306 |
+
def _astuple_inner(obj, tuple_factory):
    # Recursive worker for astuple().  Mirrors _asdict_inner but
    # collects positional values rather than (name, value) pairs.
    if _is_dataclass_instance(obj):
        values = [_astuple_inner(getattr(obj, f.name), tuple_factory)
                  for f in fields(obj)]
        return tuple_factory(values)

    if isinstance(obj, tuple) and hasattr(obj, '_fields'):
        # obj is a namedtuple.  Recurse into it, but the returned
        # object is another namedtuple of the same type.  This is
        # similar to how other list- or tuple-derived classes are
        # treated (see below), but we just need to create them
        # differently because a namedtuple's __init__ needs to be
        # called differently (see bpo-34363).
        return type(obj)(*[_astuple_inner(v, tuple_factory) for v in obj])

    if isinstance(obj, (list, tuple)):
        # Assume we can create an object of this type by passing in a
        # generator (which is not true for namedtuples, handled
        # above).
        return type(obj)(_astuple_inner(v, tuple_factory) for v in obj)

    if isinstance(obj, dict):
        # Recurse into both keys and values.
        return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory))
                         for k, v in obj.items())

    # Leaf value: deep-copy so the result shares no mutable state
    # with the original.
    return copy.deepcopy(obj)
|
| 1331 |
+
|
| 1332 |
+
|
| 1333 |
+
def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
|
| 1334 |
+
repr=True, eq=True, order=False, unsafe_hash=False,
|
| 1335 |
+
frozen=False, match_args=True, kw_only=False, slots=False):
|
| 1336 |
+
"""Return a new dynamically created dataclass.
|
| 1337 |
+
|
| 1338 |
+
The dataclass name will be 'cls_name'. 'fields' is an iterable
|
| 1339 |
+
of either (name), (name, type) or (name, type, Field) objects. If type is
|
| 1340 |
+
omitted, use the string 'typing.Any'. Field objects are created by
|
| 1341 |
+
the equivalent of calling 'field(name, type [, Field-info])'.
|
| 1342 |
+
|
| 1343 |
+
C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))
|
| 1344 |
+
|
| 1345 |
+
is equivalent to:
|
| 1346 |
+
|
| 1347 |
+
@dataclass
|
| 1348 |
+
class C(Base):
|
| 1349 |
+
x: 'typing.Any'
|
| 1350 |
+
y: int
|
| 1351 |
+
z: int = field(init=False)
|
| 1352 |
+
|
| 1353 |
+
For the bases and namespace parameters, see the builtin type() function.
|
| 1354 |
+
|
| 1355 |
+
The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to
|
| 1356 |
+
dataclass().
|
| 1357 |
+
"""
|
| 1358 |
+
|
| 1359 |
+
if namespace is None:
|
| 1360 |
+
namespace = {}
|
| 1361 |
+
|
| 1362 |
+
# While we're looking through the field names, validate that they
|
| 1363 |
+
# are identifiers, are not keywords, and not duplicates.
|
| 1364 |
+
seen = set()
|
| 1365 |
+
annotations = {}
|
| 1366 |
+
defaults = {}
|
| 1367 |
+
for item in fields:
|
| 1368 |
+
if isinstance(item, str):
|
| 1369 |
+
name = item
|
| 1370 |
+
tp = 'typing.Any'
|
| 1371 |
+
elif len(item) == 2:
|
| 1372 |
+
name, tp, = item
|
| 1373 |
+
elif len(item) == 3:
|
| 1374 |
+
name, tp, spec = item
|
| 1375 |
+
defaults[name] = spec
|
| 1376 |
+
else:
|
| 1377 |
+
raise TypeError(f'Invalid field: {item!r}')
|
| 1378 |
+
|
| 1379 |
+
if not isinstance(name, str) or not name.isidentifier():
|
| 1380 |
+
raise TypeError(f'Field names must be valid identifiers: {name!r}')
|
| 1381 |
+
if keyword.iskeyword(name):
|
| 1382 |
+
raise TypeError(f'Field names must not be keywords: {name!r}')
|
| 1383 |
+
if name in seen:
|
| 1384 |
+
raise TypeError(f'Field name duplicated: {name!r}')
|
| 1385 |
+
|
| 1386 |
+
seen.add(name)
|
| 1387 |
+
annotations[name] = tp
|
| 1388 |
+
|
| 1389 |
+
# Update 'ns' with the user-supplied namespace plus our calculated values.
|
| 1390 |
+
def exec_body_callback(ns):
|
| 1391 |
+
ns.update(namespace)
|
| 1392 |
+
ns.update(defaults)
|
| 1393 |
+
ns['__annotations__'] = annotations
|
| 1394 |
+
|
| 1395 |
+
# We use `types.new_class()` instead of simply `type()` to allow dynamic creation
|
| 1396 |
+
# of generic dataclasses.
|
| 1397 |
+
cls = types.new_class(cls_name, bases, {}, exec_body_callback)
|
| 1398 |
+
|
| 1399 |
+
# Apply the normal decorator.
|
| 1400 |
+
return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
|
| 1401 |
+
unsafe_hash=unsafe_hash, frozen=frozen,
|
| 1402 |
+
match_args=match_args, kw_only=kw_only, slots=slots)
|
| 1403 |
+
|
| 1404 |
+
|
| 1405 |
+
def replace(obj, /, **changes):
    """Return a new object replacing specified fields with new values.

    This is especially useful for frozen classes.  Example usage:

      @dataclass(frozen=True)
      class C:
          x: int
          y: int

      c = C(1, 2)
      c1 = replace(c, x=3)
      assert c1.x == 3 and c1.y == 2

    Raises TypeError when 'obj' is not a dataclass instance, and
    ValueError when an init=False field is supplied or a defaultless
    InitVar is omitted.
    """
    if not _is_dataclass_instance(obj):
        raise TypeError("replace() should be called on dataclass instances")

    # Fill 'changes' with the current value of every init field the
    # caller did not override.  Mutating 'changes' is safe: ** packing
    # always hands us a fresh dict, even for 'replace(obj, **my_changes)'.
    for fld in getattr(obj, _FIELDS).values():
        # Skip pseudo-fields backed by the class rather than the instance.
        if fld._field_type is _FIELD_CLASSVAR:
            continue

        name = fld.name
        if not fld.init:
            # init=False fields cannot be passed to __init__(), so they
            # cannot be replaced either.
            if name in changes:
                raise ValueError(f'field {name} is declared with '
                                 'init=False, it cannot be specified with '
                                 'replace()')
        elif name not in changes:
            if fld._field_type is _FIELD_INITVAR and fld.default is MISSING:
                # An InitVar's value is not stored on the instance, so we
                # cannot recover it -- the caller must supply it.
                raise ValueError(f"InitVar {name!r} "
                                 'must be specified with replace()')
            changes[name] = getattr(obj, name)

    # Build the new object through __init__() (and __post_init__(), if
    # defined) with the merged values; any keys in 'changes' that are
    # not fields correctly raise TypeError there.
    return obj.__class__(**changes)
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (715 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/bdist.cpython-310.pyc
ADDED
|
Binary file (3.8 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/bdist_rpm.cpython-310.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/build.cpython-310.pyc
ADDED
|
Binary file (4.1 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_clib.cpython-310.pyc
ADDED
|
Binary file (5.07 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_ext.cpython-310.pyc
ADDED
|
Binary file (16.4 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_py.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/build_scripts.cpython-310.pyc
ADDED
|
Binary file (4.32 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/clean.cpython-310.pyc
ADDED
|
Binary file (2.09 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/config.cpython-310.pyc
ADDED
|
Binary file (10.5 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/install.cpython-310.pyc
ADDED
|
Binary file (14.1 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_data.cpython-310.pyc
ADDED
|
Binary file (2.29 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_egg_info.cpython-310.pyc
ADDED
|
Binary file (3.03 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_headers.cpython-310.pyc
ADDED
|
Binary file (1.71 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/install_lib.cpython-310.pyc
ADDED
|
Binary file (5.11 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/__pycache__/register.cpython-310.pyc
ADDED
|
Binary file (8.88 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/command/build_scripts.py
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_scripts
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_scripts' command."""
|
| 4 |
+
|
| 5 |
+
import os, re
|
| 6 |
+
from stat import ST_MODE
|
| 7 |
+
from distutils import sysconfig
|
| 8 |
+
from distutils.core import Command
|
| 9 |
+
from distutils.dep_util import newer
|
| 10 |
+
from distutils.util import convert_path, Mixin2to3
|
| 11 |
+
from distutils import log
|
| 12 |
+
import tokenize
|
| 13 |
+
|
| 14 |
+
# Matches a Unix "#!" shebang line that invokes some Python interpreter
# (optionally versioned, e.g. "python3.9"); group(1) captures any
# interpreter options that follow the executable name.
first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$')
|
| 16 |
+
|
| 17 |
+
class build_scripts(Command):
    """Implements the Distutils 'build_scripts' command.

    Copies each distribution script into the build directory and, when a
    script begins with a "#!...python..." line, rewrites that shebang to
    point at the target interpreter.
    """

    description = "\"build\" scripts (copy and fixup #! line)"

    user_options = [
        ('build-dir=', 'd', "directory to \"build\" (copy) to"),
        # Fixed: the closing parenthesis was missing from this help text.
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e', "specify final destination interpreter path"),
        ]

    boolean_options = ['force']


    def initialize_options(self):
        """Set every option to its undetermined default."""
        self.build_dir = None
        self.scripts = None
        self.force = None
        self.executable = None
        self.outfiles = None

    def finalize_options(self):
        """Inherit unset options from the 'build' command."""
        self.set_undefined_options('build',
                                   ('build_scripts', 'build_dir'),
                                   ('force', 'force'),
                                   ('executable', 'executable'))
        self.scripts = self.distribution.scripts

    def get_source_files(self):
        """Return the list of input script paths."""
        return self.scripts

    def run(self):
        # Nothing to do for a distribution that declares no scripts.
        if not self.scripts:
            return
        self.copy_scripts()


    def copy_scripts(self):
        r"""Copy each script listed in 'self.scripts'; if it's marked as a
        Python script in the Unix way (first line matches 'first_line_re',
        ie. starts with "\#!" and contains "python"), then adjust the first
        line to refer to the current Python interpreter as we copy.

        Returns (outfiles, updated_files): every output path, and the
        subset that was (or, in dry-run mode, would have been) rewritten.
        """
        self.mkpath(self.build_dir)
        outfiles = []
        updated_files = []
        for script in self.scripts:
            adjust = False
            script = convert_path(script)
            outfile = os.path.join(self.build_dir, os.path.basename(script))
            outfiles.append(outfile)

            if not self.force and not newer(script, outfile):
                log.debug("not copying %s (up-to-date)", script)
                continue

            # Always open the file, but ignore failures in dry-run mode --
            # that way, we'll get accurate feedback if we can read the
            # script.
            try:
                f = open(script, "rb")
            except OSError:
                if not self.dry_run:
                    raise
                f = None

            # Bug fix: the original code leaked the open handle 'f' on two
            # paths (an empty input file, and an adjusted script during a
            # dry run).  The try/finally guarantees it is closed always.
            try:
                if f is not None:
                    encoding, lines = tokenize.detect_encoding(f.readline)
                    f.seek(0)
                    first_line = f.readline()
                    if not first_line:
                        self.warn("%s is an empty file (skipping)" % script)
                        continue

                    match = first_line_re.match(first_line)
                    if match:
                        adjust = True
                        post_interp = match.group(1) or b''

                if adjust:
                    log.info("copying and adjusting %s -> %s", script,
                             self.build_dir)
                    updated_files.append(outfile)
                    if not self.dry_run:
                        if not sysconfig.python_build:
                            executable = self.executable
                        else:
                            # Building CPython itself: point the shebang at
                            # the interpreter being built, not self.executable.
                            executable = os.path.join(
                                sysconfig.get_config_var("BINDIR"),
                                "python%s%s" % (sysconfig.get_config_var("VERSION"),
                                                sysconfig.get_config_var("EXE")))
                        executable = os.fsencode(executable)
                        shebang = b"#!" + executable + post_interp + b"\n"
                        # Python parser starts to read a script using UTF-8 until
                        # it gets a #coding:xxx cookie. The shebang has to be the
                        # first line of a file, the #coding:xxx cookie cannot be
                        # written before. So the shebang has to be decodable from
                        # UTF-8.
                        try:
                            shebang.decode('utf-8')
                        except UnicodeDecodeError:
                            raise ValueError(
                                "The shebang ({!r}) is not decodable "
                                "from utf-8".format(shebang))
                        # If the script is encoded to a custom encoding (use a
                        # #coding:xxx cookie), the shebang has to be decodable from
                        # the script encoding too.
                        try:
                            shebang.decode(encoding)
                        except UnicodeDecodeError:
                            raise ValueError(
                                "The shebang ({!r}) is not decodable "
                                "from the script encoding ({})"
                                .format(shebang, encoding))
                        with open(outfile, "wb") as outf:
                            outf.write(shebang)
                            # 'f' is positioned just past the old shebang.
                            outf.writelines(f.readlines())
                else:
                    # Plain copy: no shebang adjustment needed (or wanted).
                    updated_files.append(outfile)
                    self.copy_file(script, outfile)
            finally:
                if f:
                    f.close()

        if os.name == 'posix':
            for file in outfiles:
                if self.dry_run:
                    log.info("changing mode of %s", file)
                else:
                    # Make the copied script world readable/executable while
                    # preserving any other mode bits it already has.
                    oldmode = os.stat(file)[ST_MODE] & 0o7777
                    newmode = (oldmode | 0o555) & 0o7777
                    if newmode != oldmode:
                        log.info("changing mode of %s from %o to %o",
                                 file, oldmode, newmode)
                        os.chmod(file, newmode)
        # XXX should we modify self.outfiles?
        return outfiles, updated_files
|
| 153 |
+
|
| 154 |
+
class build_scripts_2to3(build_scripts, Mixin2to3):
    """Variant of build_scripts that runs 2to3 over each copied script."""

    def copy_scripts(self):
        # Copy (and shebang-adjust) as usual, then convert the scripts
        # that were actually rewritten -- skipped entirely in dry-run mode.
        outfiles, updated_files = super().copy_scripts()
        if not self.dry_run:
            self.run_2to3(updated_files)
        return outfiles, updated_files
|
evalkit_llava/lib/python3.10/distutils/command/install.py
ADDED
|
@@ -0,0 +1,679 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install' command."""
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
import sysconfig
|
| 7 |
+
import os
|
| 8 |
+
import re
|
| 9 |
+
|
| 10 |
+
from distutils import log
|
| 11 |
+
from distutils.core import Command
|
| 12 |
+
from distutils.debug import DEBUG
|
| 13 |
+
from distutils.sysconfig import get_config_vars
|
| 14 |
+
from distutils.errors import DistutilsPlatformError
|
| 15 |
+
from distutils.file_util import write_file
|
| 16 |
+
from distutils.util import convert_path, subst_vars, change_root
|
| 17 |
+
from distutils.util import get_platform
|
| 18 |
+
from distutils.errors import DistutilsOptionError
|
| 19 |
+
|
| 20 |
+
from site import USER_BASE
|
| 21 |
+
from site import USER_SITE
|
| 22 |
+
|
| 23 |
+
# site.USER_SITE is None when per-user site-packages are unavailable
# (e.g. isolated mode); several schemes below only exist when it is set.
HAS_USER_SITE = (USER_SITE is not None)

# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every scheme in
# sysconfig._INSTALL_SCHEMES, and to SCHEME_KEYS here.
SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')

# The following code provides backward-compatible INSTALL_SCHEMES
# while making the sysconfig module the single point of truth.
# This makes it easier for OS distributions where they need to
# alter locations for packages installations in a single place.
# Note that this module is deprecated (PEP 632); all consumers
# of this information should switch to using sysconfig directly.
INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}}

# Copy from sysconfig._INSTALL_SCHEMES, mapping distutils scheme names to
# their sysconfig counterparts.
for key in SCHEME_KEYS:
    for distutils_scheme_name, sys_scheme_name in (
            ("unix_prefix", "posix_prefix"), ("unix_home", "posix_home"),
            ("nt", "nt")):
        sys_key = key
        sys_scheme = sysconfig._INSTALL_SCHEMES[sys_scheme_name]
        if key == "headers" and key not in sys_scheme:
            # On POSIX-y platforms, Python will:
            # - Build from .h files in 'headers' (only there when
            #   building CPython)
            # - Install .h files to 'include'
            # When 'headers' is missing, fall back to 'include'
            sys_key = 'include'
        INSTALL_SCHEMES[distutils_scheme_name][key] = sys_scheme[sys_key]

# Transformation to the different template format distutils expects
# ("$variable" substitution rather than sysconfig's "{variable}").
for main_key in INSTALL_SCHEMES:
    for key, value in INSTALL_SCHEMES[main_key].items():
        # Change all occurrences of {variable} to $variable
        value = re.sub(r"\{(.+?)\}", r"$\g<1>", value)
        value = value.replace("$installed_base", "$base")
        value = value.replace("$py_version_nodot_plat", "$py_version_nodot")
        if key == "headers":
            # distutils installs headers into a per-distribution subdir.
            value += "/$dist_name"
        if sys.version_info >= (3, 9) and key == "platlib":
            # platlibdir is available since 3.9: bpo-1294959
            value = value.replace("/lib/", "/$platlibdir/")
        INSTALL_SCHEMES[main_key][key] = value

# The following part of INSTALL_SCHEMES has a different definition
# than the one in sysconfig, but because both depend on the site module,
# the outcomes should be the same.
if HAS_USER_SITE:
    INSTALL_SCHEMES['nt_user'] = {
        'purelib': '$usersite',
        'platlib': '$usersite',
        'headers': '$userbase/Python$py_version_nodot/Include/$dist_name',
        'scripts': '$userbase/Python$py_version_nodot/Scripts',
        'data'   : '$userbase',
        }

    INSTALL_SCHEMES['unix_user'] = {
        'purelib': '$usersite',
        'platlib': '$usersite',
        'headers':
            '$userbase/include/python$py_version_short$abiflags/$dist_name',
        'scripts': '$userbase/bin',
        'data'   : '$userbase',
        }
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class install(Command):
|
| 91 |
+
|
| 92 |
+
description = "install everything from build directory"
|
| 93 |
+
|
| 94 |
+
user_options = [
|
| 95 |
+
# Select installation scheme and set base director(y|ies)
|
| 96 |
+
('prefix=', None,
|
| 97 |
+
"installation prefix"),
|
| 98 |
+
('exec-prefix=', None,
|
| 99 |
+
"(Unix only) prefix for platform-specific files"),
|
| 100 |
+
('home=', None,
|
| 101 |
+
"(Unix only) home directory to install under"),
|
| 102 |
+
|
| 103 |
+
# Or, just set the base director(y|ies)
|
| 104 |
+
('install-base=', None,
|
| 105 |
+
"base installation directory (instead of --prefix or --home)"),
|
| 106 |
+
('install-platbase=', None,
|
| 107 |
+
"base installation directory for platform-specific files " +
|
| 108 |
+
"(instead of --exec-prefix or --home)"),
|
| 109 |
+
('root=', None,
|
| 110 |
+
"install everything relative to this alternate root directory"),
|
| 111 |
+
|
| 112 |
+
# Or, explicitly set the installation scheme
|
| 113 |
+
('install-purelib=', None,
|
| 114 |
+
"installation directory for pure Python module distributions"),
|
| 115 |
+
('install-platlib=', None,
|
| 116 |
+
"installation directory for non-pure module distributions"),
|
| 117 |
+
('install-lib=', None,
|
| 118 |
+
"installation directory for all module distributions " +
|
| 119 |
+
"(overrides --install-purelib and --install-platlib)"),
|
| 120 |
+
|
| 121 |
+
('install-headers=', None,
|
| 122 |
+
"installation directory for C/C++ headers"),
|
| 123 |
+
('install-scripts=', None,
|
| 124 |
+
"installation directory for Python scripts"),
|
| 125 |
+
('install-data=', None,
|
| 126 |
+
"installation directory for data files"),
|
| 127 |
+
|
| 128 |
+
# Byte-compilation options -- see install_lib.py for details, as
|
| 129 |
+
# these are duplicated from there (but only install_lib does
|
| 130 |
+
# anything with them).
|
| 131 |
+
('compile', 'c', "compile .py to .pyc [default]"),
|
| 132 |
+
('no-compile', None, "don't compile .py files"),
|
| 133 |
+
('optimize=', 'O',
|
| 134 |
+
"also compile with optimization: -O1 for \"python -O\", "
|
| 135 |
+
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
|
| 136 |
+
|
| 137 |
+
# Miscellaneous control options
|
| 138 |
+
('force', 'f',
|
| 139 |
+
"force installation (overwrite any existing files)"),
|
| 140 |
+
('skip-build', None,
|
| 141 |
+
"skip rebuilding everything (for testing/debugging)"),
|
| 142 |
+
|
| 143 |
+
# Where to install documentation (eventually!)
|
| 144 |
+
#('doc-format=', None, "format of documentation to generate"),
|
| 145 |
+
#('install-man=', None, "directory for Unix man pages"),
|
| 146 |
+
#('install-html=', None, "directory for HTML documentation"),
|
| 147 |
+
#('install-info=', None, "directory for GNU info files"),
|
| 148 |
+
|
| 149 |
+
('record=', None,
|
| 150 |
+
"filename in which to record list of installed files"),
|
| 151 |
+
]
|
| 152 |
+
|
| 153 |
+
boolean_options = ['compile', 'force', 'skip-build']
|
| 154 |
+
|
| 155 |
+
if HAS_USER_SITE:
|
| 156 |
+
user_options.append(('user', None,
|
| 157 |
+
"install in user site-package '%s'" % USER_SITE))
|
| 158 |
+
boolean_options.append('user')
|
| 159 |
+
|
| 160 |
+
negative_opt = {'no-compile' : 'compile'}
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
    def initialize_options(self):
        """Set every option to its pre-finalization default.

        finalize_options() later fills in anything left as None using the
        selected installation scheme.
        """
        # High-level options: these select both an installation base
        # and scheme.
        self.prefix = None
        self.exec_prefix = None
        self.home = None
        self.user = 0

        # These select only the installation base; it's up to the user to
        # specify the installation scheme (currently, that means supplying
        # the --install-{platlib,purelib,scripts,data} options).
        self.install_base = None
        self.install_platbase = None
        self.root = None

        # These options are the actual installation directories; if not
        # supplied by the user, they are filled in using the installation
        # scheme implied by prefix/exec-prefix/home and the contents of
        # that installation scheme.
        self.install_purelib = None     # for pure module distributions
        self.install_platlib = None     # non-pure (dists w/ extensions)
        self.install_headers = None     # for C/C++ headers
        self.install_lib = None         # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        if HAS_USER_SITE:
            self.install_userbase = USER_BASE
            self.install_usersite = USER_SITE

        self.compile = None
        self.optimize = None

        # Deprecated
        # These two are for putting non-packagized distributions into their
        # own directory and creating a .pth file if it makes sense.
        # 'extra_path' comes from the setup file; 'install_path_file' can
        # be turned off if it makes no sense to install a .pth file.  (But
        # better to install it uselessly than to guess wrong and not
        # install it when it's necessary and would be used!)  Currently,
        # 'install_path_file' is always true unless some outsider meddles
        # with it.
        self.extra_path = None
        self.install_path_file = 1

        # 'force' forces installation, even if target files are not
        # out-of-date.  'skip_build' skips running the "build" command,
        # handy if you know it's not necessary.  'warn_dir' (which is *not*
        # a user option, it's just there so the bdist_* commands can turn
        # it off) determines whether we warn about installing to a
        # directory not in sys.path.
        self.force = 0
        self.skip_build = 0
        self.warn_dir = 1

        # These are only here as a conduit from the 'build' command to the
        # 'install_*' commands that do the real work.  ('build_base' isn't
        # actually used anywhere, but it might be useful in future.)  They
        # are not user options, because if the user told the install
        # command where the build directory is, that wouldn't affect the
        # build command.
        self.build_base = None
        self.build_lib = None

        # Not defined yet because we don't know anything about
        # documentation yet.
        #self.install_man = None
        #self.install_html = None
        #self.install_info = None

        # Optional file in which to record every installed path.
        self.record = None
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
# -- Option finalizing methods -------------------------------------
|
| 237 |
+
# (This is rather more involved than for most commands,
|
| 238 |
+
# because this is where the policy for installing third-
|
| 239 |
+
# party Python modules on various platforms given a wide
|
| 240 |
+
# array of user input is decided. Yes, it's quite complex!)
|
| 241 |
+
|
| 242 |
+
def finalize_options(self):
|
| 243 |
+
"""Finalizes options."""
|
| 244 |
+
# This method (and its helpers, like 'finalize_unix()',
|
| 245 |
+
# 'finalize_other()', and 'select_scheme()') is where the default
|
| 246 |
+
# installation directories for modules, extension modules, and
|
| 247 |
+
# anything else we care to install from a Python module
|
| 248 |
+
# distribution. Thus, this code makes a pretty important policy
|
| 249 |
+
# statement about how third-party stuff is added to a Python
|
| 250 |
+
# installation! Note that the actual work of installation is done
|
| 251 |
+
# by the relatively simple 'install_*' commands; they just take
|
| 252 |
+
# their orders from the installation directory options determined
|
| 253 |
+
# here.
|
| 254 |
+
|
| 255 |
+
# Check for errors/inconsistencies in the options; first, stuff
|
| 256 |
+
# that's wrong on any platform.
|
| 257 |
+
|
| 258 |
+
if ((self.prefix or self.exec_prefix or self.home) and
|
| 259 |
+
(self.install_base or self.install_platbase)):
|
| 260 |
+
raise DistutilsOptionError(
|
| 261 |
+
"must supply either prefix/exec-prefix/home or " +
|
| 262 |
+
"install-base/install-platbase -- not both")
|
| 263 |
+
|
| 264 |
+
if self.home and (self.prefix or self.exec_prefix):
|
| 265 |
+
raise DistutilsOptionError(
|
| 266 |
+
"must supply either home or prefix/exec-prefix -- not both")
|
| 267 |
+
|
| 268 |
+
if self.user and (self.prefix or self.exec_prefix or self.home or
|
| 269 |
+
self.install_base or self.install_platbase):
|
| 270 |
+
raise DistutilsOptionError("can't combine user with prefix, "
|
| 271 |
+
"exec_prefix/home, or install_(plat)base")
|
| 272 |
+
|
| 273 |
+
# Next, stuff that's wrong (or dubious) only on certain platforms.
|
| 274 |
+
if os.name != "posix":
|
| 275 |
+
if self.exec_prefix:
|
| 276 |
+
self.warn("exec-prefix option ignored on this platform")
|
| 277 |
+
self.exec_prefix = None
|
| 278 |
+
|
| 279 |
+
# Now the interesting logic -- so interesting that we farm it out
|
| 280 |
+
# to other methods. The goal of these methods is to set the final
|
| 281 |
+
# values for the install_{lib,scripts,data,...} options, using as
|
| 282 |
+
# input a heady brew of prefix, exec_prefix, home, install_base,
|
| 283 |
+
# install_platbase, user-supplied versions of
|
| 284 |
+
# install_{purelib,platlib,lib,scripts,data,...}, and the
|
| 285 |
+
# INSTALL_SCHEME dictionary above. Phew!
|
| 286 |
+
|
| 287 |
+
self.dump_dirs("pre-finalize_{unix,other}")
|
| 288 |
+
|
| 289 |
+
if os.name == 'posix':
|
| 290 |
+
self.finalize_unix()
|
| 291 |
+
else:
|
| 292 |
+
self.finalize_other()
|
| 293 |
+
|
| 294 |
+
self.dump_dirs("post-finalize_{unix,other}()")
|
| 295 |
+
|
| 296 |
+
# Expand configuration variables, tilde, etc. in self.install_base
|
| 297 |
+
# and self.install_platbase -- that way, we can use $base or
|
| 298 |
+
# $platbase in the other installation directories and not worry
|
| 299 |
+
# about needing recursive variable expansion (shudder).
|
| 300 |
+
|
| 301 |
+
py_version = sys.version.split()[0]
|
| 302 |
+
(prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
|
| 303 |
+
try:
|
| 304 |
+
abiflags = sys.abiflags
|
| 305 |
+
except AttributeError:
|
| 306 |
+
# sys.abiflags may not be defined on all platforms.
|
| 307 |
+
abiflags = ''
|
| 308 |
+
self.config_vars = {'dist_name': self.distribution.get_name(),
|
| 309 |
+
'dist_version': self.distribution.get_version(),
|
| 310 |
+
'dist_fullname': self.distribution.get_fullname(),
|
| 311 |
+
'py_version': py_version,
|
| 312 |
+
'py_version_short': '%d.%d' % sys.version_info[:2],
|
| 313 |
+
'py_version_nodot': '%d%d' % sys.version_info[:2],
|
| 314 |
+
'sys_prefix': prefix,
|
| 315 |
+
'prefix': prefix,
|
| 316 |
+
'sys_exec_prefix': exec_prefix,
|
| 317 |
+
'exec_prefix': exec_prefix,
|
| 318 |
+
'abiflags': abiflags,
|
| 319 |
+
'platlibdir': sys.platlibdir,
|
| 320 |
+
}
|
| 321 |
+
|
| 322 |
+
if HAS_USER_SITE:
|
| 323 |
+
self.config_vars['userbase'] = self.install_userbase
|
| 324 |
+
self.config_vars['usersite'] = self.install_usersite
|
| 325 |
+
|
| 326 |
+
if sysconfig.is_python_build(True):
|
| 327 |
+
self.config_vars['srcdir'] = sysconfig.get_config_var('srcdir')
|
| 328 |
+
|
| 329 |
+
self.expand_basedirs()
|
| 330 |
+
|
| 331 |
+
self.dump_dirs("post-expand_basedirs()")
|
| 332 |
+
|
| 333 |
+
# Now define config vars for the base directories so we can expand
|
| 334 |
+
# everything else.
|
| 335 |
+
self.config_vars['base'] = self.install_base
|
| 336 |
+
self.config_vars['platbase'] = self.install_platbase
|
| 337 |
+
|
| 338 |
+
if DEBUG:
|
| 339 |
+
from pprint import pprint
|
| 340 |
+
print("config vars:")
|
| 341 |
+
pprint(self.config_vars)
|
| 342 |
+
|
| 343 |
+
# Expand "~" and configuration variables in the installation
|
| 344 |
+
# directories.
|
| 345 |
+
self.expand_dirs()
|
| 346 |
+
|
| 347 |
+
self.dump_dirs("post-expand_dirs()")
|
| 348 |
+
|
| 349 |
+
# Create directories in the home dir:
|
| 350 |
+
if self.user:
|
| 351 |
+
self.create_home_path()
|
| 352 |
+
|
| 353 |
+
# Pick the actual directory to install all modules to: either
|
| 354 |
+
# install_purelib or install_platlib, depending on whether this
|
| 355 |
+
# module distribution is pure or not. Of course, if the user
|
| 356 |
+
# already specified install_lib, use their selection.
|
| 357 |
+
if self.install_lib is None:
|
| 358 |
+
if self.distribution.ext_modules: # has extensions: non-pure
|
| 359 |
+
self.install_lib = self.install_platlib
|
| 360 |
+
else:
|
| 361 |
+
self.install_lib = self.install_purelib
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
# Convert directories from Unix /-separated syntax to the local
|
| 365 |
+
# convention.
|
| 366 |
+
self.convert_paths('lib', 'purelib', 'platlib',
|
| 367 |
+
'scripts', 'data', 'headers')
|
| 368 |
+
if HAS_USER_SITE:
|
| 369 |
+
self.convert_paths('userbase', 'usersite')
|
| 370 |
+
|
| 371 |
+
# Deprecated
|
| 372 |
+
# Well, we're not actually fully completely finalized yet: we still
|
| 373 |
+
# have to deal with 'extra_path', which is the hack for allowing
|
| 374 |
+
# non-packagized module distributions (hello, Numerical Python!) to
|
| 375 |
+
# get their own directories.
|
| 376 |
+
self.handle_extra_path()
|
| 377 |
+
self.install_libbase = self.install_lib # needed for .pth file
|
| 378 |
+
self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
|
| 379 |
+
|
| 380 |
+
# If a new root directory was supplied, make all the installation
|
| 381 |
+
# dirs relative to it.
|
| 382 |
+
if self.root is not None:
|
| 383 |
+
self.change_roots('libbase', 'lib', 'purelib', 'platlib',
|
| 384 |
+
'scripts', 'data', 'headers')
|
| 385 |
+
|
| 386 |
+
self.dump_dirs("after prepending root")
|
| 387 |
+
|
| 388 |
+
# Find out the build directories, ie. where to install from.
|
| 389 |
+
self.set_undefined_options('build',
|
| 390 |
+
('build_base', 'build_base'),
|
| 391 |
+
('build_lib', 'build_lib'))
|
| 392 |
+
|
| 393 |
+
# Punt on doc directories for now -- after all, we're punting on
|
| 394 |
+
# documentation completely!
|
| 395 |
+
|
| 396 |
+
def dump_dirs(self, msg):
    """Dumps the list of user options."""
    # Debug-only tracing aid: silent unless distutils DEBUG is enabled.
    if not DEBUG:
        return
    from distutils.fancy_getopt import longopt_xlate
    log.debug(msg + ":")
    for option in self.user_options:
        name = option[0]
        # Strip the "takes a value" marker from the long-option name.
        if name.endswith("="):
            name = name[:-1]
        if name in self.negative_opt:
            # A negative option reports the inverse of its positive twin.
            name = self.negative_opt[name].translate(longopt_xlate)
            value = not getattr(self, name)
        else:
            name = name.translate(longopt_xlate)
            value = getattr(self, name)
        log.debug(" %s: %s", name, value)
|
| 414 |
+
|
| 415 |
+
def finalize_unix(self):
    """Finalizes options for posix platforms."""
    # If the user supplied a base directory explicitly, the rest of the
    # scheme must be complete as well; nothing else is inferred.
    if self.install_base is not None or self.install_platbase is not None:
        scheme_incomplete = (
            (self.install_lib is None and
             self.install_purelib is None and
             self.install_platlib is None) or
            self.install_headers is None or
            self.install_scripts is None or
            self.install_data is None)
        if scheme_incomplete:
            raise DistutilsOptionError(
                "install-base or install-platbase supplied, but "
                "installation scheme is incomplete")
        return

    # --user installation: everything hangs off the per-user base.
    if self.user:
        if self.install_userbase is None:
            raise DistutilsPlatformError(
                "User base directory is not specified")
        self.install_base = self.install_platbase = self.install_userbase
        self.select_scheme("unix_user")
        return

    # --home installation: both bases point at the home directory.
    if self.home is not None:
        self.install_base = self.install_platbase = self.home
        self.select_scheme("unix_home")
        return

    # Default: derive bases from the (possibly interpreter-supplied) prefix.
    if self.prefix is None:
        if self.exec_prefix is not None:
            raise DistutilsOptionError(
                "must not supply exec-prefix without prefix")
        self.prefix = os.path.normpath(sys.prefix)
        self.exec_prefix = os.path.normpath(sys.exec_prefix)
    elif self.exec_prefix is None:
        # A prefix without an exec-prefix implies they coincide.
        self.exec_prefix = self.prefix

    self.install_base = self.prefix
    self.install_platbase = self.exec_prefix
    self.select_scheme("unix_prefix")
|
| 454 |
+
|
| 455 |
+
def finalize_other(self):
    """Finalizes options for non-posix platforms"""
    # --user installation: everything hangs off the per-user base.
    if self.user:
        if self.install_userbase is None:
            raise DistutilsPlatformError(
                "User base directory is not specified")
        self.install_base = self.install_platbase = self.install_userbase
        self.select_scheme(os.name + "_user")
        return

    # --home installation reuses the unix_home scheme on every platform.
    if self.home is not None:
        self.install_base = self.install_platbase = self.home
        self.select_scheme("unix_home")
        return

    # Default: base both directories on the interpreter prefix.
    if self.prefix is None:
        self.prefix = os.path.normpath(sys.prefix)
    self.install_base = self.install_platbase = self.prefix
    try:
        self.select_scheme(os.name)
    except KeyError:
        raise DistutilsPlatformError(
            "I don't know how to install stuff on '%s'" % os.name)
|
| 476 |
+
|
| 477 |
+
def select_scheme(self, name):
    """Sets the install directories by applying the install schemes."""
    # It's the caller's problem if they supply a bad scheme name.
    scheme = INSTALL_SCHEMES[name]
    for key in SCHEME_KEYS:
        attr = 'install_' + key
        # Only fill in directories the user has not set explicitly.
        if getattr(self, attr) is None:
            setattr(self, attr, scheme[key])
|
| 485 |
+
|
| 486 |
+
def _expand_attrs(self, attrs):
|
| 487 |
+
for attr in attrs:
|
| 488 |
+
val = getattr(self, attr)
|
| 489 |
+
if val is not None:
|
| 490 |
+
if os.name == 'posix' or os.name == 'nt':
|
| 491 |
+
val = os.path.expanduser(val)
|
| 492 |
+
val = subst_vars(val, self.config_vars)
|
| 493 |
+
setattr(self, attr, val)
|
| 494 |
+
|
| 495 |
+
def expand_basedirs(self):
    """Calls `os.path.expanduser` on install_base, install_platbase and
    root."""
    # Bases are expanded first so $base/$platbase work in expand_dirs().
    base_attrs = ['install_base', 'install_platbase', 'root']
    self._expand_attrs(base_attrs)
|
| 499 |
+
|
| 500 |
+
def expand_dirs(self):
    """Calls `os.path.expanduser` on install dirs."""
    dir_attrs = ['install_purelib', 'install_platlib',
                 'install_lib', 'install_headers',
                 'install_scripts', 'install_data']
    self._expand_attrs(dir_attrs)
|
| 505 |
+
|
| 506 |
+
def convert_paths(self, *names):
    """Call `convert_path` over `names`."""
    # Rewrites each install_<name> from Unix '/'-separated form to the
    # local OS path convention.
    for suffix in names:
        attr = "install_%s" % suffix
        converted = convert_path(getattr(self, attr))
        setattr(self, attr, converted)
|
| 511 |
+
|
| 512 |
+
def handle_extra_path(self):
    """Set `path_file` and `extra_dirs` using `extra_path`."""
    if self.extra_path is None:
        self.extra_path = self.distribution.extra_path

    if self.extra_path is None:
        # Nothing requested: no .pth file, no extra subdirectory.
        path_file = None
        extra_dirs = ''
    else:
        log.warn(
            "Distribution option extra_path is deprecated. "
            "See issue27919 for details."
        )
        # Accept a comma-separated string as well as a list/tuple.
        if isinstance(self.extra_path, str):
            self.extra_path = self.extra_path.split(',')

        if len(self.extra_path) == 1:
            # One element names both the .pth file and the directory.
            path_file = extra_dirs = self.extra_path[0]
        elif len(self.extra_path) == 2:
            path_file, extra_dirs = self.extra_path
        else:
            raise DistutilsOptionError(
                "'extra_path' option must be a list, tuple, or "
                "comma-separated string with 1 or 2 elements")

        # convert to local form in case Unix notation used (as it
        # should be in setup scripts)
        extra_dirs = convert_path(extra_dirs)

    # XXX should we warn if path_file and not extra_dirs? (in which
    # case the path file would be harmless but pointless)
    self.path_file = path_file
    self.extra_dirs = extra_dirs
|
| 545 |
+
|
| 546 |
+
def change_roots(self, *names):
    """Change the install directories pointed by name using root."""
    # Re-anchors each install_<name> under self.root (the --root option).
    for suffix in names:
        attr = "install_" + suffix
        rerooted = change_root(self.root, getattr(self, attr))
        setattr(self, attr, rerooted)
|
| 551 |
+
|
| 552 |
+
def create_home_path(self):
    """Create directories under ~."""
    # Only relevant for --user installs.
    if not self.user:
        return
    home = convert_path(os.path.expanduser("~"))
    # Create any config-var directory that lives inside the user's home
    # and does not exist yet (names are irrelevant, only the paths).
    for path in self.config_vars.values():
        if path.startswith(home) and not os.path.isdir(path):
            self.debug_print("os.makedirs('%s', 0o700)" % path)
            os.makedirs(path, 0o700)
|
| 561 |
+
|
| 562 |
+
# -- Command execution methods -------------------------------------
|
| 563 |
+
|
| 564 |
+
def run(self):
    """Runs the command."""
    # Obviously have to build before we can install.
    if not self.skip_build:
        self.run_command('build')
        # If we built for any other platform, we can't install.
        build_plat = self.distribution.get_command_obj('build').plat_name
        # warn_dir is a clue that the 'install' is happening internally
        # (not onto sys.path), so the platform check is skipped then.
        if self.warn_dir and build_plat != get_platform():
            raise DistutilsPlatformError("Can't install when "
                                         "cross-compiling")

    # Run all sub-commands (at least those that need to be run).
    for sub_name in self.get_sub_commands():
        self.run_command(sub_name)

    if self.path_file:
        self.create_path_file()

    # Write the list of installed files, if requested.
    if self.record:
        installed = self.get_outputs()
        if self.root:  # strip any package prefix
            prefix_len = len(self.root)
            installed = [name[prefix_len:] for name in installed]
        self.execute(write_file,
                     (self.record, installed),
                     "writing list of installed files to '%s'" %
                     self.record)

    # Warn when modules land outside the interpreter's search path,
    # unless a .pth file makes them reachable anyway.
    normalized_sys_path = (os.path.normcase(os.path.normpath(p))
                           for p in sys.path)
    install_lib = os.path.normcase(os.path.normpath(self.install_lib))
    if (self.warn_dir and
        not (self.path_file and self.install_path_file) and
        install_lib not in normalized_sys_path):
        log.debug(("modules installed to '%s', which is not in "
                   "Python's module search path (sys.path) -- "
                   "you'll have to change the search path yourself"),
                  self.install_lib)
|
| 607 |
+
|
| 608 |
+
def create_path_file(self):
    """Creates the .pth file"""
    pth_name = os.path.join(self.install_libbase,
                            self.path_file + ".pth")
    if not self.install_path_file:
        # Installing outside a site directory: a .pth file would be
        # ignored there, so just tell the user.
        self.warn("path file '%s' not created" % pth_name)
        return
    self.execute(write_file,
                 (pth_name, [self.extra_dirs]),
                 "creating %s" % pth_name)
|
| 618 |
+
|
| 619 |
+
|
| 620 |
+
# -- Reporting methods ---------------------------------------------
|
| 621 |
+
|
| 622 |
+
def get_outputs(self):
    """Assembles the outputs of all the sub-commands."""
    collected = []
    for sub_name in self.get_sub_commands():
        sub_cmd = self.get_finalized_command(sub_name)
        # Preserve first-seen order while dropping duplicate entries.
        for produced in sub_cmd.get_outputs():
            if produced not in collected:
                collected.append(produced)

    if self.path_file and self.install_path_file:
        collected.append(os.path.join(self.install_libbase,
                                      self.path_file + ".pth"))

    return collected
|
| 638 |
+
|
| 639 |
+
def get_inputs(self):
    """Returns the inputs of all the sub-commands"""
    # Flatten every sub-command's inputs into one list, in order.
    return [name
            for sub in self.get_sub_commands()
            for name in self.get_finalized_command(sub).get_inputs()]
|
| 648 |
+
|
| 649 |
+
# -- Predicates for sub-command list -------------------------------
|
| 650 |
+
|
| 651 |
+
def has_lib(self):
    """Returns true if the current distribution has any Python
    modules to install."""
    dist = self.distribution
    return (dist.has_pure_modules() or dist.has_ext_modules())
|
| 656 |
+
|
| 657 |
+
def has_headers(self):
    """Returns true if the current distribution has any headers
    to install."""
    return self.distribution.has_headers()
|
| 661 |
+
|
| 662 |
+
def has_scripts(self):
    """Returns true if the current distribution has any scripts
    to install."""
    return self.distribution.has_scripts()
|
| 666 |
+
|
| 667 |
+
def has_data(self):
    """Returns true if the current distribution has any data files
    to install."""
    return self.distribution.has_data_files()
|
| 671 |
+
|
| 672 |
+
# 'sub_commands': a list of commands this command might have to run to
# get its work done. See cmd.py for more info.
# Each entry is a (command name, predicate) pair; the predicate is called
# with the install instance and decides whether that sub-command runs.
sub_commands = [('install_lib', has_lib),
                ('install_headers', has_headers),
                ('install_scripts', has_scripts),
                ('install_data', has_data),
                # egg-info is written unconditionally so the install is
                # always recorded in package metadata.
                ('install_egg_info', lambda self:True),
                ]
|
evalkit_llava/lib/python3.10/distutils/command/register.py
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.register
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'register' command (register with the repository).
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# created 2002/10/21, Richard Jones
|
| 7 |
+
|
| 8 |
+
import getpass
|
| 9 |
+
import io
|
| 10 |
+
import urllib.parse, urllib.request
|
| 11 |
+
from warnings import warn
|
| 12 |
+
|
| 13 |
+
from distutils.core import PyPIRCCommand
|
| 14 |
+
from distutils.errors import *
|
| 15 |
+
from distutils import log
|
| 16 |
+
|
| 17 |
+
class register(PyPIRCCommand):
    """Implement the 'register' command: send distribution metadata to a
    package index server (interactive; talks HTTP via urllib)."""

    description = ("register the distribution with the Python package index")
    user_options = PyPIRCCommand.user_options + [
        ('list-classifiers', None,
         'list the valid Trove classifiers'),
        ('strict', None ,
         'Will stop the registering if the meta-data are not fully compliant')
        ]
    # NOTE(review): 'verify' has no matching entry in user_options here --
    # presumably historical; confirm before relying on it.
    boolean_options = PyPIRCCommand.boolean_options + [
        'verify', 'list-classifiers', 'strict']

    # The 'check' sub-command always runs before registering.
    sub_commands = [('check', lambda self: True)]

    def initialize_options(self):
        """Set default values for the command's options."""
        PyPIRCCommand.initialize_options(self)
        self.list_classifiers = 0
        self.strict = 0

    def finalize_options(self):
        """Finish option setup and pass strictness down to 'check'."""
        PyPIRCCommand.finalize_options(self)
        # setting options for the `check` subcommand
        check_options = {'strict': ('register', self.strict),
                         'restructuredtext': ('register', 1)}
        self.distribution.command_options['check'] = check_options

    def run(self):
        """Dispatch to verify / list-classifiers / register as requested."""
        self.finalize_options()
        self._set_config()

        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        if self.dry_run:
            self.verify_metadata()
        elif self.list_classifiers:
            self.classifiers()
        else:
            self.send_metadata()

    def check_metadata(self):
        """Deprecated API."""
        warn("distutils.command.register.check_metadata is deprecated, \
              use the check command instead", PendingDeprecationWarning)
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.strict = self.strict
        check.restructuredtext = 1
        check.run()

    def _set_config(self):
        ''' Reads the configuration file and set attributes.
        '''
        config = self._read_pypirc()
        if config != {}:
            # A .pypirc was found: take credentials and target from it.
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']
            self.has_config = True
        else:
            # No config: only the default repository (or its 'pypi'
            # alias) is acceptable.
            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
                raise ValueError('%s not found in .pypirc' % self.repository)
            if self.repository == 'pypi':
                self.repository = self.DEFAULT_REPOSITORY
            self.has_config = False

    def classifiers(self):
        ''' Fetch the list of classifiers from the server.
        '''
        url = self.repository+'?:action=list_classifiers'
        response = urllib.request.urlopen(url)
        log.info(self._read_pypi_response(response))

    def verify_metadata(self):
        ''' Send the metadata to the package index server to be checked.
        '''
        # send the info to the server and report the result
        (code, result) = self.post_to_server(self.build_post_data('verify'))
        log.info('Server response (%s): %s', code, result)

    def send_metadata(self):
        ''' Send the metadata to the package index server.

        Well, do the following:
        1. figure who the user is, and then
        2. send the data as a Basic auth'ed POST.

        First we try to read the username/password from $HOME/.pypirc,
        which is a ConfigParser-formatted file with a section
        [distutils] containing username and password entries (both
        in clear text). Eg:

            [distutils]
            index-servers =
                pypi

            [pypi]
            username: fred
            password: sekrit

        Otherwise, to figure who the user is, we offer the user three
        choices:

         1. use existing login,
         2. register as a new user, or
         3. set the password to a random string and email the user.

        '''
        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            choice = '1'
            username = self.username
            password = self.password
        else:
            # 'x' forces the interactive menu below.
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            self.announce('''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''', log.INFO)
            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'),
                auth)
            self.announce('Server response (%s): %s' % (code, result),
                          log.INFO)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    self.announce(('I can store your PyPI login so future '
                                   'submissions will be faster.'), log.INFO)
                    self.announce('(the login will be stored in %s)' % \
                                  self._get_rc_file(), log.INFO)
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        self._store_pypirc(username, password)

        elif choice == '2':
            # Register a brand-new user interactively.
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input('   EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                log.info('Server response (%s): %s', code, result)
            else:
                log.info('You will receive an email shortly.')
                log.info(('Follow the instructions in it to '
                          'complete registration.'))
        elif choice == '3':
            # Ask the server to reset the password by email.
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            log.info('Server response (%s): %s', code, result)

    def build_post_data(self, action):
        """Return the form-data dict for ':action' built from the
        distribution's metadata (PEP 241/314 fields)."""
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        meta = self.distribution.metadata
        data = {
            ':action': action,
            'metadata_version' : '1.0',
            'name': meta.get_name(),
            'version': meta.get_version(),
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
        }
        # Any PEP 314 field bumps the metadata version.
        if data['provides'] or data['requires'] or data['obsoletes']:
            data['metadata_version'] = '1.1'
        return data

    def post_to_server(self, data, auth=None):
        ''' Post a query to the server, and return a string response.
        '''
        if 'name' in data:
            self.announce('Registering %s to %s' % (data['name'],
                                                    self.repository),
                                                    log.INFO)
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) not in (type([]), type( () )):
                value = [value]
            for value in value:
                value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue().encode("utf-8")

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
            'Content-length': str(len(body))
        }
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            # Network-level failure: report as a synthetic 500.
            result = 500, str(e)
        else:
            if self.show_response:
                data = self._read_pypi_response(result)
            result = 200, 'OK'
        if self.show_response:
            msg = '\n'.join(('-' * 75, data, '-' * 75))
            self.announce(msg, log.INFO)
        return result
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/support.cpython-310.pyc
ADDED
|
Binary file (7.73 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_bdist.cpython-310.pyc
ADDED
|
Binary file (2.04 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_bdist_msi.cpython-310.pyc
ADDED
|
Binary file (1.49 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_dist.cpython-310.pyc
ADDED
|
Binary file (16 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_filelist.cpython-310.pyc
ADDED
|
Binary file (8.68 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_msvc9compiler.cpython-310.pyc
ADDED
|
Binary file (5.93 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_text_file.cpython-310.pyc
ADDED
|
Binary file (2.65 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_unixccompiler.cpython-310.pyc
ADDED
|
Binary file (4.75 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_version.cpython-310.pyc
ADDED
|
Binary file (2.44 kB). View file
|
|
|
evalkit_llava/lib/python3.10/distutils/tests/__pycache__/test_versionpredicate.cpython-310.pyc
ADDED
|
Binary file (529 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/ensurepip/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (489 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/ensurepip/_bundled/__init__.py
ADDED
|
File without changes
|
evalkit_llava/lib/python3.10/getopt.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Parser for command line options.
|
| 2 |
+
|
| 3 |
+
This module helps scripts to parse the command line arguments in
|
| 4 |
+
sys.argv. It supports the same conventions as the Unix getopt()
|
| 5 |
+
function (including the special meanings of arguments of the form `-'
|
| 6 |
+
and `--'). Long options similar to those supported by GNU software
|
| 7 |
+
may be used as well via an optional third argument. This module
|
| 8 |
+
provides two functions and an exception:
|
| 9 |
+
|
| 10 |
+
getopt() -- Parse command line options
|
| 11 |
+
gnu_getopt() -- Like getopt(), but allow option and non-option arguments
|
| 12 |
+
to be intermixed.
|
| 13 |
+
GetoptError -- exception (class) raised with 'opt' attribute, which is the
|
| 14 |
+
option involved with the exception.
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
# Long option support added by Lars Wirzenius <[email protected]>.
|
| 18 |
+
#
|
| 19 |
+
# Gerrit Holl <[email protected]> moved the string-based exceptions
|
| 20 |
+
# to class-based exceptions.
|
| 21 |
+
#
|
| 22 |
+
# Peter Åstrand <[email protected]> added gnu_getopt().
|
| 23 |
+
#
|
| 24 |
+
# TODO for gnu_getopt():
|
| 25 |
+
#
|
| 26 |
+
# - GNU getopt_long_only mechanism
|
| 27 |
+
# - allow the caller to specify ordering
|
| 28 |
+
# - RETURN_IN_ORDER option
|
| 29 |
+
# - GNU extension with '-' as first character of option string
|
| 30 |
+
# - optional arguments, specified by double colons
|
| 31 |
+
# - an option string with a W followed by semicolon should
|
| 32 |
+
# treat "-W foo" as "--foo"
|
| 33 |
+
|
| 34 |
+
__all__ = ["GetoptError","error","getopt","gnu_getopt"]
|
| 35 |
+
|
| 36 |
+
import os
|
| 37 |
+
try:
|
| 38 |
+
from gettext import gettext as _
|
| 39 |
+
except ImportError:
|
| 40 |
+
# Bootstrapping Python: gettext's dependencies not built yet
|
| 41 |
+
def _(s): return s
|
| 42 |
+
|
| 43 |
+
class GetoptError(Exception):
|
| 44 |
+
opt = ''
|
| 45 |
+
msg = ''
|
| 46 |
+
def __init__(self, msg, opt=''):
|
| 47 |
+
self.msg = msg
|
| 48 |
+
self.opt = opt
|
| 49 |
+
Exception.__init__(self, msg, opt)
|
| 50 |
+
|
| 51 |
+
def __str__(self):
|
| 52 |
+
return self.msg
|
| 53 |
+
|
| 54 |
+
error = GetoptError # backward compatibility
|
| 55 |
+
|
| 56 |
+
def getopt(args, shortopts, longopts=[]):
    """getopt(args, options[, long_options]) -> opts, args

    Parses command line options and parameter list.  args is the
    argument list to be parsed, without the leading reference to the
    running program.  Typically, this means "sys.argv[1:]".  shortopts
    is the string of option letters that the script wants to
    recognize, with options that require an argument followed by a
    colon (i.e., the same format that Unix getopt() uses).  If
    specified, longopts is a list of strings with the names of the
    long options which should be supported.  The leading '--'
    characters should not be included in the option name.  Options
    which require an argument should be followed by an equal sign
    ('=').

    The return value consists of two elements: the first is a list of
    (option, value) pairs; the second is the list of program arguments
    left after the option list was stripped (this is a trailing slice
    of the first argument).  Each option-and-value pair returned has
    the option as its first element, prefixed with a hyphen (e.g.,
    '-x'), and the option argument as its second element, or an empty
    string if the option has no argument.  The options occur in the
    list in the same order in which they were found, thus allowing
    multiple occurrences.  Long and short options may be mixed.

    Raises GetoptError on unrecognized options or missing arguments.
    """

    opts = []
    # Accept a single long-option string for convenience; normalize to a
    # list.  isinstance() matches gnu_getopt() below (the previous
    # ``type(longopts) == type("")`` test missed str subclasses).
    if isinstance(longopts, str):
        longopts = [longopts]
    else:
        longopts = list(longopts)
    # Scan while the next word looks like an option; a lone '-' is a
    # positional argument by Unix convention and stops the scan.
    while args and args[0].startswith('-') and args[0] != '-':
        if args[0] == '--':
            # Explicit end-of-options marker: everything after it is left
            # untouched for the caller.
            args = args[1:]
            break
        if args[0].startswith('--'):
            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
        else:
            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])

    return opts, args
|
| 98 |
+
|
| 99 |
+
def gnu_getopt(args, shortopts, longopts=[]):
    """getopt(args, options[, long_options]) -> opts, args

    Works like getopt(), except that GNU-style scanning is used by
    default: option and non-option arguments may be intermixed, whereas
    getopt() stops processing options at the first non-option argument.

    If the first character of the option string is '+', or if the
    POSIXLY_CORRECT environment variable is set, option processing
    stops as soon as a non-option argument is encountered.
    """
    opts = []
    prog_args = []
    longopts = [longopts] if isinstance(longopts, str) else list(longopts)

    # Decide whether options may follow non-option arguments.  A leading
    # '+' (or POSIXLY_CORRECT in the environment) requests the classic
    # all-options-first behaviour.
    all_options_first = False
    if shortopts.startswith('+'):
        shortopts = shortopts[1:]
        all_options_first = True
    elif os.environ.get("POSIXLY_CORRECT"):
        all_options_first = True

    while args:
        if args[0] == '--':
            # Explicit terminator: the rest are positional arguments.
            prog_args += args[1:]
            break

        if args[0][:2] == '--':
            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
        elif args[0][:1] == '-' and args[0] != '-':
            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
        else:
            if all_options_first:
                prog_args += args
                break
            # GNU mode: remember the positional argument and keep scanning.
            prog_args.append(args[0])
            args = args[1:]

    return opts, prog_args
|
| 148 |
+
|
| 149 |
+
def do_longs(opts, opt, longopts, args):
    """Consume one long option (the text after '--').

    *opt* may carry an inline argument ('name=value').  Appends a
    ('--name', value) pair to *opts* and returns (opts, remaining_args).
    Raises GetoptError when a required argument is missing or an
    unexpected one is supplied.
    """
    # Split off an inline '=value' if present; no '=' means no inline arg.
    name, eq, value = opt.partition('=')
    optarg = value if eq else None

    has_arg, name = long_has_args(name, longopts)
    if has_arg:
        if optarg is None:
            # Argument must come from the next command-line word.
            if not args:
                raise GetoptError(_('option --%s requires argument') % name,
                                  name)
            optarg, args = args[0], args[1:]
    elif optarg is not None:
        raise GetoptError(_('option --%s must not have an argument') % name,
                          name)
    opts.append(('--' + name, optarg or ''))
    return opts, args
|
| 167 |
+
|
| 168 |
+
# Return:
#   has_arg?
#   full option name
def long_has_args(opt, longopts):
    """Resolve *opt* against *longopts*, allowing unambiguous prefixes.

    Returns (has_arg, full_name); has_arg is True when the option was
    declared with a trailing '=' (i.e. it takes an argument).  Raises
    GetoptError for unknown or ambiguous prefixes.
    """
    candidates = [name for name in longopts if name.startswith(opt)]
    if not candidates:
        raise GetoptError(_('option --%s not recognized') % opt, opt)
    # An exact match always wins over prefix matches.
    if opt in candidates:
        return False, opt
    if opt + '=' in candidates:
        return True, opt
    # No exact match, so the prefix must identify exactly one option.
    if len(candidates) > 1:
        # XXX since candidates contains all valid continuations, might be
        # nice to work them into the error msg
        raise GetoptError(_('option --%s not a unique prefix') % opt, opt)
    full = candidates[0]
    if full.endswith('='):
        # Strip the '=' marker to recover the real option name.
        return True, full[:-1]
    return False, full
|
| 191 |
+
|
| 192 |
+
def do_shorts(opts, optstring, shortopts, args):
    """Consume a cluster of short options (the text after '-').

    Appends one ('-x', value) pair per letter to *opts* and returns
    (opts, remaining_args).  Raises GetoptError when a required
    argument is missing.
    """
    while optstring:
        opt, optstring = optstring[0], optstring[1:]
        if short_has_arg(opt, shortopts):
            if optstring == '':
                # Argument is the next word on the command line.
                if not args:
                    raise GetoptError(_('option -%s requires argument') % opt,
                                      opt)
                optstring, args = args[0], args[1:]
            # The rest of the cluster (or the next word) is the argument.
            optarg, optstring = optstring, ''
        else:
            optarg = ''
        opts.append(('-' + opt, optarg))
    return opts, args
|
| 206 |
+
|
| 207 |
+
def short_has_arg(opt, shortopts):
    """Return True if short option letter *opt* takes an argument.

    An option declared with a trailing ':' in *shortopts* takes an
    argument.  Raises GetoptError for letters not declared at all.
    """
    for i, ch in enumerate(shortopts):
        # ':' itself is never an option letter.
        if ch == opt != ':':
            return shortopts.startswith(':', i + 1)
    raise GetoptError(_('option -%s not recognized') % opt, opt)
|
| 212 |
+
|
| 213 |
+
if __name__ == '__main__':
    # Smoke test: parse this script's own command-line arguments.
    import sys
    print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"]))
|
evalkit_llava/lib/python3.10/gzip.py
ADDED
|
@@ -0,0 +1,609 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Functions that read and write gzipped files.
|
| 2 |
+
|
| 3 |
+
The user of the file doesn't have to worry about the compression,
|
| 4 |
+
but random access is not allowed."""
|
| 5 |
+
|
| 6 |
+
# based on Andrew Kuchling's minigzip.py distributed with the zlib module
|
| 7 |
+
|
| 8 |
+
import struct, sys, time, os
|
| 9 |
+
import zlib
|
| 10 |
+
import builtins
|
| 11 |
+
import io
|
| 12 |
+
import _compression
|
| 13 |
+
|
| 14 |
+
__all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"]
|
| 15 |
+
|
| 16 |
+
FTEXT, FHCRC, FEXTRA, FNAME, FCOMMENT = 1, 2, 4, 8, 16
|
| 17 |
+
|
| 18 |
+
READ, WRITE = 1, 2
|
| 19 |
+
|
| 20 |
+
_COMPRESS_LEVEL_FAST = 1
|
| 21 |
+
_COMPRESS_LEVEL_TRADEOFF = 6
|
| 22 |
+
_COMPRESS_LEVEL_BEST = 9
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def open(filename, mode="rb", compresslevel=_COMPRESS_LEVEL_BEST,
         encoding=None, errors=None, newline=None):
    """Open a gzip-compressed file in binary or text mode.

    The filename argument can be an actual filename (a str or bytes
    object), or an existing file object to read from or write to.

    The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or
    "ab" for binary mode, or "rt", "wt", "xt" or "at" for text mode.
    The default mode is "rb", and the default compresslevel is 9.

    For binary mode, this function is equivalent to the GzipFile
    constructor: GzipFile(filename, mode, compresslevel).  In this case
    the encoding, errors and newline arguments must not be provided.

    For text mode, a GzipFile object is created and wrapped in an
    io.TextIOWrapper instance with the specified encoding, error
    handling behavior, and line ending(s).
    """
    if "t" in mode:
        if "b" in mode:
            raise ValueError("Invalid mode: %r" % (mode,))
    else:
        # Text-mode-only arguments are rejected in binary mode.
        for arg_name, arg in (("encoding", encoding),
                              ("errors", errors),
                              ("newline", newline)):
            if arg is not None:
                raise ValueError(
                    "Argument '%s' not supported in binary mode" % arg_name)

    gz_mode = mode.replace("t", "")
    if isinstance(filename, (str, bytes, os.PathLike)):
        binary_file = GzipFile(filename, gz_mode, compresslevel)
    elif hasattr(filename, "read") or hasattr(filename, "write"):
        # A file-like object: pass it through as fileobj.
        binary_file = GzipFile(None, gz_mode, compresslevel, filename)
    else:
        raise TypeError("filename must be a str or bytes object, or a file")

    if "t" not in mode:
        return binary_file
    # Text mode: wrap the binary stream in a decoding layer.
    encoding = io.text_encoding(encoding)
    return io.TextIOWrapper(binary_file, encoding, errors, newline)
|
| 69 |
+
|
| 70 |
+
def write32u(output, value):
    """Write *value* to *output* as a 32-bit little-endian unsigned int.

    The '<L' format emits the correct bit pattern whether the Python
    int was derived from a signed or an unsigned quantity.
    """
    packed = struct.pack("<L", value)
    output.write(packed)
|
| 74 |
+
|
| 75 |
+
class _PaddedFile:
|
| 76 |
+
"""Minimal read-only file object that prepends a string to the contents
|
| 77 |
+
of an actual file. Shouldn't be used outside of gzip.py, as it lacks
|
| 78 |
+
essential functionality."""
|
| 79 |
+
|
| 80 |
+
def __init__(self, f, prepend=b''):
|
| 81 |
+
self._buffer = prepend
|
| 82 |
+
self._length = len(prepend)
|
| 83 |
+
self.file = f
|
| 84 |
+
self._read = 0
|
| 85 |
+
|
| 86 |
+
def read(self, size):
|
| 87 |
+
if self._read is None:
|
| 88 |
+
return self.file.read(size)
|
| 89 |
+
if self._read + size <= self._length:
|
| 90 |
+
read = self._read
|
| 91 |
+
self._read += size
|
| 92 |
+
return self._buffer[read:self._read]
|
| 93 |
+
else:
|
| 94 |
+
read = self._read
|
| 95 |
+
self._read = None
|
| 96 |
+
return self._buffer[read:] + \
|
| 97 |
+
self.file.read(size-self._length+read)
|
| 98 |
+
|
| 99 |
+
def prepend(self, prepend=b''):
|
| 100 |
+
if self._read is None:
|
| 101 |
+
self._buffer = prepend
|
| 102 |
+
else: # Assume data was read since the last prepend() call
|
| 103 |
+
self._read -= len(prepend)
|
| 104 |
+
return
|
| 105 |
+
self._length = len(self._buffer)
|
| 106 |
+
self._read = 0
|
| 107 |
+
|
| 108 |
+
def seek(self, off):
|
| 109 |
+
self._read = None
|
| 110 |
+
self._buffer = None
|
| 111 |
+
return self.file.seek(off)
|
| 112 |
+
|
| 113 |
+
def seekable(self):
|
| 114 |
+
return True # Allows fast-forwarding even in unseekable streams
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class BadGzipFile(OSError):
    """Raised when a stream is not valid gzip data (bad magic number,
    unknown compression method, failed CRC or length check)."""
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
class GzipFile(_compression.BaseStream):
    """The GzipFile class simulates most of the methods of a file object with
    the exception of the truncate() method.

    This class only supports opening files in binary mode. If you need to open a
    compressed file in text mode, use the gzip.open() function.

    """

    # Overridden with internal file object to be closed, if only a filename
    # is passed in
    myfileobj = None

    def __init__(self, filename=None, mode=None,
                 compresslevel=_COMPRESS_LEVEL_BEST, fileobj=None, mtime=None):
        """Constructor for the GzipFile class.

        At least one of fileobj and filename must be given a
        non-trivial value.

        The new class instance is based on fileobj, which can be a regular
        file, an io.BytesIO object, or any other object which simulates a file.
        It defaults to None, in which case filename is opened to provide
        a file object.

        When fileobj is not None, the filename argument is only used to be
        included in the gzip file header, which may include the original
        filename of the uncompressed file.  It defaults to the filename of
        fileobj, if discernible; otherwise, it defaults to the empty string,
        and in this case the original filename is not included in the header.

        The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', 'wb', 'x', or
        'xb' depending on whether the file will be read or written.  The default
        is the mode of fileobj if discernible; otherwise, the default is 'rb'.
        A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and
        'wb', 'a' and 'ab', and 'x' and 'xb'.

        The compresslevel argument is an integer from 0 to 9 controlling the
        level of compression; 1 is fastest and produces the least compression,
        and 9 is slowest and produces the most compression. 0 is no compression
        at all. The default is 9.

        The mtime argument is an optional numeric timestamp to be written
        to the last modification time field in the stream when compressing.
        If omitted or None, the current time is used.

        """

        # Text and universal-newline modes make no sense for a binary stream.
        if mode and ('t' in mode or 'U' in mode):
            raise ValueError("Invalid mode: {!r}".format(mode))
        if mode and 'b' not in mode:
            mode += 'b'
        if fileobj is None:
            # Only a filename was given: open it ourselves and remember the
            # handle so close() can also close it.
            fileobj = self.myfileobj = builtins.open(filename, mode or 'rb')
        if filename is None:
            # Best effort: take the name from the file object for the header.
            filename = getattr(fileobj, 'name', '')
            if not isinstance(filename, (str, bytes)):
                filename = ''
        else:
            filename = os.fspath(filename)
        origmode = mode
        if mode is None:
            mode = getattr(fileobj, 'mode', 'rb')

        if mode.startswith('r'):
            self.mode = READ
            raw = _GzipReader(fileobj)
            self._buffer = io.BufferedReader(raw)
            self.name = filename

        elif mode.startswith(('w', 'a', 'x')):
            if origmode is None:
                # Writing because fileobj.mode said so, not because the
                # caller asked: warn that this implicit default will change.
                import warnings
                warnings.warn(
                    "GzipFile was opened for writing, but this will "
                    "change in future Python releases. "
                    "Specify the mode argument for opening it for writing.",
                    FutureWarning, 2)
            self.mode = WRITE
            self._init_write(filename)
            self.compress = zlib.compressobj(compresslevel,
                                             zlib.DEFLATED,
                                             -zlib.MAX_WBITS,
                                             zlib.DEF_MEM_LEVEL,
                                             0)
            self._write_mtime = mtime
        else:
            raise ValueError("Invalid mode: {!r}".format(mode))

        self.fileobj = fileobj

        if self.mode == WRITE:
            self._write_gzip_header(compresslevel)

    @property
    def filename(self):
        # Deprecated accessor kept for backward compatibility; use .name.
        import warnings
        warnings.warn("use the name attribute", DeprecationWarning, 2)
        if self.mode == WRITE and self.name[-3:] != ".gz":
            return self.name + ".gz"
        return self.name

    @property
    def mtime(self):
        """Last modification time read from stream, or None"""
        return self._buffer.raw._last_mtime

    def __repr__(self):
        s = repr(self.fileobj)
        return '<gzip ' + s[1:-1] + ' ' + hex(id(self)) + '>'

    def _init_write(self, filename):
        # Reset per-stream write state (CRC, sizes, logical offset).
        self.name = filename
        self.crc = zlib.crc32(b"")
        self.size = 0
        self.writebuf = []
        self.bufsize = 0
        self.offset = 0  # Current file offset for seek(), tell(), etc

    def _write_gzip_header(self, compresslevel):
        """Emit the RFC 1952 member header to the underlying file object."""
        self.fileobj.write(b'\037\213')             # magic header
        self.fileobj.write(b'\010')                 # compression method
        try:
            # RFC 1952 requires the FNAME field to be Latin-1. Do not
            # include filenames that cannot be represented that way.
            fname = os.path.basename(self.name)
            if not isinstance(fname, bytes):
                fname = fname.encode('latin-1')
            if fname.endswith(b'.gz'):
                fname = fname[:-3]
        except UnicodeEncodeError:
            fname = b''
        flags = 0
        if fname:
            flags = FNAME
        self.fileobj.write(chr(flags).encode('latin-1'))
        mtime = self._write_mtime
        if mtime is None:
            mtime = time.time()
        write32u(self.fileobj, int(mtime))
        # XFL hints at the compression effort used (per RFC 1952).
        if compresslevel == _COMPRESS_LEVEL_BEST:
            xfl = b'\002'
        elif compresslevel == _COMPRESS_LEVEL_FAST:
            xfl = b'\004'
        else:
            xfl = b'\000'
        self.fileobj.write(xfl)
        self.fileobj.write(b'\377')                 # OS byte: unknown
        if fname:
            self.fileobj.write(fname + b'\000')

    def write(self,data):
        """Compress *data* and write it to the stream; return its length."""
        self._check_not_closed()
        if self.mode != WRITE:
            import errno
            raise OSError(errno.EBADF, "write() on read-only GzipFile object")

        if self.fileobj is None:
            raise ValueError("write() on closed GzipFile object")

        if isinstance(data, (bytes, bytearray)):
            length = len(data)
        else:
            # accept any data that supports the buffer protocol
            data = memoryview(data)
            length = data.nbytes

        if length > 0:
            self.fileobj.write(self.compress.compress(data))
            self.size += length
            self.crc = zlib.crc32(data, self.crc)
            self.offset += length

        return length

    def read(self, size=-1):
        """Read up to *size* decompressed bytes (all if size < 0)."""
        self._check_not_closed()
        if self.mode != READ:
            import errno
            raise OSError(errno.EBADF, "read() on write-only GzipFile object")
        return self._buffer.read(size)

    def read1(self, size=-1):
        """Implements BufferedIOBase.read1()

        Reads up to a buffer's worth of data if size is negative."""
        self._check_not_closed()
        if self.mode != READ:
            import errno
            raise OSError(errno.EBADF, "read1() on write-only GzipFile object")

        if size < 0:
            size = io.DEFAULT_BUFFER_SIZE
        return self._buffer.read1(size)

    def peek(self, n):
        """Return buffered decompressed bytes without advancing the position."""
        self._check_not_closed()
        if self.mode != READ:
            import errno
            raise OSError(errno.EBADF, "peek() on write-only GzipFile object")
        return self._buffer.peek(n)

    @property
    def closed(self):
        return self.fileobj is None

    def close(self):
        """Finish the stream (flush + trailer in write mode) and close it."""
        fileobj = self.fileobj
        if fileobj is None:
            return
        self.fileobj = None
        try:
            if self.mode == WRITE:
                fileobj.write(self.compress.flush())
                write32u(fileobj, self.crc)
                # self.size may exceed 2 GiB, or even 4 GiB
                write32u(fileobj, self.size & 0xffffffff)
            elif self.mode == READ:
                self._buffer.close()
        finally:
            # Close the file we opened ourselves, if any.
            myfileobj = self.myfileobj
            if myfileobj:
                self.myfileobj = None
                myfileobj.close()

    def flush(self,zlib_mode=zlib.Z_SYNC_FLUSH):
        """Flush pending compressed data to the underlying file object."""
        self._check_not_closed()
        if self.mode == WRITE:
            # Ensure the compressor's buffer is flushed
            self.fileobj.write(self.compress.flush(zlib_mode))
            self.fileobj.flush()

    def fileno(self):
        """Invoke the underlying file object's fileno() method.

        This will raise AttributeError if the underlying file object
        doesn't support fileno().
        """
        return self.fileobj.fileno()

    def rewind(self):
        '''Return the uncompressed stream file position indicator to the
        beginning of the file'''
        if self.mode != READ:
            raise OSError("Can't rewind in write mode")
        self._buffer.seek(0)

    def readable(self):
        return self.mode == READ

    def writable(self):
        return self.mode == WRITE

    def seekable(self):
        return True

    def seek(self, offset, whence=io.SEEK_SET):
        """Seek in the uncompressed stream.

        In write mode only forward seeks are possible; the gap is filled
        with zero bytes.  In read mode this delegates to the buffered
        reader (which may re-decompress from the start).
        """
        if self.mode == WRITE:
            if whence != io.SEEK_SET:
                if whence == io.SEEK_CUR:
                    offset = self.offset + offset
                else:
                    raise ValueError('Seek from end not supported')
            if offset < self.offset:
                raise OSError('Negative seek in write mode')
            # Pad with zeros in 1 KiB chunks up to the target offset.
            count = offset - self.offset
            chunk = b'\0' * 1024
            for i in range(count // 1024):
                self.write(chunk)
            self.write(b'\0' * (count % 1024))
        elif self.mode == READ:
            self._check_not_closed()
            return self._buffer.seek(offset, whence)

        return self.offset

    def readline(self, size=-1):
        """Read one line of decompressed data (at most *size* bytes)."""
        self._check_not_closed()
        return self._buffer.readline(size)
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
class _GzipReader(_compression.DecompressReader):
|
| 403 |
+
def __init__(self, fp):
|
| 404 |
+
super().__init__(_PaddedFile(fp), zlib.decompressobj,
|
| 405 |
+
wbits=-zlib.MAX_WBITS)
|
| 406 |
+
# Set flag indicating start of a new member
|
| 407 |
+
self._new_member = True
|
| 408 |
+
self._last_mtime = None
|
| 409 |
+
|
| 410 |
+
def _init_read(self):
|
| 411 |
+
self._crc = zlib.crc32(b"")
|
| 412 |
+
self._stream_size = 0 # Decompressed size of unconcatenated stream
|
| 413 |
+
|
| 414 |
+
def _read_exact(self, n):
|
| 415 |
+
'''Read exactly *n* bytes from `self._fp`
|
| 416 |
+
|
| 417 |
+
This method is required because self._fp may be unbuffered,
|
| 418 |
+
i.e. return short reads.
|
| 419 |
+
'''
|
| 420 |
+
|
| 421 |
+
data = self._fp.read(n)
|
| 422 |
+
while len(data) < n:
|
| 423 |
+
b = self._fp.read(n - len(data))
|
| 424 |
+
if not b:
|
| 425 |
+
raise EOFError("Compressed file ended before the "
|
| 426 |
+
"end-of-stream marker was reached")
|
| 427 |
+
data += b
|
| 428 |
+
return data
|
| 429 |
+
|
| 430 |
+
def _read_gzip_header(self):
|
| 431 |
+
magic = self._fp.read(2)
|
| 432 |
+
if magic == b'':
|
| 433 |
+
return False
|
| 434 |
+
|
| 435 |
+
if magic != b'\037\213':
|
| 436 |
+
raise BadGzipFile('Not a gzipped file (%r)' % magic)
|
| 437 |
+
|
| 438 |
+
(method, flag,
|
| 439 |
+
self._last_mtime) = struct.unpack("<BBIxx", self._read_exact(8))
|
| 440 |
+
if method != 8:
|
| 441 |
+
raise BadGzipFile('Unknown compression method')
|
| 442 |
+
|
| 443 |
+
if flag & FEXTRA:
|
| 444 |
+
# Read & discard the extra field, if present
|
| 445 |
+
extra_len, = struct.unpack("<H", self._read_exact(2))
|
| 446 |
+
self._read_exact(extra_len)
|
| 447 |
+
if flag & FNAME:
|
| 448 |
+
# Read and discard a null-terminated string containing the filename
|
| 449 |
+
while True:
|
| 450 |
+
s = self._fp.read(1)
|
| 451 |
+
if not s or s==b'\000':
|
| 452 |
+
break
|
| 453 |
+
if flag & FCOMMENT:
|
| 454 |
+
# Read and discard a null-terminated string containing a comment
|
| 455 |
+
while True:
|
| 456 |
+
s = self._fp.read(1)
|
| 457 |
+
if not s or s==b'\000':
|
| 458 |
+
break
|
| 459 |
+
if flag & FHCRC:
|
| 460 |
+
self._read_exact(2) # Read & discard the 16-bit header CRC
|
| 461 |
+
return True
|
| 462 |
+
|
| 463 |
+
def read(self, size=-1):
|
| 464 |
+
if size < 0:
|
| 465 |
+
return self.readall()
|
| 466 |
+
# size=0 is special because decompress(max_length=0) is not supported
|
| 467 |
+
if not size:
|
| 468 |
+
return b""
|
| 469 |
+
|
| 470 |
+
# For certain input data, a single
|
| 471 |
+
# call to decompress() may not return
|
| 472 |
+
# any data. In this case, retry until we get some data or reach EOF.
|
| 473 |
+
while True:
|
| 474 |
+
if self._decompressor.eof:
|
| 475 |
+
# Ending case: we've come to the end of a member in the file,
|
| 476 |
+
# so finish up this member, and read a new gzip header.
|
| 477 |
+
# Check the CRC and file size, and set the flag so we read
|
| 478 |
+
# a new member
|
| 479 |
+
self._read_eof()
|
| 480 |
+
self._new_member = True
|
| 481 |
+
self._decompressor = self._decomp_factory(
|
| 482 |
+
**self._decomp_args)
|
| 483 |
+
|
| 484 |
+
if self._new_member:
|
| 485 |
+
# If the _new_member flag is set, we have to
|
| 486 |
+
# jump to the next member, if there is one.
|
| 487 |
+
self._init_read()
|
| 488 |
+
if not self._read_gzip_header():
|
| 489 |
+
self._size = self._pos
|
| 490 |
+
return b""
|
| 491 |
+
self._new_member = False
|
| 492 |
+
|
| 493 |
+
# Read a chunk of data from the file
|
| 494 |
+
buf = self._fp.read(io.DEFAULT_BUFFER_SIZE)
|
| 495 |
+
|
| 496 |
+
uncompress = self._decompressor.decompress(buf, size)
|
| 497 |
+
if self._decompressor.unconsumed_tail != b"":
|
| 498 |
+
self._fp.prepend(self._decompressor.unconsumed_tail)
|
| 499 |
+
elif self._decompressor.unused_data != b"":
|
| 500 |
+
# Prepend the already read bytes to the fileobj so they can
|
| 501 |
+
# be seen by _read_eof() and _read_gzip_header()
|
| 502 |
+
self._fp.prepend(self._decompressor.unused_data)
|
| 503 |
+
|
| 504 |
+
if uncompress != b"":
|
| 505 |
+
break
|
| 506 |
+
if buf == b"":
|
| 507 |
+
raise EOFError("Compressed file ended before the "
|
| 508 |
+
"end-of-stream marker was reached")
|
| 509 |
+
|
| 510 |
+
self._add_read_data( uncompress )
|
| 511 |
+
self._pos += len(uncompress)
|
| 512 |
+
return uncompress
|
| 513 |
+
|
| 514 |
+
def _add_read_data(self, data):
    # Fold freshly decompressed bytes into the running CRC-32 and byte
    # count; _read_eof() later compares both against the gzip trailer.
    self._crc = zlib.crc32(data, self._crc)
    self._stream_size += len(data)
|
| 517 |
+
|
| 518 |
+
def _read_eof(self):
    """Validate the 8-byte gzip member trailer and skip zero padding.

    The trailer stores the CRC-32 and the uncompressed size (mod 2**32)
    of the member just read; both are checked against the values
    accumulated via _add_read_data().  Raises BadGzipFile on mismatch.
    """
    crc32, isize = struct.unpack("<II", self._read_exact(8))
    if crc32 != self._crc:
        raise BadGzipFile("CRC check failed %s != %s" % (hex(crc32),
                                                         hex(self._crc)))
    elif isize != (self._stream_size & 0xffffffff):
        raise BadGzipFile("Incorrect length of data produced")

    # Gzip files can be padded with zeroes and still have archives.
    # Skip any NUL padding and push the first non-zero byte back so the
    # next header read starts at the right offset.
    # See http://www.gzip.org/#faq8
    byte = self._fp.read(1)
    while byte == b"\x00":
        byte = self._fp.read(1)
    if byte:
        self._fp.prepend(byte)
|
| 538 |
+
|
| 539 |
+
def _rewind(self):
    # Reset the base reader's position, then flag that the next read()
    # must parse a fresh gzip member header from the start of the file.
    super()._rewind()
    self._new_member = True
|
| 542 |
+
|
| 543 |
+
def compress(data, compresslevel=_COMPRESS_LEVEL_BEST, *, mtime=None):
    """Compress *data* in one shot and return it as a single gzip member.

    compresslevel is the zlib compression level (0-9, default best);
    mtime is forwarded to the gzip header's timestamp field.
    """
    sink = io.BytesIO()
    writer = GzipFile(fileobj=sink, mode='wb',
                      compresslevel=compresslevel, mtime=mtime)
    with writer as stream:
        stream.write(data)
    return sink.getvalue()
|
| 551 |
+
|
| 552 |
+
def decompress(data):
    """Decompress a gzip compressed string in one shot.

    Return the decompressed string.
    """
    source = io.BytesIO(data)
    with GzipFile(fileobj=source) as stream:
        return stream.read()
|
| 558 |
+
|
| 559 |
+
|
| 560 |
+
def main():
    """Command-line entry point: behave like gzip/gunzip on the given
    files (or stdin/stdout for "-"), but never delete the input file."""
    from argparse import ArgumentParser
    parser = ArgumentParser(description=
        "A simple command line interface for the gzip module: act like gzip, "
        "but do not delete the input file.")
    # --fast and --best are mutually exclusive compression-level shortcuts.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--fast', action='store_true', help='compress faster')
    group.add_argument('--best', action='store_true', help='compress better')
    group.add_argument("-d", "--decompress", action="store_true",
                        help="act like gunzip instead of gzip")

    parser.add_argument("args", nargs="*", default=["-"], metavar='file')
    args = parser.parse_args()

    # Default to the speed/size tradeoff level unless a flag overrides it.
    compresslevel = _COMPRESS_LEVEL_TRADEOFF
    if args.fast:
        compresslevel = _COMPRESS_LEVEL_FAST
    elif args.best:
        compresslevel = _COMPRESS_LEVEL_BEST

    for arg in args.args:
        if args.decompress:
            if arg == "-":
                # "-": read gzip data from stdin, write plain bytes to stdout.
                f = GzipFile(filename="", mode="rb", fileobj=sys.stdin.buffer)
                g = sys.stdout.buffer
            else:
                if arg[-3:] != ".gz":
                    sys.exit(f"filename doesn't end in .gz: {arg!r}")
                # Bare open() is this module's gzip-aware open (reads the
                # compressed input); builtins.open writes the plain output.
                f = open(arg, "rb")
                g = builtins.open(arg[:-3], "wb")
        else:
            if arg == "-":
                f = sys.stdin.buffer
                g = GzipFile(filename="", mode="wb", fileobj=sys.stdout.buffer,
                             compresslevel=compresslevel)
            else:
                # Plain file in, gzip-wrapped file out ("<name>.gz").
                f = builtins.open(arg, "rb")
                g = open(arg + ".gz", "wb")
        # Copy in buffered chunks until EOF.
        while True:
            chunk = f.read(io.DEFAULT_BUFFER_SIZE)
            if not chunk:
                break
            g.write(chunk)
        # Close only streams we opened ourselves; never the std streams.
        if g is not sys.stdout.buffer:
            g.close()
        if f is not sys.stdin.buffer:
            f.close()
|
| 607 |
+
|
| 608 |
+
# Allow running this module as a script: python -m gzip [options] [files]
if __name__ == '__main__':
    main()
|
evalkit_llava/lib/python3.10/json/__init__.py
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""JSON (JavaScript Object Notation) <https://json.org> is a subset of
|
| 2 |
+
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
| 3 |
+
interchange format.
|
| 4 |
+
|
| 5 |
+
:mod:`json` exposes an API familiar to users of the standard library
|
| 6 |
+
:mod:`marshal` and :mod:`pickle` modules. It is derived from a
|
| 7 |
+
version of the externally maintained simplejson library.
|
| 8 |
+
|
| 9 |
+
Encoding basic Python object hierarchies::
|
| 10 |
+
|
| 11 |
+
>>> import json
|
| 12 |
+
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
| 13 |
+
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
| 14 |
+
>>> print(json.dumps("\"foo\bar"))
|
| 15 |
+
"\"foo\bar"
|
| 16 |
+
>>> print(json.dumps('\u1234'))
|
| 17 |
+
"\u1234"
|
| 18 |
+
>>> print(json.dumps('\\'))
|
| 19 |
+
"\\"
|
| 20 |
+
>>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True))
|
| 21 |
+
{"a": 0, "b": 0, "c": 0}
|
| 22 |
+
>>> from io import StringIO
|
| 23 |
+
>>> io = StringIO()
|
| 24 |
+
>>> json.dump(['streaming API'], io)
|
| 25 |
+
>>> io.getvalue()
|
| 26 |
+
'["streaming API"]'
|
| 27 |
+
|
| 28 |
+
Compact encoding::
|
| 29 |
+
|
| 30 |
+
>>> import json
|
| 31 |
+
>>> mydict = {'4': 5, '6': 7}
|
| 32 |
+
>>> json.dumps([1,2,3,mydict], separators=(',', ':'))
|
| 33 |
+
'[1,2,3,{"4":5,"6":7}]'
|
| 34 |
+
|
| 35 |
+
Pretty printing::
|
| 36 |
+
|
| 37 |
+
>>> import json
|
| 38 |
+
>>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4))
|
| 39 |
+
{
|
| 40 |
+
"4": 5,
|
| 41 |
+
"6": 7
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
Decoding JSON::
|
| 45 |
+
|
| 46 |
+
>>> import json
|
| 47 |
+
>>> obj = ['foo', {'bar': ['baz', None, 1.0, 2]}]
|
| 48 |
+
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
| 49 |
+
True
|
| 50 |
+
>>> json.loads('"\\"foo\\bar"') == '"foo\x08ar'
|
| 51 |
+
True
|
| 52 |
+
>>> from io import StringIO
|
| 53 |
+
>>> io = StringIO('["streaming API"]')
|
| 54 |
+
>>> json.load(io)[0] == 'streaming API'
|
| 55 |
+
True
|
| 56 |
+
|
| 57 |
+
Specializing JSON object decoding::
|
| 58 |
+
|
| 59 |
+
>>> import json
|
| 60 |
+
>>> def as_complex(dct):
|
| 61 |
+
... if '__complex__' in dct:
|
| 62 |
+
... return complex(dct['real'], dct['imag'])
|
| 63 |
+
... return dct
|
| 64 |
+
...
|
| 65 |
+
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
| 66 |
+
... object_hook=as_complex)
|
| 67 |
+
(1+2j)
|
| 68 |
+
>>> from decimal import Decimal
|
| 69 |
+
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
|
| 70 |
+
True
|
| 71 |
+
|
| 72 |
+
Specializing JSON object encoding::
|
| 73 |
+
|
| 74 |
+
>>> import json
|
| 75 |
+
>>> def encode_complex(obj):
|
| 76 |
+
... if isinstance(obj, complex):
|
| 77 |
+
... return [obj.real, obj.imag]
|
| 78 |
+
... raise TypeError(f'Object of type {obj.__class__.__name__} '
|
| 79 |
+
... f'is not JSON serializable')
|
| 80 |
+
...
|
| 81 |
+
>>> json.dumps(2 + 1j, default=encode_complex)
|
| 82 |
+
'[2.0, 1.0]'
|
| 83 |
+
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
| 84 |
+
'[2.0, 1.0]'
|
| 85 |
+
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
| 86 |
+
'[2.0, 1.0]'
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
Using json.tool from the shell to validate and pretty-print::
|
| 90 |
+
|
| 91 |
+
$ echo '{"json":"obj"}' | python -m json.tool
|
| 92 |
+
{
|
| 93 |
+
"json": "obj"
|
| 94 |
+
}
|
| 95 |
+
$ echo '{ 1.2:3.4}' | python -m json.tool
|
| 96 |
+
Expecting property name enclosed in double quotes: line 1 column 3 (char 2)
|
| 97 |
+
"""
|
| 98 |
+
__version__ = '2.0.9'
|
| 99 |
+
__all__ = [
|
| 100 |
+
'dump', 'dumps', 'load', 'loads',
|
| 101 |
+
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
|
| 102 |
+
]
|
| 103 |
+
|
| 104 |
+
__author__ = 'Bob Ippolito <[email protected]>'
|
| 105 |
+
|
| 106 |
+
from .decoder import JSONDecoder, JSONDecodeError
|
| 107 |
+
from .encoder import JSONEncoder
|
| 108 |
+
import codecs
|
| 109 |
+
|
| 110 |
+
# Shared encoder reused by dump()/dumps() when every argument is left at
# its default; avoids constructing a new JSONEncoder on each call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    default=None,
)
|
| 119 |
+
|
| 120 |
+
def dump(obj, fp, *, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        default=None, sort_keys=False, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    ``skipkeys``: skip (instead of raising ``TypeError`` on) dict keys
    that are not of a basic type (``str``, ``int``, ``float``, ``bool``,
    ``None``).

    ``ensure_ascii``: when true (default), all non-ASCII characters in
    output strings are escaped; when false, they are written as-is.

    ``check_circular``: when false, the circular reference check for
    container types is skipped and a circular reference results in a
    ``RecursionError`` (or worse).

    ``allow_nan``: when false, serializing out-of-range ``float`` values
    (``nan``, ``inf``, ``-inf``) raises ``ValueError`` in strict JSON
    compliance, instead of using the JavaScript equivalents (``NaN``,
    ``Infinity``, ``-Infinity``).

    ``indent``: a non-negative integer pretty-prints array elements and
    object members with that indent level; 0 inserts only newlines;
    ``None`` (default) is the most compact representation.

    ``separators``: an ``(item_separator, key_separator)`` tuple.  The
    default is ``(', ', ': ')`` if *indent* is ``None`` and
    ``(',', ': ')`` otherwise; use ``(',', ':')`` for the most compact
    JSON.

    ``default(obj)``: a function returning a serializable version of
    obj, or raising TypeError; the default simply raises TypeError.

    ``sort_keys``: when true (default ``False``), dictionary output is
    sorted by key.

    ``cls``: a custom ``JSONEncoder`` subclass (e.g. one overriding
    ``.default()``); ``JSONEncoder`` is used when omitted.
    """
    all_defaults = (not skipkeys and ensure_ascii and
                    check_circular and allow_nan and
                    cls is None and indent is None and separators is None and
                    default is None and not sort_keys and not kw)
    if all_defaults:
        # Fast path: reuse the cached module-level encoder.
        chunks = _default_encoder.iterencode(obj)
    else:
        encoder_cls = JSONEncoder if cls is None else cls
        chunks = encoder_cls(
            skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, default=default,
            sort_keys=sort_keys, **kw).iterencode(obj)
    # Write chunk-by-chunk: joining first would cost memory and hide the
    # streaming behaviour from file-like objects.  (writelines could
    # accelerate this on some Pythons, at a debuggability cost.)
    for chunk in chunks:
        fp.write(chunk)
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def dumps(obj, *, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        default=None, sort_keys=False, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    ``skipkeys``: skip (instead of raising ``TypeError`` on) dict keys
    that are not of a basic type (``str``, ``int``, ``float``, ``bool``,
    ``None``).

    ``ensure_ascii``: when true (default), all non-ASCII characters in
    the result are escaped; when false, they appear as-is.

    ``check_circular``: when false, the circular reference check for
    container types is skipped and a circular reference results in a
    ``RecursionError`` (or worse).

    ``allow_nan``: when false, serializing out-of-range ``float`` values
    (``nan``, ``inf``, ``-inf``) raises ``ValueError`` in strict JSON
    compliance, instead of using the JavaScript equivalents (``NaN``,
    ``Infinity``, ``-Infinity``).

    ``indent``: a non-negative integer pretty-prints array elements and
    object members with that indent level; 0 inserts only newlines;
    ``None`` (default) is the most compact representation.

    ``separators``: an ``(item_separator, key_separator)`` tuple.  The
    default is ``(', ', ': ')`` if *indent* is ``None`` and
    ``(',', ': ')`` otherwise; use ``(',', ':')`` for the most compact
    JSON.

    ``default(obj)``: a function returning a serializable version of
    obj, or raising TypeError; the default simply raises TypeError.

    ``sort_keys``: when true (default ``False``), dictionary output is
    sorted by key.

    ``cls``: a custom ``JSONEncoder`` subclass (e.g. one overriding
    ``.default()``); ``JSONEncoder`` is used when omitted.
    """
    all_defaults = (not skipkeys and ensure_ascii and
                    check_circular and allow_nan and
                    cls is None and indent is None and separators is None and
                    default is None and not sort_keys and not kw)
    if all_defaults:
        # Fast path: reuse the cached module-level encoder.
        return _default_encoder.encode(obj)
    encoder_cls = JSONEncoder if cls is None else cls
    encoder = encoder_cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, default=default, sort_keys=sort_keys,
        **kw)
    return encoder.encode(obj)
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
# Shared decoder reused by loads() when no custom class or hooks are given.
_default_decoder = JSONDecoder(object_hook=None, object_pairs_hook=None)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def detect_encoding(b):
    """Return the name of the Unicode encoding used by JSON bytes *b*.

    Explicit BOMs are honoured first; otherwise the position of NUL
    bytes among the first characters determines the encoding (valid
    JSON starts with an ASCII character, so NUL placement is decisive).
    Falls back to 'utf-8'.
    """
    # BOM checks: UTF-32 must be tested before UTF-16, because the
    # UTF-32-LE BOM begins with the UTF-16-LE BOM.
    if b.startswith((codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE)):
        return 'utf-32'
    if b.startswith((codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE)):
        return 'utf-16'
    if b.startswith(codecs.BOM_UTF8):
        return 'utf-8-sig'

    length = len(b)
    if length >= 4:
        if not b[0]:
            # 00 00 -- --  -> utf-32-be;  00 XX -- --  -> utf-16-be
            if b[1]:
                return 'utf-16-be'
            return 'utf-32-be'
        if not b[1]:
            # XX 00 00 00  -> utf-32-le;  XX 00 00 XX / XX 00 XX -- -> utf-16-le
            if b[2] or b[3]:
                return 'utf-16-le'
            return 'utf-32-le'
    elif length == 2:
        if not b[0]:
            return 'utf-16-be'   # 00 XX
        if not b[1]:
            return 'utf-16-le'   # XX 00
    # default
    return 'utf-8'
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def load(fp, *, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    ``object_hook``: optional function called with the result of every
    object literal decode (a ``dict``); its return value replaces the
    ``dict``.  Useful for custom decoders (e.g. JSON-RPC class hinting).

    ``object_pairs_hook``: optional function called with the result of
    every object literal decoded as an ordered list of pairs; its return
    value replaces the ``dict``.  Takes priority over ``object_hook``
    when both are given.

    ``cls``: a custom ``JSONDecoder`` subclass; ``JSONDecoder`` is used
    when omitted.
    """
    # Slurp the whole document and delegate; loads() owns all decoding
    # logic, including encoding detection for bytes input.
    return loads(
        fp.read(), cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant,
        object_pairs_hook=object_pairs_hook, **kw)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def loads(s, *, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
    containing a JSON document) to a Python object.

    ``object_hook``: optional function called with the result of every
    object literal decode (a ``dict``); its return value replaces the
    ``dict``.  Useful for custom decoders (e.g. JSON-RPC class hinting).

    ``object_pairs_hook``: optional function called with the result of
    every object literal decoded as an ordered list of pairs; its return
    value replaces the ``dict``.  Takes priority over ``object_hook``
    when both are given.

    ``parse_float``: called with the string of every JSON float to be
    decoded (default equivalent to ``float(num_str)``); lets you plug in
    another datatype or parser such as ``decimal.Decimal``.

    ``parse_int``: called with the string of every JSON int to be
    decoded (default equivalent to ``int(num_str)``); lets you plug in
    another datatype or parser such as ``float``.

    ``parse_constant``: called with one of ``-Infinity``, ``Infinity``,
    ``NaN``; can be used to raise on invalid JSON numbers.

    ``cls``: a custom ``JSONDecoder`` subclass; ``JSONDecoder`` is used
    when omitted.
    """
    if isinstance(s, str):
        # A leading BOM is not legal in a decoded str document.
        if s.startswith('\ufeff'):
            raise JSONDecodeError("Unexpected UTF-8 BOM (decode using utf-8-sig)",
                                  s, 0)
    elif isinstance(s, (bytes, bytearray)):
        # Sniff the encoding from BOM / NUL layout, then decode.
        s = s.decode(detect_encoding(s), 'surrogatepass')
    else:
        raise TypeError(f'the JSON object must be str, bytes or bytearray, '
                        f'not {s.__class__.__name__}')

    no_customization = (cls is None and object_hook is None and
                        parse_int is None and parse_float is None and
                        parse_constant is None and object_pairs_hook is None
                        and not kw)
    if no_customization:
        # Fast path: reuse the cached module-level decoder.
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Only forward hooks that were actually supplied, so the decoder's
    # own defaults stay in effect for the rest.
    optional = (('object_hook', object_hook),
                ('object_pairs_hook', object_pairs_hook),
                ('parse_float', parse_float),
                ('parse_int', parse_int),
                ('parse_constant', parse_constant))
    for name, value in optional:
        if value is not None:
            kw[name] = value
    return cls(**kw).decode(s)
|
evalkit_llava/lib/python3.10/json/__pycache__/encoder.cpython-310.pyc
ADDED
|
Binary file (11.4 kB). View file
|
|
|
evalkit_llava/lib/python3.10/json/__pycache__/scanner.cpython-310.pyc
ADDED
|
Binary file (1.91 kB). View file
|
|
|
evalkit_llava/lib/python3.10/json/scanner.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""JSON token scanner
|
| 2 |
+
"""
|
| 3 |
+
import re
|
| 4 |
+
try:
|
| 5 |
+
from _json import make_scanner as c_make_scanner
|
| 6 |
+
except ImportError:
|
| 7 |
+
c_make_scanner = None
|
| 8 |
+
|
| 9 |
+
__all__ = ['make_scanner']
|
| 10 |
+
|
| 11 |
+
# Matches a JSON number: mandatory integer part (no leading zeros),
# optional fraction, optional exponent.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
|
| 14 |
+
|
| 15 |
+
def py_make_scanner(context):
    """Build and return a ``scan_once(string, idx)`` callable for the
    given decoder *context* (a JSONDecoder instance).

    The returned callable parses one JSON value starting at ``idx`` and
    returns ``(value, end_index)``; it raises ``StopIteration(idx)``
    when no value can be parsed at that position.
    """
    # Hoist every context attribute into a local once, so the recursive
    # _scan_once closure uses fast local lookups in its hot path.
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook
    memo = context.memo

    def _scan_once(string, idx):
        # Parse exactly one value at idx; may recurse via parse_object /
        # parse_array, which call back into _scan_once.
        try:
            nextchar = string[idx]
        except IndexError:
            # Past the end of input: no value here.
            raise StopIteration(idx) from None

        if nextchar == '"':
            return parse_string(string, idx + 1, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), strict,
                _scan_once, object_hook, object_pairs_hook, memo)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5

        # Numbers: use parse_int when there is no fraction/exponent.
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration(idx)

    def scan_once(string, idx):
        # Public wrapper: clear the key-string memo after each top-level
        # scan so it does not grow across documents.
        try:
            return _scan_once(string, idx)
        finally:
            memo.clear()

    return scan_once
|
| 72 |
+
|
| 73 |
+
# Prefer the C-accelerated scanner from _json when it imported above.
make_scanner = c_make_scanner or py_make_scanner
|
evalkit_llava/lib/python3.10/keyword.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Keywords (from "Grammar/python.gram")
|
| 2 |
+
|
| 3 |
+
This file is automatically generated; please don't muck it up!
|
| 4 |
+
|
| 5 |
+
To update the symbols in this file, 'cd' to the top directory of
|
| 6 |
+
the python source tree and run:
|
| 7 |
+
|
| 8 |
+
PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
|
| 9 |
+
Grammar/python.gram \
|
| 10 |
+
Grammar/Tokens \
|
| 11 |
+
Lib/keyword.py
|
| 12 |
+
|
| 13 |
+
Alternatively, you can run 'make regen-keyword'.
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"]
|
| 17 |
+
|
| 18 |
+
# Hard keywords: always reserved, regardless of context.  This list is
# generated (see the module docstring) -- do not edit it by hand.
kwlist = [
    'False',
    'None',
    'True',
    'and',
    'as',
    'assert',
    'async',
    'await',
    'break',
    'class',
    'continue',
    'def',
    'del',
    'elif',
    'else',
    'except',
    'finally',
    'for',
    'from',
    'global',
    'if',
    'import',
    'in',
    'is',
    'lambda',
    'nonlocal',
    'not',
    'or',
    'pass',
    'raise',
    'return',
    'try',
    'while',
    'with',
    'yield'
]

# Soft keywords: reserved only in specific grammatical positions
# (``match``/``case`` statements and the ``_`` wildcard pattern).
softkwlist = [
    '_',
    'case',
    'match'
]

# Bind the frozensets' __contains__ directly for O(1) membership tests.
iskeyword = frozenset(kwlist).__contains__
issoftkeyword = frozenset(softkwlist).__contains__
|
evalkit_llava/lib/python3.10/multiprocessing/__init__.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
# Package analogous to 'threading.py' but using processes
#
# multiprocessing/__init__.py
#
# This package is intended to duplicate the functionality (and much of
# the API) of threading.py but uses processes instead of threads.  A
# subpackage 'multiprocessing.dummy' has the same API but is a simple
# wrapper for 'threading'.
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#

import sys
from . import context

#
# Copy stuff from default context
#

# Re-export every public attribute of the default context (Process,
# Queue, Pool, Lock, ...) as a module-level name, so callers can write
# multiprocessing.Process etc. without touching the context machinery.
__all__ = [x for x in dir(context._default_context) if not x.startswith('_')]
globals().update((name, getattr(context._default_context, name)) for name in __all__)

#
# XXX These should not really be documented or public.
#

# Extra logging levels used internally by multiprocessing's logger.
SUBDEBUG = 5
SUBWARNING = 25

#
# Alias for main module -- will be reset by bootstrapping child processes
#

if '__main__' in sys.modules:
    sys.modules['__mp_main__'] = sys.modules['__main__']
|
evalkit_llava/lib/python3.10/multiprocessing/managers.py
ADDED
|
@@ -0,0 +1,1378 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Module providing manager classes for dealing
|
| 3 |
+
# with shared objects
|
| 4 |
+
#
|
| 5 |
+
# multiprocessing/managers.py
|
| 6 |
+
#
|
| 7 |
+
# Copyright (c) 2006-2008, R Oudkerk
|
| 8 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 9 |
+
#
|
| 10 |
+
|
| 11 |
+
__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
|
| 12 |
+
|
| 13 |
+
#
|
| 14 |
+
# Imports
|
| 15 |
+
#
|
| 16 |
+
|
| 17 |
+
import sys
|
| 18 |
+
import threading
|
| 19 |
+
import signal
|
| 20 |
+
import array
|
| 21 |
+
import queue
|
| 22 |
+
import time
|
| 23 |
+
import types
|
| 24 |
+
import os
|
| 25 |
+
from os import getpid
|
| 26 |
+
|
| 27 |
+
from traceback import format_exc
|
| 28 |
+
|
| 29 |
+
from . import connection
|
| 30 |
+
from .context import reduction, get_spawning_popen, ProcessError
|
| 31 |
+
from . import pool
|
| 32 |
+
from . import process
|
| 33 |
+
from . import util
|
| 34 |
+
from . import get_context
|
| 35 |
+
try:
|
| 36 |
+
from . import shared_memory
|
| 37 |
+
except ImportError:
|
| 38 |
+
HAS_SHMEM = False
|
| 39 |
+
else:
|
| 40 |
+
HAS_SHMEM = True
|
| 41 |
+
__all__.append('SharedMemoryManager')
|
| 42 |
+
|
| 43 |
+
#
|
| 44 |
+
# Register some things for pickling
|
| 45 |
+
#
|
| 46 |
+
|
| 47 |
+
def reduce_array(a):
|
| 48 |
+
return array.array, (a.typecode, a.tobytes())
|
| 49 |
+
reduction.register(array.array, reduce_array)
|
| 50 |
+
|
| 51 |
+
view_types = [type(getattr({}, name)()) for name in ('items','keys','values')]
|
| 52 |
+
if view_types[0] is not list: # only needed in Py3.0
|
| 53 |
+
def rebuild_as_list(obj):
|
| 54 |
+
return list, (list(obj),)
|
| 55 |
+
for view_type in view_types:
|
| 56 |
+
reduction.register(view_type, rebuild_as_list)
|
| 57 |
+
|
| 58 |
+
#
|
| 59 |
+
# Type for identifying shared objects
|
| 60 |
+
#
|
| 61 |
+
|
| 62 |
+
class Token(object):
|
| 63 |
+
'''
|
| 64 |
+
Type to uniquely identify a shared object
|
| 65 |
+
'''
|
| 66 |
+
__slots__ = ('typeid', 'address', 'id')
|
| 67 |
+
|
| 68 |
+
def __init__(self, typeid, address, id):
|
| 69 |
+
(self.typeid, self.address, self.id) = (typeid, address, id)
|
| 70 |
+
|
| 71 |
+
def __getstate__(self):
|
| 72 |
+
return (self.typeid, self.address, self.id)
|
| 73 |
+
|
| 74 |
+
def __setstate__(self, state):
|
| 75 |
+
(self.typeid, self.address, self.id) = state
|
| 76 |
+
|
| 77 |
+
def __repr__(self):
|
| 78 |
+
return '%s(typeid=%r, address=%r, id=%r)' % \
|
| 79 |
+
(self.__class__.__name__, self.typeid, self.address, self.id)
|
| 80 |
+
|
| 81 |
+
#
|
| 82 |
+
# Function for communication with a manager's server process
|
| 83 |
+
#
|
| 84 |
+
|
| 85 |
+
def dispatch(c, id, methodname, args=(), kwds={}):
|
| 86 |
+
'''
|
| 87 |
+
Send a message to manager using connection `c` and return response
|
| 88 |
+
'''
|
| 89 |
+
c.send((id, methodname, args, kwds))
|
| 90 |
+
kind, result = c.recv()
|
| 91 |
+
if kind == '#RETURN':
|
| 92 |
+
return result
|
| 93 |
+
raise convert_to_error(kind, result)
|
| 94 |
+
|
| 95 |
+
def convert_to_error(kind, result):
|
| 96 |
+
if kind == '#ERROR':
|
| 97 |
+
return result
|
| 98 |
+
elif kind in ('#TRACEBACK', '#UNSERIALIZABLE'):
|
| 99 |
+
if not isinstance(result, str):
|
| 100 |
+
raise TypeError(
|
| 101 |
+
"Result {0!r} (kind '{1}') type is {2}, not str".format(
|
| 102 |
+
result, kind, type(result)))
|
| 103 |
+
if kind == '#UNSERIALIZABLE':
|
| 104 |
+
return RemoteError('Unserializable message: %s\n' % result)
|
| 105 |
+
else:
|
| 106 |
+
return RemoteError(result)
|
| 107 |
+
else:
|
| 108 |
+
return ValueError('Unrecognized message type {!r}'.format(kind))
|
| 109 |
+
|
| 110 |
+
class RemoteError(Exception):
|
| 111 |
+
def __str__(self):
|
| 112 |
+
return ('\n' + '-'*75 + '\n' + str(self.args[0]) + '-'*75)
|
| 113 |
+
|
| 114 |
+
#
|
| 115 |
+
# Functions for finding the method names of an object
|
| 116 |
+
#
|
| 117 |
+
|
| 118 |
+
def all_methods(obj):
|
| 119 |
+
'''
|
| 120 |
+
Return a list of names of methods of `obj`
|
| 121 |
+
'''
|
| 122 |
+
temp = []
|
| 123 |
+
for name in dir(obj):
|
| 124 |
+
func = getattr(obj, name)
|
| 125 |
+
if callable(func):
|
| 126 |
+
temp.append(name)
|
| 127 |
+
return temp
|
| 128 |
+
|
| 129 |
+
def public_methods(obj):
|
| 130 |
+
'''
|
| 131 |
+
Return a list of names of methods of `obj` which do not start with '_'
|
| 132 |
+
'''
|
| 133 |
+
return [name for name in all_methods(obj) if name[0] != '_']
|
| 134 |
+
|
| 135 |
+
#
|
| 136 |
+
# Server which is run in a process controlled by a manager
|
| 137 |
+
#
|
| 138 |
+
|
| 139 |
+
class Server(object):
|
| 140 |
+
'''
|
| 141 |
+
Server class which runs in a process controlled by a manager object
|
| 142 |
+
'''
|
| 143 |
+
public = ['shutdown', 'create', 'accept_connection', 'get_methods',
|
| 144 |
+
'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref']
|
| 145 |
+
|
| 146 |
+
def __init__(self, registry, address, authkey, serializer):
|
| 147 |
+
if not isinstance(authkey, bytes):
|
| 148 |
+
raise TypeError(
|
| 149 |
+
"Authkey {0!r} is type {1!s}, not bytes".format(
|
| 150 |
+
authkey, type(authkey)))
|
| 151 |
+
self.registry = registry
|
| 152 |
+
self.authkey = process.AuthenticationString(authkey)
|
| 153 |
+
Listener, Client = listener_client[serializer]
|
| 154 |
+
|
| 155 |
+
# do authentication later
|
| 156 |
+
self.listener = Listener(address=address, backlog=16)
|
| 157 |
+
self.address = self.listener.address
|
| 158 |
+
|
| 159 |
+
self.id_to_obj = {'0': (None, ())}
|
| 160 |
+
self.id_to_refcount = {}
|
| 161 |
+
self.id_to_local_proxy_obj = {}
|
| 162 |
+
self.mutex = threading.Lock()
|
| 163 |
+
|
| 164 |
+
def serve_forever(self):
|
| 165 |
+
'''
|
| 166 |
+
Run the server forever
|
| 167 |
+
'''
|
| 168 |
+
self.stop_event = threading.Event()
|
| 169 |
+
process.current_process()._manager_server = self
|
| 170 |
+
try:
|
| 171 |
+
accepter = threading.Thread(target=self.accepter)
|
| 172 |
+
accepter.daemon = True
|
| 173 |
+
accepter.start()
|
| 174 |
+
try:
|
| 175 |
+
while not self.stop_event.is_set():
|
| 176 |
+
self.stop_event.wait(1)
|
| 177 |
+
except (KeyboardInterrupt, SystemExit):
|
| 178 |
+
pass
|
| 179 |
+
finally:
|
| 180 |
+
if sys.stdout != sys.__stdout__: # what about stderr?
|
| 181 |
+
util.debug('resetting stdout, stderr')
|
| 182 |
+
sys.stdout = sys.__stdout__
|
| 183 |
+
sys.stderr = sys.__stderr__
|
| 184 |
+
sys.exit(0)
|
| 185 |
+
|
| 186 |
+
def accepter(self):
|
| 187 |
+
while True:
|
| 188 |
+
try:
|
| 189 |
+
c = self.listener.accept()
|
| 190 |
+
except OSError:
|
| 191 |
+
continue
|
| 192 |
+
t = threading.Thread(target=self.handle_request, args=(c,))
|
| 193 |
+
t.daemon = True
|
| 194 |
+
t.start()
|
| 195 |
+
|
| 196 |
+
def _handle_request(self, c):
|
| 197 |
+
request = None
|
| 198 |
+
try:
|
| 199 |
+
connection.deliver_challenge(c, self.authkey)
|
| 200 |
+
connection.answer_challenge(c, self.authkey)
|
| 201 |
+
request = c.recv()
|
| 202 |
+
ignore, funcname, args, kwds = request
|
| 203 |
+
assert funcname in self.public, '%r unrecognized' % funcname
|
| 204 |
+
func = getattr(self, funcname)
|
| 205 |
+
except Exception:
|
| 206 |
+
msg = ('#TRACEBACK', format_exc())
|
| 207 |
+
else:
|
| 208 |
+
try:
|
| 209 |
+
result = func(c, *args, **kwds)
|
| 210 |
+
except Exception:
|
| 211 |
+
msg = ('#TRACEBACK', format_exc())
|
| 212 |
+
else:
|
| 213 |
+
msg = ('#RETURN', result)
|
| 214 |
+
|
| 215 |
+
try:
|
| 216 |
+
c.send(msg)
|
| 217 |
+
except Exception as e:
|
| 218 |
+
try:
|
| 219 |
+
c.send(('#TRACEBACK', format_exc()))
|
| 220 |
+
except Exception:
|
| 221 |
+
pass
|
| 222 |
+
util.info('Failure to send message: %r', msg)
|
| 223 |
+
util.info(' ... request was %r', request)
|
| 224 |
+
util.info(' ... exception was %r', e)
|
| 225 |
+
|
| 226 |
+
def handle_request(self, conn):
|
| 227 |
+
'''
|
| 228 |
+
Handle a new connection
|
| 229 |
+
'''
|
| 230 |
+
try:
|
| 231 |
+
self._handle_request(conn)
|
| 232 |
+
except SystemExit:
|
| 233 |
+
# Server.serve_client() calls sys.exit(0) on EOF
|
| 234 |
+
pass
|
| 235 |
+
finally:
|
| 236 |
+
conn.close()
|
| 237 |
+
|
| 238 |
+
def serve_client(self, conn):
|
| 239 |
+
'''
|
| 240 |
+
Handle requests from the proxies in a particular process/thread
|
| 241 |
+
'''
|
| 242 |
+
util.debug('starting server thread to service %r',
|
| 243 |
+
threading.current_thread().name)
|
| 244 |
+
|
| 245 |
+
recv = conn.recv
|
| 246 |
+
send = conn.send
|
| 247 |
+
id_to_obj = self.id_to_obj
|
| 248 |
+
|
| 249 |
+
while not self.stop_event.is_set():
|
| 250 |
+
|
| 251 |
+
try:
|
| 252 |
+
methodname = obj = None
|
| 253 |
+
request = recv()
|
| 254 |
+
ident, methodname, args, kwds = request
|
| 255 |
+
try:
|
| 256 |
+
obj, exposed, gettypeid = id_to_obj[ident]
|
| 257 |
+
except KeyError as ke:
|
| 258 |
+
try:
|
| 259 |
+
obj, exposed, gettypeid = \
|
| 260 |
+
self.id_to_local_proxy_obj[ident]
|
| 261 |
+
except KeyError:
|
| 262 |
+
raise ke
|
| 263 |
+
|
| 264 |
+
if methodname not in exposed:
|
| 265 |
+
raise AttributeError(
|
| 266 |
+
'method %r of %r object is not in exposed=%r' %
|
| 267 |
+
(methodname, type(obj), exposed)
|
| 268 |
+
)
|
| 269 |
+
|
| 270 |
+
function = getattr(obj, methodname)
|
| 271 |
+
|
| 272 |
+
try:
|
| 273 |
+
res = function(*args, **kwds)
|
| 274 |
+
except Exception as e:
|
| 275 |
+
msg = ('#ERROR', e)
|
| 276 |
+
else:
|
| 277 |
+
typeid = gettypeid and gettypeid.get(methodname, None)
|
| 278 |
+
if typeid:
|
| 279 |
+
rident, rexposed = self.create(conn, typeid, res)
|
| 280 |
+
token = Token(typeid, self.address, rident)
|
| 281 |
+
msg = ('#PROXY', (rexposed, token))
|
| 282 |
+
else:
|
| 283 |
+
msg = ('#RETURN', res)
|
| 284 |
+
|
| 285 |
+
except AttributeError:
|
| 286 |
+
if methodname is None:
|
| 287 |
+
msg = ('#TRACEBACK', format_exc())
|
| 288 |
+
else:
|
| 289 |
+
try:
|
| 290 |
+
fallback_func = self.fallback_mapping[methodname]
|
| 291 |
+
result = fallback_func(
|
| 292 |
+
self, conn, ident, obj, *args, **kwds
|
| 293 |
+
)
|
| 294 |
+
msg = ('#RETURN', result)
|
| 295 |
+
except Exception:
|
| 296 |
+
msg = ('#TRACEBACK', format_exc())
|
| 297 |
+
|
| 298 |
+
except EOFError:
|
| 299 |
+
util.debug('got EOF -- exiting thread serving %r',
|
| 300 |
+
threading.current_thread().name)
|
| 301 |
+
sys.exit(0)
|
| 302 |
+
|
| 303 |
+
except Exception:
|
| 304 |
+
msg = ('#TRACEBACK', format_exc())
|
| 305 |
+
|
| 306 |
+
try:
|
| 307 |
+
try:
|
| 308 |
+
send(msg)
|
| 309 |
+
except Exception:
|
| 310 |
+
send(('#UNSERIALIZABLE', format_exc()))
|
| 311 |
+
except Exception as e:
|
| 312 |
+
util.info('exception in thread serving %r',
|
| 313 |
+
threading.current_thread().name)
|
| 314 |
+
util.info(' ... message was %r', msg)
|
| 315 |
+
util.info(' ... exception was %r', e)
|
| 316 |
+
conn.close()
|
| 317 |
+
sys.exit(1)
|
| 318 |
+
|
| 319 |
+
def fallback_getvalue(self, conn, ident, obj):
|
| 320 |
+
return obj
|
| 321 |
+
|
| 322 |
+
def fallback_str(self, conn, ident, obj):
|
| 323 |
+
return str(obj)
|
| 324 |
+
|
| 325 |
+
def fallback_repr(self, conn, ident, obj):
|
| 326 |
+
return repr(obj)
|
| 327 |
+
|
| 328 |
+
fallback_mapping = {
|
| 329 |
+
'__str__':fallback_str,
|
| 330 |
+
'__repr__':fallback_repr,
|
| 331 |
+
'#GETVALUE':fallback_getvalue
|
| 332 |
+
}
|
| 333 |
+
|
| 334 |
+
def dummy(self, c):
|
| 335 |
+
pass
|
| 336 |
+
|
| 337 |
+
def debug_info(self, c):
|
| 338 |
+
'''
|
| 339 |
+
Return some info --- useful to spot problems with refcounting
|
| 340 |
+
'''
|
| 341 |
+
# Perhaps include debug info about 'c'?
|
| 342 |
+
with self.mutex:
|
| 343 |
+
result = []
|
| 344 |
+
keys = list(self.id_to_refcount.keys())
|
| 345 |
+
keys.sort()
|
| 346 |
+
for ident in keys:
|
| 347 |
+
if ident != '0':
|
| 348 |
+
result.append(' %s: refcount=%s\n %s' %
|
| 349 |
+
(ident, self.id_to_refcount[ident],
|
| 350 |
+
str(self.id_to_obj[ident][0])[:75]))
|
| 351 |
+
return '\n'.join(result)
|
| 352 |
+
|
| 353 |
+
def number_of_objects(self, c):
|
| 354 |
+
'''
|
| 355 |
+
Number of shared objects
|
| 356 |
+
'''
|
| 357 |
+
# Doesn't use (len(self.id_to_obj) - 1) as we shouldn't count ident='0'
|
| 358 |
+
return len(self.id_to_refcount)
|
| 359 |
+
|
| 360 |
+
def shutdown(self, c):
|
| 361 |
+
'''
|
| 362 |
+
Shutdown this process
|
| 363 |
+
'''
|
| 364 |
+
try:
|
| 365 |
+
util.debug('manager received shutdown message')
|
| 366 |
+
c.send(('#RETURN', None))
|
| 367 |
+
except:
|
| 368 |
+
import traceback
|
| 369 |
+
traceback.print_exc()
|
| 370 |
+
finally:
|
| 371 |
+
self.stop_event.set()
|
| 372 |
+
|
| 373 |
+
def create(self, c, typeid, /, *args, **kwds):
|
| 374 |
+
'''
|
| 375 |
+
Create a new shared object and return its id
|
| 376 |
+
'''
|
| 377 |
+
with self.mutex:
|
| 378 |
+
callable, exposed, method_to_typeid, proxytype = \
|
| 379 |
+
self.registry[typeid]
|
| 380 |
+
|
| 381 |
+
if callable is None:
|
| 382 |
+
if kwds or (len(args) != 1):
|
| 383 |
+
raise ValueError(
|
| 384 |
+
"Without callable, must have one non-keyword argument")
|
| 385 |
+
obj = args[0]
|
| 386 |
+
else:
|
| 387 |
+
obj = callable(*args, **kwds)
|
| 388 |
+
|
| 389 |
+
if exposed is None:
|
| 390 |
+
exposed = public_methods(obj)
|
| 391 |
+
if method_to_typeid is not None:
|
| 392 |
+
if not isinstance(method_to_typeid, dict):
|
| 393 |
+
raise TypeError(
|
| 394 |
+
"Method_to_typeid {0!r}: type {1!s}, not dict".format(
|
| 395 |
+
method_to_typeid, type(method_to_typeid)))
|
| 396 |
+
exposed = list(exposed) + list(method_to_typeid)
|
| 397 |
+
|
| 398 |
+
ident = '%x' % id(obj) # convert to string because xmlrpclib
|
| 399 |
+
# only has 32 bit signed integers
|
| 400 |
+
util.debug('%r callable returned object with id %r', typeid, ident)
|
| 401 |
+
|
| 402 |
+
self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
|
| 403 |
+
if ident not in self.id_to_refcount:
|
| 404 |
+
self.id_to_refcount[ident] = 0
|
| 405 |
+
|
| 406 |
+
self.incref(c, ident)
|
| 407 |
+
return ident, tuple(exposed)
|
| 408 |
+
|
| 409 |
+
def get_methods(self, c, token):
|
| 410 |
+
'''
|
| 411 |
+
Return the methods of the shared object indicated by token
|
| 412 |
+
'''
|
| 413 |
+
return tuple(self.id_to_obj[token.id][1])
|
| 414 |
+
|
| 415 |
+
def accept_connection(self, c, name):
|
| 416 |
+
'''
|
| 417 |
+
Spawn a new thread to serve this connection
|
| 418 |
+
'''
|
| 419 |
+
threading.current_thread().name = name
|
| 420 |
+
c.send(('#RETURN', None))
|
| 421 |
+
self.serve_client(c)
|
| 422 |
+
|
| 423 |
+
def incref(self, c, ident):
|
| 424 |
+
with self.mutex:
|
| 425 |
+
try:
|
| 426 |
+
self.id_to_refcount[ident] += 1
|
| 427 |
+
except KeyError as ke:
|
| 428 |
+
# If no external references exist but an internal (to the
|
| 429 |
+
# manager) still does and a new external reference is created
|
| 430 |
+
# from it, restore the manager's tracking of it from the
|
| 431 |
+
# previously stashed internal ref.
|
| 432 |
+
if ident in self.id_to_local_proxy_obj:
|
| 433 |
+
self.id_to_refcount[ident] = 1
|
| 434 |
+
self.id_to_obj[ident] = \
|
| 435 |
+
self.id_to_local_proxy_obj[ident]
|
| 436 |
+
obj, exposed, gettypeid = self.id_to_obj[ident]
|
| 437 |
+
util.debug('Server re-enabled tracking & INCREF %r', ident)
|
| 438 |
+
else:
|
| 439 |
+
raise ke
|
| 440 |
+
|
| 441 |
+
def decref(self, c, ident):
|
| 442 |
+
if ident not in self.id_to_refcount and \
|
| 443 |
+
ident in self.id_to_local_proxy_obj:
|
| 444 |
+
util.debug('Server DECREF skipping %r', ident)
|
| 445 |
+
return
|
| 446 |
+
|
| 447 |
+
with self.mutex:
|
| 448 |
+
if self.id_to_refcount[ident] <= 0:
|
| 449 |
+
raise AssertionError(
|
| 450 |
+
"Id {0!s} ({1!r}) has refcount {2:n}, not 1+".format(
|
| 451 |
+
ident, self.id_to_obj[ident],
|
| 452 |
+
self.id_to_refcount[ident]))
|
| 453 |
+
self.id_to_refcount[ident] -= 1
|
| 454 |
+
if self.id_to_refcount[ident] == 0:
|
| 455 |
+
del self.id_to_refcount[ident]
|
| 456 |
+
|
| 457 |
+
if ident not in self.id_to_refcount:
|
| 458 |
+
# Two-step process in case the object turns out to contain other
|
| 459 |
+
# proxy objects (e.g. a managed list of managed lists).
|
| 460 |
+
# Otherwise, deleting self.id_to_obj[ident] would trigger the
|
| 461 |
+
# deleting of the stored value (another managed object) which would
|
| 462 |
+
# in turn attempt to acquire the mutex that is already held here.
|
| 463 |
+
self.id_to_obj[ident] = (None, (), None) # thread-safe
|
| 464 |
+
util.debug('disposing of obj with id %r', ident)
|
| 465 |
+
with self.mutex:
|
| 466 |
+
del self.id_to_obj[ident]
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
#
|
| 470 |
+
# Class to represent state of a manager
|
| 471 |
+
#
|
| 472 |
+
|
| 473 |
+
class State(object):
|
| 474 |
+
__slots__ = ['value']
|
| 475 |
+
INITIAL = 0
|
| 476 |
+
STARTED = 1
|
| 477 |
+
SHUTDOWN = 2
|
| 478 |
+
|
| 479 |
+
#
|
| 480 |
+
# Mapping from serializer name to Listener and Client types
|
| 481 |
+
#
|
| 482 |
+
|
| 483 |
+
listener_client = {
|
| 484 |
+
'pickle' : (connection.Listener, connection.Client),
|
| 485 |
+
'xmlrpclib' : (connection.XmlListener, connection.XmlClient)
|
| 486 |
+
}
|
| 487 |
+
|
| 488 |
+
#
|
| 489 |
+
# Definition of BaseManager
|
| 490 |
+
#
|
| 491 |
+
|
| 492 |
+
class BaseManager(object):
|
| 493 |
+
'''
|
| 494 |
+
Base class for managers
|
| 495 |
+
'''
|
| 496 |
+
_registry = {}
|
| 497 |
+
_Server = Server
|
| 498 |
+
|
| 499 |
+
def __init__(self, address=None, authkey=None, serializer='pickle',
|
| 500 |
+
ctx=None):
|
| 501 |
+
if authkey is None:
|
| 502 |
+
authkey = process.current_process().authkey
|
| 503 |
+
self._address = address # XXX not final address if eg ('', 0)
|
| 504 |
+
self._authkey = process.AuthenticationString(authkey)
|
| 505 |
+
self._state = State()
|
| 506 |
+
self._state.value = State.INITIAL
|
| 507 |
+
self._serializer = serializer
|
| 508 |
+
self._Listener, self._Client = listener_client[serializer]
|
| 509 |
+
self._ctx = ctx or get_context()
|
| 510 |
+
|
| 511 |
+
def get_server(self):
|
| 512 |
+
'''
|
| 513 |
+
Return server object with serve_forever() method and address attribute
|
| 514 |
+
'''
|
| 515 |
+
if self._state.value != State.INITIAL:
|
| 516 |
+
if self._state.value == State.STARTED:
|
| 517 |
+
raise ProcessError("Already started server")
|
| 518 |
+
elif self._state.value == State.SHUTDOWN:
|
| 519 |
+
raise ProcessError("Manager has shut down")
|
| 520 |
+
else:
|
| 521 |
+
raise ProcessError(
|
| 522 |
+
"Unknown state {!r}".format(self._state.value))
|
| 523 |
+
return Server(self._registry, self._address,
|
| 524 |
+
self._authkey, self._serializer)
|
| 525 |
+
|
| 526 |
+
def connect(self):
|
| 527 |
+
'''
|
| 528 |
+
Connect manager object to the server process
|
| 529 |
+
'''
|
| 530 |
+
Listener, Client = listener_client[self._serializer]
|
| 531 |
+
conn = Client(self._address, authkey=self._authkey)
|
| 532 |
+
dispatch(conn, None, 'dummy')
|
| 533 |
+
self._state.value = State.STARTED
|
| 534 |
+
|
| 535 |
+
def start(self, initializer=None, initargs=()):
|
| 536 |
+
'''
|
| 537 |
+
Spawn a server process for this manager object
|
| 538 |
+
'''
|
| 539 |
+
if self._state.value != State.INITIAL:
|
| 540 |
+
if self._state.value == State.STARTED:
|
| 541 |
+
raise ProcessError("Already started server")
|
| 542 |
+
elif self._state.value == State.SHUTDOWN:
|
| 543 |
+
raise ProcessError("Manager has shut down")
|
| 544 |
+
else:
|
| 545 |
+
raise ProcessError(
|
| 546 |
+
"Unknown state {!r}".format(self._state.value))
|
| 547 |
+
|
| 548 |
+
if initializer is not None and not callable(initializer):
|
| 549 |
+
raise TypeError('initializer must be a callable')
|
| 550 |
+
|
| 551 |
+
# pipe over which we will retrieve address of server
|
| 552 |
+
reader, writer = connection.Pipe(duplex=False)
|
| 553 |
+
|
| 554 |
+
# spawn process which runs a server
|
| 555 |
+
self._process = self._ctx.Process(
|
| 556 |
+
target=type(self)._run_server,
|
| 557 |
+
args=(self._registry, self._address, self._authkey,
|
| 558 |
+
self._serializer, writer, initializer, initargs),
|
| 559 |
+
)
|
| 560 |
+
ident = ':'.join(str(i) for i in self._process._identity)
|
| 561 |
+
self._process.name = type(self).__name__ + '-' + ident
|
| 562 |
+
self._process.start()
|
| 563 |
+
|
| 564 |
+
# get address of server
|
| 565 |
+
writer.close()
|
| 566 |
+
self._address = reader.recv()
|
| 567 |
+
reader.close()
|
| 568 |
+
|
| 569 |
+
# register a finalizer
|
| 570 |
+
self._state.value = State.STARTED
|
| 571 |
+
self.shutdown = util.Finalize(
|
| 572 |
+
self, type(self)._finalize_manager,
|
| 573 |
+
args=(self._process, self._address, self._authkey,
|
| 574 |
+
self._state, self._Client),
|
| 575 |
+
exitpriority=0
|
| 576 |
+
)
|
| 577 |
+
|
| 578 |
+
@classmethod
|
| 579 |
+
def _run_server(cls, registry, address, authkey, serializer, writer,
|
| 580 |
+
initializer=None, initargs=()):
|
| 581 |
+
'''
|
| 582 |
+
Create a server, report its address and run it
|
| 583 |
+
'''
|
| 584 |
+
# bpo-36368: protect server process from KeyboardInterrupt signals
|
| 585 |
+
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
| 586 |
+
|
| 587 |
+
if initializer is not None:
|
| 588 |
+
initializer(*initargs)
|
| 589 |
+
|
| 590 |
+
# create server
|
| 591 |
+
server = cls._Server(registry, address, authkey, serializer)
|
| 592 |
+
|
| 593 |
+
# inform parent process of the server's address
|
| 594 |
+
writer.send(server.address)
|
| 595 |
+
writer.close()
|
| 596 |
+
|
| 597 |
+
# run the manager
|
| 598 |
+
util.info('manager serving at %r', server.address)
|
| 599 |
+
server.serve_forever()
|
| 600 |
+
|
| 601 |
+
def _create(self, typeid, /, *args, **kwds):
|
| 602 |
+
'''
|
| 603 |
+
Create a new shared object; return the token and exposed tuple
|
| 604 |
+
'''
|
| 605 |
+
assert self._state.value == State.STARTED, 'server not yet started'
|
| 606 |
+
conn = self._Client(self._address, authkey=self._authkey)
|
| 607 |
+
try:
|
| 608 |
+
id, exposed = dispatch(conn, None, 'create', (typeid,)+args, kwds)
|
| 609 |
+
finally:
|
| 610 |
+
conn.close()
|
| 611 |
+
return Token(typeid, self._address, id), exposed
|
| 612 |
+
|
| 613 |
+
def join(self, timeout=None):
|
| 614 |
+
'''
|
| 615 |
+
Join the manager process (if it has been spawned)
|
| 616 |
+
'''
|
| 617 |
+
if self._process is not None:
|
| 618 |
+
self._process.join(timeout)
|
| 619 |
+
if not self._process.is_alive():
|
| 620 |
+
self._process = None
|
| 621 |
+
|
| 622 |
+
def _debug_info(self):
|
| 623 |
+
'''
|
| 624 |
+
Return some info about the servers shared objects and connections
|
| 625 |
+
'''
|
| 626 |
+
conn = self._Client(self._address, authkey=self._authkey)
|
| 627 |
+
try:
|
| 628 |
+
return dispatch(conn, None, 'debug_info')
|
| 629 |
+
finally:
|
| 630 |
+
conn.close()
|
| 631 |
+
|
| 632 |
+
def _number_of_objects(self):
|
| 633 |
+
'''
|
| 634 |
+
Return the number of shared objects
|
| 635 |
+
'''
|
| 636 |
+
conn = self._Client(self._address, authkey=self._authkey)
|
| 637 |
+
try:
|
| 638 |
+
return dispatch(conn, None, 'number_of_objects')
|
| 639 |
+
finally:
|
| 640 |
+
conn.close()
|
| 641 |
+
|
| 642 |
+
def __enter__(self):
|
| 643 |
+
if self._state.value == State.INITIAL:
|
| 644 |
+
self.start()
|
| 645 |
+
if self._state.value != State.STARTED:
|
| 646 |
+
if self._state.value == State.INITIAL:
|
| 647 |
+
raise ProcessError("Unable to start server")
|
| 648 |
+
elif self._state.value == State.SHUTDOWN:
|
| 649 |
+
raise ProcessError("Manager has shut down")
|
| 650 |
+
else:
|
| 651 |
+
raise ProcessError(
|
| 652 |
+
"Unknown state {!r}".format(self._state.value))
|
| 653 |
+
return self
|
| 654 |
+
|
| 655 |
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
| 656 |
+
self.shutdown()
|
| 657 |
+
|
| 658 |
+
@staticmethod
|
| 659 |
+
def _finalize_manager(process, address, authkey, state, _Client):
|
| 660 |
+
'''
|
| 661 |
+
Shutdown the manager process; will be registered as a finalizer
|
| 662 |
+
'''
|
| 663 |
+
if process.is_alive():
|
| 664 |
+
util.info('sending shutdown message to manager')
|
| 665 |
+
try:
|
| 666 |
+
conn = _Client(address, authkey=authkey)
|
| 667 |
+
try:
|
| 668 |
+
dispatch(conn, None, 'shutdown')
|
| 669 |
+
finally:
|
| 670 |
+
conn.close()
|
| 671 |
+
except Exception:
|
| 672 |
+
pass
|
| 673 |
+
|
| 674 |
+
process.join(timeout=1.0)
|
| 675 |
+
if process.is_alive():
|
| 676 |
+
util.info('manager still alive')
|
| 677 |
+
if hasattr(process, 'terminate'):
|
| 678 |
+
util.info('trying to `terminate()` manager process')
|
| 679 |
+
process.terminate()
|
| 680 |
+
process.join(timeout=1.0)
|
| 681 |
+
if process.is_alive():
|
| 682 |
+
util.info('manager still alive after terminate')
|
| 683 |
+
|
| 684 |
+
state.value = State.SHUTDOWN
|
| 685 |
+
try:
|
| 686 |
+
del BaseProxy._address_to_local[address]
|
| 687 |
+
except KeyError:
|
| 688 |
+
pass
|
| 689 |
+
|
| 690 |
+
@property
|
| 691 |
+
def address(self):
|
| 692 |
+
return self._address
|
| 693 |
+
|
| 694 |
+
    @classmethod
    def register(cls, typeid, callable=None, proxytype=None, exposed=None,
                 method_to_typeid=None, create_method=True):
        '''
        Register a typeid with the manager type

        `callable` creates the shared object on the server; `proxytype`
        is the client-side proxy class (AutoProxy by default).  When
        `create_method` is true a convenience method named `typeid` is
        added to the manager class.
        '''
        # Copy-on-write: give this subclass its own registry so that
        # registrations do not leak into the parent class.
        if '_registry' not in cls.__dict__:
            cls._registry = cls._registry.copy()

        if proxytype is None:
            proxytype = AutoProxy

        exposed = exposed or getattr(proxytype, '_exposed_', None)

        method_to_typeid = method_to_typeid or \
                           getattr(proxytype, '_method_to_typeid_', None)

        if method_to_typeid:
            for key, value in list(method_to_typeid.items()): # isinstance?
                assert type(key) is str, '%r is not a string' % key
                assert type(value) is str, '%r is not a string' % value

        cls._registry[typeid] = (
            callable, exposed, method_to_typeid, proxytype
            )

        if create_method:
            def temp(self, /, *args, **kwds):
                util.debug('requesting creation of a shared %r object', typeid)
                token, exp = self._create(typeid, *args, **kwds)
                proxy = proxytype(
                    token, self._serializer, manager=self,
                    authkey=self._authkey, exposed=exp
                    )
                # The proxy's own _incref() took a reference; drop the
                # extra one _create() acquired on the server.
                conn = self._Client(token.address, authkey=self._authkey)
                dispatch(conn, None, 'decref', (token.id,))
                return proxy
            temp.__name__ = typeid
            setattr(cls, typeid, temp)
|
| 733 |
+
|
| 734 |
+
#
|
| 735 |
+
# Subclass of set which get cleared after a fork
|
| 736 |
+
#
|
| 737 |
+
|
| 738 |
+
class ProcessLocalSet(set):
    '''
    Set subclass which is cleared after a fork, so a child process never
    believes it owns references acquired by its parent.
    '''
    def __init__(self):
        util.register_after_fork(self, lambda obj: obj.clear())
    def __reduce__(self):
        # Pickles to a fresh empty set: ids are process-local by design.
        return type(self), ()
|
| 743 |
+
|
| 744 |
+
#
|
| 745 |
+
# Definition of BaseProxy
|
| 746 |
+
#
|
| 747 |
+
|
| 748 |
+
class BaseProxy(object):
    '''
    A base for proxies of shared objects
    '''
    # Maps manager address -> (thread-local storage, ProcessLocalSet);
    # shared by every proxy in this process talking to that manager.
    _address_to_local = {}
    _mutex = util.ForkAwareThreadLock()

    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True, manager_owned=False):
        with BaseProxy._mutex:
            tls_idset = BaseProxy._address_to_local.get(token.address, None)
            if tls_idset is None:
                tls_idset = util.ForkAwareLocal(), ProcessLocalSet()
                BaseProxy._address_to_local[token.address] = tls_idset

        # self._tls is used to record the connection used by this
        # thread to communicate with the manager at token.address
        self._tls = tls_idset[0]

        # self._idset is used to record the identities of all shared
        # objects for which the current process owns references and
        # which are in the manager at token.address
        self._idset = tls_idset[1]

        self._token = token
        self._id = self._token.id
        self._manager = manager
        self._serializer = serializer
        self._Client = listener_client[serializer][1]

        # Should be set to True only when a proxy object is being created
        # on the manager server; primary use case: nested proxy objects.
        # RebuildProxy detects when a proxy is being created on the manager
        # and sets this value appropriately.
        self._owned_by_manager = manager_owned

        if authkey is not None:
            self._authkey = process.AuthenticationString(authkey)
        elif self._manager is not None:
            self._authkey = self._manager._authkey
        else:
            self._authkey = process.current_process().authkey

        if incref:
            self._incref()

        util.register_after_fork(self, BaseProxy._after_fork)

    def _connect(self):
        # Open this thread's connection to the manager server and register
        # it with the server before caching it in thread-local storage.
        util.debug('making connection to manager')
        name = process.current_process().name
        if threading.current_thread().name != 'MainThread':
            name += '|' + threading.current_thread().name
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'accept_connection', (name,))
        self._tls.connection = conn

    def _callmethod(self, methodname, args=(), kwds={}):
        '''
        Try to call a method of the referent and return a copy of the result
        '''
        try:
            conn = self._tls.connection
        except AttributeError:
            # First call from this thread: lazily establish a connection.
            util.debug('thread %r does not own a connection',
                       threading.current_thread().name)
            self._connect()
            conn = self._tls.connection

        conn.send((self._id, methodname, args, kwds))
        kind, result = conn.recv()

        if kind == '#RETURN':
            return result
        elif kind == '#PROXY':
            # Server returned a nested proxy description: build the proxy
            # locally, then drop the extra server-side reference.
            exposed, token = result
            proxytype = self._manager._registry[token.typeid][-1]
            token.address = self._token.address
            proxy = proxytype(
                token, self._serializer, manager=self._manager,
                authkey=self._authkey, exposed=exposed
                )
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        raise convert_to_error(kind, result)

    def _getvalue(self):
        '''
        Get a copy of the value of the referent
        '''
        return self._callmethod('#GETVALUE')

    def _incref(self):
        if self._owned_by_manager:
            # The server already holds its own reference for manager-owned
            # proxies, so no incref/decref pair is needed.
            util.debug('owned_by_manager skipped INCREF of %r', self._token.id)
            return

        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'incref', (self._id,))
        util.debug('INCREF %r', self._token.id)

        self._idset.add(self._id)

        state = self._manager and self._manager._state

        # Arrange for a matching decref when this proxy is finalized.
        self._close = util.Finalize(
            self, BaseProxy._decref,
            args=(self._token, self._authkey, state,
                  self._tls, self._idset, self._Client),
            exitpriority=10
            )

    @staticmethod
    def _decref(token, authkey, state, tls, idset, _Client):
        idset.discard(token.id)

        # check whether manager is still alive
        if state is None or state.value == State.STARTED:
            # tell manager this process no longer cares about referent
            try:
                util.debug('DECREF %r', token.id)
                conn = _Client(token.address, authkey=authkey)
                dispatch(conn, None, 'decref', (token.id,))
            except Exception as e:
                util.debug('... decref failed %s', e)

        else:
            util.debug('DECREF %r -- manager already shutdown', token.id)

        # check whether we can close this thread's connection because
        # the process owns no more references to objects for this manager
        if not idset and hasattr(tls, 'connection'):
            util.debug('thread %r has no more proxies so closing conn',
                       threading.current_thread().name)
            tls.connection.close()
            del tls.connection

    def _after_fork(self):
        # The child cannot reuse the parent's manager object; re-acquire a
        # reference of its own.
        self._manager = None
        try:
            self._incref()
        except Exception as e:
            # the proxy may just be for a manager which has shutdown
            util.info('incref failed: %s' % e)

    def __reduce__(self):
        kwds = {}
        # Only embed the authkey when pickling for process spawning;
        # pickles sent elsewhere must not leak the key.
        if get_spawning_popen() is not None:
            kwds['authkey'] = self._authkey

        if getattr(self, '_isauto', False):
            kwds['exposed'] = self._exposed_
            return (RebuildProxy,
                    (AutoProxy, self._token, self._serializer, kwds))
        else:
            return (RebuildProxy,
                    (type(self), self._token, self._serializer, kwds))

    def __deepcopy__(self, memo):
        # Deep-copying a proxy yields a local copy of the referent itself.
        return self._getvalue()

    def __repr__(self):
        return '<%s object, typeid %r at %#x>' % \
               (type(self).__name__, self._token.typeid, id(self))

    def __str__(self):
        '''
        Return representation of the referent (or a fall-back if that fails)
        '''
        try:
            return self._callmethod('__repr__')
        except Exception:
            return repr(self)[:-1] + "; '__str__()' failed>"
|
| 922 |
+
|
| 923 |
+
#
|
| 924 |
+
# Function used for unpickling
|
| 925 |
+
#
|
| 926 |
+
|
| 927 |
+
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.

    `func` is the proxy factory (a proxy class or AutoProxy).  Detects the
    special case where unpickling happens inside the manager server itself.
    '''
    server = getattr(process.current_process(), '_manager_server', None)
    if server and server.address == token.address:
        # Rebuilding on the manager: mark the proxy as manager-owned and
        # ensure the referent is pinned in id_to_local_proxy_obj.
        util.debug('Rebuild a proxy owned by manager, token=%r', token)
        kwds['manager_owned'] = True
        if token.id not in server.id_to_local_proxy_obj:
            server.id_to_local_proxy_obj[token.id] = \
                server.id_to_obj[token.id]
    # Skip the incref while inheriting state during process startup; the
    # parent's reference is transferred rather than duplicated.
    incref = (
        kwds.pop('incref', True) and
        not getattr(process.current_process(), '_inheriting', False)
        )
    return func(token, serializer, incref=incref, **kwds)
|
| 943 |
+
|
| 944 |
+
#
|
| 945 |
+
# Functions to create proxies and proxy types
|
| 946 |
+
#
|
| 947 |
+
|
| 948 |
+
def MakeProxyType(name, exposed, _cache={}):
    '''
    Return a proxy type whose methods are given by `exposed`

    Generated types are memoized in `_cache` (keyed by name and the
    exposed-method tuple) so repeated calls return the same class.
    '''
    exposed = tuple(exposed)
    try:
        return _cache[(name, exposed)]
    except KeyError:
        pass

    dic = {}

    # Generate one forwarding method per exposed name; each simply relays
    # the call to the referent via _callmethod.
    for meth in exposed:
        exec('''def %s(self, /, *args, **kwds):
        return self._callmethod(%r, args, kwds)''' % (meth, meth), dic)

    ProxyType = type(name, (BaseProxy,), dic)
    ProxyType._exposed_ = exposed
    _cache[(name, exposed)] = ProxyType
    return ProxyType
|
| 968 |
+
|
| 969 |
+
|
| 970 |
+
def AutoProxy(token, serializer, manager=None, authkey=None,
              exposed=None, incref=True, manager_owned=False):
    '''
    Return an auto-proxy for `token`

    If `exposed` is not given it is queried from the server, so the proxy
    automatically forwards whatever methods the referent exposes.
    '''
    _Client = listener_client[serializer][1]

    if exposed is None:
        conn = _Client(token.address, authkey=authkey)
        try:
            exposed = dispatch(conn, None, 'get_methods', (token,))
        finally:
            conn.close()

    # Fall back through manager authkey, then the current process's key.
    if authkey is None and manager is not None:
        authkey = manager._authkey
    if authkey is None:
        authkey = process.current_process().authkey

    ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
    proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
                      incref=incref, manager_owned=manager_owned)
    # Mark for __reduce__ so the pickle rebuilds through AutoProxy.
    proxy._isauto = True
    return proxy
|
| 994 |
+
|
| 995 |
+
#
|
| 996 |
+
# Types/callables which we will register with SyncManager
|
| 997 |
+
#
|
| 998 |
+
|
| 999 |
+
class Namespace(object):
    '''
    Simple attribute container used as a SyncManager referent.

    The repr lists only public (non-underscore) attributes, sorted by
    their formatted "name=value" text.
    '''
    def __init__(self, /, **kwds):
        self.__dict__.update(kwds)

    def __repr__(self):
        shown = sorted(
            '%s=%r' % (attr, val)
            for attr, val in self.__dict__.items()
            if not attr.startswith('_')
        )
        return '%s(%s)' % (self.__class__.__name__, ', '.join(shown))
|
| 1010 |
+
|
| 1011 |
+
class Value(object):
    '''
    Trivial mutable value holder used as a SyncManager referent.

    The `lock` argument is accepted for signature compatibility only;
    the manager server already serializes access to referents.
    '''
    def __init__(self, typecode, value, lock=True):
        self._typecode = typecode
        self._value = value

    def get(self):
        '''Return the stored value.'''
        return self._value

    def set(self, value):
        '''Replace the stored value.'''
        self._value = value

    def __repr__(self):
        return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)

    # Attribute-style access, mirroring multiprocessing.Value.
    value = property(get, set)
|
| 1022 |
+
|
| 1023 |
+
def Array(typecode, sequence, lock=True):
    '''
    Return a plain array.array referent for SyncManager.

    `lock` is accepted for signature compatibility and ignored; the
    manager server serializes access.
    '''
    arr = array.array(typecode, sequence)
    return arr
|
| 1025 |
+
|
| 1026 |
+
#
|
| 1027 |
+
# Proxy types used by SyncManager
|
| 1028 |
+
#
|
| 1029 |
+
|
| 1030 |
+
class IteratorProxy(BaseProxy):
    '''
    Proxy for iterator/generator referents; forwards the iterator
    protocol plus generator control methods to the server.
    '''
    _exposed_ = ('__next__', 'send', 'throw', 'close')
    def __iter__(self):
        # The proxy itself is the local iterator object.
        return self
    def __next__(self, *args):
        return self._callmethod('__next__', args)
    def send(self, *args):
        return self._callmethod('send', args)
    def throw(self, *args):
        return self._callmethod('throw', args)
    def close(self, *args):
        return self._callmethod('close', args)
|
| 1042 |
+
|
| 1043 |
+
|
| 1044 |
+
class AcquirerProxy(BaseProxy):
    '''
    Proxy for lock-like referents (Lock, RLock, Semaphore, ...);
    usable as a context manager just like the referent.
    '''
    _exposed_ = ('acquire', 'release')
    def acquire(self, blocking=True, timeout=None):
        # Only pass `timeout` along when given, so referents whose
        # acquire() lacks a timeout parameter still work.
        args = (blocking,) if timeout is None else (blocking, timeout)
        return self._callmethod('acquire', args)
    def release(self):
        return self._callmethod('release')
    def __enter__(self):
        return self._callmethod('acquire')
    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._callmethod('release')
|
| 1055 |
+
|
| 1056 |
+
|
| 1057 |
+
class ConditionProxy(AcquirerProxy):
    '''
    Proxy for threading.Condition referents.  wait_for() is implemented
    locally (mirroring threading.Condition.wait_for) since each predicate
    check must run in the client process.
    '''
    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def notify(self, n=1):
        return self._callmethod('notify', (n,))
    def notify_all(self):
        return self._callmethod('notify_all')
    def wait_for(self, predicate, timeout=None):
        result = predicate()
        if result:
            return result
        if timeout is not None:
            # Track an absolute deadline so repeated waits share it.
            endtime = time.monotonic() + timeout
        else:
            endtime = None
        waittime = None
        while not result:
            if endtime is not None:
                waittime = endtime - time.monotonic()
                if waittime <= 0:
                    # Deadline reached: return last (falsy) predicate value.
                    break
            self.wait(waittime)
            result = predicate()
        return result
|
| 1082 |
+
|
| 1083 |
+
|
| 1084 |
+
class EventProxy(BaseProxy):
    '''
    Proxy for threading.Event referents.
    '''
    _exposed_ = ('is_set', 'set', 'clear', 'wait')
    def is_set(self):
        return self._callmethod('is_set')
    def set(self):
        return self._callmethod('set')
    def clear(self):
        return self._callmethod('clear')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
|
| 1094 |
+
|
| 1095 |
+
|
| 1096 |
+
class BarrierProxy(BaseProxy):
    '''
    Proxy for threading.Barrier referents.  Read-only attributes are
    fetched via the exposed '__getattribute__' method.
    '''
    _exposed_ = ('__getattribute__', 'wait', 'abort', 'reset')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def abort(self):
        return self._callmethod('abort')
    def reset(self):
        return self._callmethod('reset')
    @property
    def parties(self):
        return self._callmethod('__getattribute__', ('parties',))
    @property
    def n_waiting(self):
        return self._callmethod('__getattribute__', ('n_waiting',))
    @property
    def broken(self):
        return self._callmethod('__getattribute__', ('broken',))
|
| 1113 |
+
|
| 1114 |
+
|
| 1115 |
+
class NamespaceProxy(BaseProxy):
    '''
    Proxy for Namespace referents.  Attribute access is forwarded to the
    referent except for underscore-prefixed names, which stay local so
    the proxy's own bookkeeping attributes (_token, _id, ...) still work.
    Note: object.__getattribute__ is used to fetch _callmethod to avoid
    recursing through our own __getattr__.
    '''
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')
    def __getattr__(self, key):
        if key[0] == '_':
            return object.__getattribute__(self, key)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__getattribute__', (key,))
    def __setattr__(self, key, value):
        if key[0] == '_':
            return object.__setattr__(self, key, value)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__setattr__', (key, value))
    def __delattr__(self, key):
        if key[0] == '_':
            return object.__delattr__(self, key)
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('__delattr__', (key,))
|
| 1132 |
+
|
| 1133 |
+
|
| 1134 |
+
class ValueProxy(BaseProxy):
    '''
    Proxy for Value referents, exposing get()/set() and a `value`
    property like the referent.
    '''
    _exposed_ = ('get', 'set')
    def get(self):
        return self._callmethod('get')
    def set(self, value):
        return self._callmethod('set', (value,))
    value = property(get, set)

    # Allow ValueProxy[int]-style parameterization for typing purposes.
    __class_getitem__ = classmethod(types.GenericAlias)
|
| 1143 |
+
|
| 1144 |
+
|
| 1145 |
+
# Generated proxy exposing the list methods that can be forwarded
# directly; in-place operators need local wrappers (see ListProxy).
BaseListProxy = MakeProxyType('BaseListProxy', (
    '__add__', '__contains__', '__delitem__', '__getitem__', '__len__',
    '__mul__', '__reversed__', '__rmul__', '__setitem__',
    'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
    'reverse', 'sort', '__imul__'
    ))
class ListProxy(BaseListProxy):
    # __iadd__/__imul__ must return the proxy itself (not the referent's
    # return value) so `lst += ...` rebinds to the proxy.
    def __iadd__(self, value):
        self._callmethod('extend', (value,))
        return self
    def __imul__(self, value):
        self._callmethod('__imul__', (value,))
        return self
|
| 1158 |
+
|
| 1159 |
+
|
| 1160 |
+
# Generated proxy for dict referents.
DictProxy = MakeProxyType('DictProxy', (
    '__contains__', '__delitem__', '__getitem__', '__iter__', '__len__',
    '__setitem__', 'clear', 'copy', 'get', 'items',
    'keys', 'pop', 'popitem', 'setdefault', 'update', 'values'
    ))
# __iter__ returns a server-side iterator, so it must come back wrapped
# in an IteratorProxy rather than being copied.
DictProxy._method_to_typeid_ = {
    '__iter__': 'Iterator',
    }
|
| 1168 |
+
|
| 1169 |
+
|
| 1170 |
+
# Generated proxy for array.array referents (indexing and length only).
ArrayProxy = MakeProxyType('ArrayProxy', (
    '__len__', '__getitem__', '__setitem__'
    ))
|
| 1173 |
+
|
| 1174 |
+
|
| 1175 |
+
# Generated proxy for multiprocessing.pool.Pool referents.
BasePoolProxy = MakeProxyType('PoolProxy', (
    'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
    'map', 'map_async', 'starmap', 'starmap_async', 'terminate',
    ))
# Async/iterator-returning methods hand back shared objects, so their
# results must themselves be proxied.
BasePoolProxy._method_to_typeid_ = {
    'apply_async': 'AsyncResult',
    'map_async': 'AsyncResult',
    'starmap_async': 'AsyncResult',
    'imap': 'Iterator',
    'imap_unordered': 'Iterator'
    }
class PoolProxy(BasePoolProxy):
    # Context-manager support matching Pool's own behavior.
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.terminate()
|
| 1191 |
+
|
| 1192 |
+
#
|
| 1193 |
+
# Definition of SyncManager
|
| 1194 |
+
#
|
| 1195 |
+
|
| 1196 |
+
class SyncManager(BaseManager):
    '''
    Subclass of `BaseManager` which supports a number of shared object types.

    The types registered are those intended for the synchronization
    of threads, plus `dict`, `list` and `Namespace`.

    The `multiprocessing.Manager()` function creates started instances of
    this class.
    '''
    # All shared types are attached via SyncManager.register(...) calls
    # immediately after this class definition.
|
| 1206 |
+
|
| 1207 |
+
# Register the standard shared types on SyncManager.  Referents are
# ordinary threading/queue objects living in the manager process; the
# third argument (when given) selects the client-side proxy class.
SyncManager.register('Queue', queue.Queue)
SyncManager.register('JoinableQueue', queue.Queue)
SyncManager.register('Event', threading.Event, EventProxy)
SyncManager.register('Lock', threading.Lock, AcquirerProxy)
SyncManager.register('RLock', threading.RLock, AcquirerProxy)
SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy)
SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore,
                     AcquirerProxy)
SyncManager.register('Condition', threading.Condition, ConditionProxy)
SyncManager.register('Barrier', threading.Barrier, BarrierProxy)
SyncManager.register('Pool', pool.Pool, PoolProxy)
SyncManager.register('list', list, ListProxy)
SyncManager.register('dict', dict, DictProxy)
SyncManager.register('Value', Value, ValueProxy)
SyncManager.register('Array', Array, ArrayProxy)
SyncManager.register('Namespace', Namespace, NamespaceProxy)

# types returned by methods of PoolProxy; create_method=False means no
# SyncManager.Iterator()/AsyncResult() factory methods are generated.
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False)
|
| 1227 |
+
|
| 1228 |
+
#
|
| 1229 |
+
# Definition of SharedMemoryManager and SharedMemoryServer
|
| 1230 |
+
#
|
| 1231 |
+
|
| 1232 |
+
if HAS_SHMEM:
    class _SharedMemoryTracker:
        "Manages one or more shared memory segments."

        def __init__(self, name, segment_names=[]):
            # NOTE(review): mutable default is shared across calls that
            # omit `segment_names` — presumably tolerated because each
            # tracker is normally constructed with an explicit list or
            # via __setstate__; confirm before reuse.
            self.shared_memory_context_name = name
            self.segment_names = segment_names

        def register_segment(self, segment_name):
            "Adds the supplied shared memory block name to tracker."
            util.debug(f"Register segment {segment_name!r} in pid {getpid()}")
            self.segment_names.append(segment_name)

        def destroy_segment(self, segment_name):
            """Calls unlink() on the shared memory block with the supplied name
            and removes it from the list of blocks being tracked."""
            util.debug(f"Destroy segment {segment_name!r} in pid {getpid()}")
            self.segment_names.remove(segment_name)
            segment = shared_memory.SharedMemory(segment_name)
            segment.close()
            segment.unlink()

        def unlink(self):
            "Calls destroy_segment() on all tracked shared memory blocks."
            # Iterate over a copy since destroy_segment() mutates the list.
            for segment_name in self.segment_names[:]:
                self.destroy_segment(segment_name)

        def __del__(self):
            util.debug(f"Call {self.__class__.__name__}.__del__ in {getpid()}")
            self.unlink()

        def __getstate__(self):
            return (self.shared_memory_context_name, self.segment_names)

        def __setstate__(self, state):
            self.__init__(*state)


    class SharedMemoryServer(Server):
        # Extra RPC endpoints for shared-memory segment tracking.
        public = Server.public + \
                 ['track_segment', 'release_segment', 'list_segments']

        def __init__(self, *args, **kwargs):
            Server.__init__(self, *args, **kwargs)
            address = self.address
            # The address of Linux abstract namespaces can be bytes
            if isinstance(address, bytes):
                address = os.fsdecode(address)
            self.shared_memory_context = \
                _SharedMemoryTracker(f"shm_{address}_{getpid()}")
            util.debug(f"SharedMemoryServer started by pid {getpid()}")

        def create(self, c, typeid, /, *args, **kwargs):
            """Create a new distributed-shared object (not backed by a shared
            memory block) and return its id to be used in a Proxy Object."""
            # Unless set up as a shared proxy, don't make shared_memory_context
            # a standard part of kwargs.  This makes things easier for supplying
            # simple functions.
            if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"):
                kwargs['shared_memory_context'] = self.shared_memory_context
            return Server.create(self, c, typeid, *args, **kwargs)

        def shutdown(self, c):
            "Call unlink() on all tracked shared memory, terminate the Server."
            self.shared_memory_context.unlink()
            return Server.shutdown(self, c)

        def track_segment(self, c, segment_name):
            "Adds the supplied shared memory block name to Server's tracker."
            self.shared_memory_context.register_segment(segment_name)

        def release_segment(self, c, segment_name):
            """Calls unlink() on the shared memory block with the supplied name
            and removes it from the tracker instance inside the Server."""
            self.shared_memory_context.destroy_segment(segment_name)

        def list_segments(self, c):
            """Returns a list of names of shared memory blocks that the Server
            is currently tracking."""
            return self.shared_memory_context.segment_names


    class SharedMemoryManager(BaseManager):
        """Like SyncManager but uses SharedMemoryServer instead of Server.

        It provides methods for creating and returning SharedMemory instances
        and for creating a list-like object (ShareableList) backed by shared
        memory.  It also provides methods that create and return Proxy Objects
        that support synchronization across processes (i.e. multi-process-safe
        locks and semaphores).
        """

        _Server = SharedMemoryServer

        def __init__(self, *args, **kwargs):
            if os.name == "posix":
                # bpo-36867: Ensure the resource_tracker is running before
                # launching the manager process, so that concurrent
                # shared_memory manipulation both in the manager and in the
                # current process does not create two resource_tracker
                # processes.
                from . import resource_tracker
                resource_tracker.ensure_running()
            BaseManager.__init__(self, *args, **kwargs)
            util.debug(f"{self.__class__.__name__} created by pid {getpid()}")

        def __del__(self):
            util.debug(f"{self.__class__.__name__}.__del__ by pid {getpid()}")
            pass

        def get_server(self):
            'Better than monkeypatching for now; merge into Server ultimately'
            if self._state.value != State.INITIAL:
                if self._state.value == State.STARTED:
                    raise ProcessError("Already started SharedMemoryServer")
                elif self._state.value == State.SHUTDOWN:
                    raise ProcessError("SharedMemoryManager has shut down")
                else:
                    raise ProcessError(
                        "Unknown state {!r}".format(self._state.value))
            return self._Server(self._registry, self._address,
                                self._authkey, self._serializer)

        def SharedMemory(self, size):
            """Returns a new SharedMemory instance with the specified size in
            bytes, to be tracked by the manager."""
            with self._Client(self._address, authkey=self._authkey) as conn:
                sms = shared_memory.SharedMemory(None, create=True, size=size)
                try:
                    dispatch(conn, None, 'track_segment', (sms.name,))
                except BaseException as e:
                    # Tracking failed: unlink immediately so the segment
                    # is not leaked.
                    sms.unlink()
                    raise e
            return sms

        def ShareableList(self, sequence):
            """Returns a new ShareableList instance populated with the values
            from the input sequence, to be tracked by the manager."""
            with self._Client(self._address, authkey=self._authkey) as conn:
                sl = shared_memory.ShareableList(sequence)
                try:
                    dispatch(conn, None, 'track_segment', (sl.shm.name,))
                except BaseException as e:
                    sl.shm.unlink()
                    raise e
            return sl
|
evalkit_llava/lib/python3.10/multiprocessing/popen_forkserver.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
from .context import reduction, set_spawning_popen
|
| 5 |
+
if not reduction.HAVE_SEND_HANDLE:
|
| 6 |
+
raise ImportError('No support for sending fds between processes')
|
| 7 |
+
from . import forkserver
|
| 8 |
+
from . import popen_fork
|
| 9 |
+
from . import spawn
|
| 10 |
+
from . import util
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
__all__ = ['Popen']
|
| 14 |
+
|
| 15 |
+
#
|
| 16 |
+
# Wrapper for an fd used while launching a process
|
| 17 |
+
#
|
| 18 |
+
|
| 19 |
+
class _DupFd(object):
    '''
    Wrapper for an fd inherited via the forkserver: stores the index of
    the fd in the inherited-fds list rather than the fd itself.
    '''
    def __init__(self, ind):
        self.ind = ind
    def detach(self):
        # Resolve the index to the real fd in the child process.
        return forkserver.get_inherited_fds()[self.ind]
|
| 24 |
+
|
| 25 |
+
#
|
| 26 |
+
# Start child process using a server process
|
| 27 |
+
#
|
| 28 |
+
|
| 29 |
+
class Popen(popen_fork.Popen):
    '''
    Popen implementation that starts children via the forkserver process.
    '''
    method = 'forkserver'
    DupFd = _DupFd

    def __init__(self, process_obj):
        # fds queued for transfer to the child, in registration order.
        self._fds = []
        super().__init__(process_obj)

    def duplicate_for_child(self, fd):
        # Record the fd and return its index; _DupFd.detach() resolves it
        # on the child side.
        self._fds.append(fd)
        return len(self._fds) - 1

    def _launch(self, process_obj):
        # Pickle the preparation data and process object while the
        # spawning-popen context is set (affects how fds/authkeys pickle).
        prep_data = spawn.get_preparation_data(process_obj._name)
        buf = io.BytesIO()
        set_spawning_popen(self)
        try:
            reduction.dump(prep_data, buf)
            reduction.dump(process_obj, buf)
        finally:
            set_spawning_popen(None)

        self.sentinel, w = forkserver.connect_to_new_process(self._fds)
        # Keep a duplicate of the data pipe's write end as a sentinel of the
        # parent process used by the child process.
        _parent_w = os.dup(w)
        self.finalizer = util.Finalize(self, util.close_fds,
                                       (_parent_w, self.sentinel))
        with open(w, 'wb', closefd=True) as f:
            f.write(buf.getbuffer())
        # First message back on the sentinel pipe is the child's pid.
        self.pid = forkserver.read_signed(self.sentinel)

    def poll(self, flag=os.WNOHANG):
        if self.returncode is None:
            from multiprocessing.connection import wait
            timeout = 0 if flag == os.WNOHANG else None
            if not wait([self.sentinel], timeout):
                return None
            try:
                self.returncode = forkserver.read_signed(self.sentinel)
            except (OSError, EOFError):
                # This should not happen usually, but perhaps the forkserver
                # process itself got killed
                self.returncode = 255

        return self.returncode
|
evalkit_llava/lib/python3.10/multiprocessing/util.py
ADDED
|
@@ -0,0 +1,489 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Module providing various facilities to other parts of the package
|
| 3 |
+
#
|
| 4 |
+
# multiprocessing/util.py
|
| 5 |
+
#
|
| 6 |
+
# Copyright (c) 2006-2008, R Oudkerk
|
| 7 |
+
# Licensed to PSF under a Contributor Agreement.
|
| 8 |
+
#
|
| 9 |
+
|
| 10 |
+
import os
|
| 11 |
+
import itertools
|
| 12 |
+
import sys
|
| 13 |
+
import weakref
|
| 14 |
+
import atexit
|
| 15 |
+
import threading # we want threading to install it's
|
| 16 |
+
# cleanup function before multiprocessing does
|
| 17 |
+
from subprocess import _args_from_interpreter_flags
|
| 18 |
+
|
| 19 |
+
from . import process
|
| 20 |
+
|
| 21 |
+
__all__ = [
|
| 22 |
+
'sub_debug', 'debug', 'info', 'sub_warning', 'get_logger',
|
| 23 |
+
'log_to_stderr', 'get_temp_dir', 'register_after_fork',
|
| 24 |
+
'is_exiting', 'Finalize', 'ForkAwareThreadLock', 'ForkAwareLocal',
|
| 25 |
+
'close_all_fds_except', 'SUBDEBUG', 'SUBWARNING',
|
| 26 |
+
]
|
| 27 |
+
|
| 28 |
+
#
|
| 29 |
+
# Logging
|
| 30 |
+
#
|
| 31 |
+
|
| 32 |
+
NOTSET = 0
|
| 33 |
+
SUBDEBUG = 5
|
| 34 |
+
DEBUG = 10
|
| 35 |
+
INFO = 20
|
| 36 |
+
SUBWARNING = 25
|
| 37 |
+
|
| 38 |
+
LOGGER_NAME = 'multiprocessing'
|
| 39 |
+
DEFAULT_LOGGING_FORMAT = '[%(levelname)s/%(processName)s] %(message)s'
|
| 40 |
+
|
| 41 |
+
_logger = None
|
| 42 |
+
_log_to_stderr = False
|
| 43 |
+
|
| 44 |
+
def sub_debug(msg, *args):
    """Log *msg* at SUBDEBUG level if the package logger is configured."""
    logger = _logger
    if logger:
        logger.log(SUBDEBUG, msg, *args)
|
| 47 |
+
|
| 48 |
+
def debug(msg, *args):
    """Log *msg* at DEBUG level if the package logger is configured."""
    current = _logger
    if current:
        current.log(DEBUG, msg, *args)
|
| 51 |
+
|
| 52 |
+
def info(msg, *args):
    """Log *msg* at INFO level if the package logger is configured."""
    log = _logger
    if log:
        log.log(INFO, msg, *args)
|
| 55 |
+
|
| 56 |
+
def sub_warning(msg, *args):
    """Log *msg* at SUBWARNING level if the package logger is configured."""
    target = _logger
    if target:
        target.log(SUBWARNING, msg, *args)
|
| 59 |
+
|
| 60 |
+
def get_logger():
    '''
    Returns logger used by multiprocessing
    '''
    global _logger
    import logging

    # Serialize against other threads/modules configuring logging.
    logging._acquireLock()
    try:
        if not _logger:

            _logger = logging.getLogger(LOGGER_NAME)
            # Keep multiprocessing records out of the root logger.
            _logger.propagate = 0

            # XXX multiprocessing should cleanup before logging
            # Re-register _exit_function so it runs *before* logging's own
            # atexit shutdown handler (atexit runs handlers LIFO).
            if hasattr(atexit, 'unregister'):
                atexit.unregister(_exit_function)
                atexit.register(_exit_function)
            else:
                # Fallback for implementations without atexit.unregister:
                # move our handler to the end of the private handler list.
                atexit._exithandlers.remove((_exit_function, (), {}))
                atexit._exithandlers.append((_exit_function, (), {}))

    finally:
        logging._releaseLock()

    return _logger
|
| 86 |
+
|
| 87 |
+
def log_to_stderr(level=None):
    '''
    Turn on logging and add a handler which prints to stderr
    '''
    global _log_to_stderr
    import logging

    logger = get_logger()
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(DEFAULT_LOGGING_FORMAT))
    logger.addHandler(stream_handler)

    if level:
        logger.setLevel(level)
    _log_to_stderr = True
    return _logger
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# Abstract socket support
|
| 107 |
+
|
| 108 |
+
def _platform_supports_abstract_sockets():
    """Return True on platforms (Linux, Android) with abstract AF_UNIX names."""
    return sys.platform == "linux" or hasattr(sys, 'getandroidapilevel')
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def is_abstract_socket_namespace(address):
    """Return True if *address* names a Linux abstract-namespace socket.

    An abstract address starts with a NUL byte/character.  An empty or
    falsy address is never abstract; non-str/bytes addresses are rejected.
    """
    if not address:
        return False
    if isinstance(address, bytes):
        return address[0] == 0
    if isinstance(address, str):
        return address[0] == "\0"
    raise TypeError(f'address type of {address!r} unrecognized')
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
abstract_sockets_supported = _platform_supports_abstract_sockets()
|
| 127 |
+
|
| 128 |
+
#
|
| 129 |
+
# Function returning a temp directory which will be removed on exit
|
| 130 |
+
#
|
| 131 |
+
|
| 132 |
+
def _remove_temp_dir(rmtree, tempdir):
    """Delete *tempdir* with *rmtree* and drop it from the process config."""
    rmtree(tempdir)

    proc = process.current_process()
    # current_process() can be None if the finalizer is called
    # late during Python finalization
    if proc is not None:
        proc._config['tempdir'] = None
|
| 140 |
+
|
| 141 |
+
def get_temp_dir():
    """Return the process's private temp directory, creating it on first use.

    The directory is removed automatically at exit via a Finalize with a
    very low priority so it outlives other finalizers that may still use it.
    """
    # get name of a temp directory which will be automatically cleaned up
    tempdir = process.current_process()._config.get('tempdir')
    if tempdir is None:
        import shutil, tempfile
        tempdir = tempfile.mkdtemp(prefix='pymp-')
        info('created temp directory %s', tempdir)
        # keep a strong reference to shutil.rmtree(), since the finalizer
        # can be called late during Python shutdown
        Finalize(None, _remove_temp_dir, args=(shutil.rmtree, tempdir),
                 exitpriority=-100)
        process.current_process()._config['tempdir'] = tempdir
    return tempdir
|
| 154 |
+
|
| 155 |
+
#
|
| 156 |
+
# Support for reinitialization of objects when bootstrapping a child process
|
| 157 |
+
#
|
| 158 |
+
|
| 159 |
+
_afterfork_registry = weakref.WeakValueDictionary()
|
| 160 |
+
_afterfork_counter = itertools.count()
|
| 161 |
+
|
| 162 |
+
def _run_after_forkers():
    """Invoke all registered after-fork callbacks, oldest first.

    Exceptions from individual callbacks are logged and swallowed so one
    failing callback cannot prevent the others from running.
    """
    for (index, ident, func), obj in sorted(_afterfork_registry.items()):
        try:
            func(obj)
        except Exception as e:
            info('after forker raised exception %s', e)
|
| 170 |
+
|
| 171 |
+
def register_after_fork(obj, func):
    """Arrange for func(obj) to run in forked children while *obj* is alive.

    The registry holds *obj* weakly, so registration does not keep it alive.
    """
    key = (next(_afterfork_counter), id(obj), func)
    _afterfork_registry[key] = obj
|
| 173 |
+
|
| 174 |
+
#
|
| 175 |
+
# Finalization using weakrefs
|
| 176 |
+
#
|
| 177 |
+
|
| 178 |
+
_finalizer_registry = {}
|
| 179 |
+
_finalizer_counter = itertools.count()
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class Finalize(object):
    '''
    Class which supports object finalization using weakrefs
    '''
    def __init__(self, obj, callback, args=(), kwargs=None, exitpriority=None):
        if (exitpriority is not None) and not isinstance(exitpriority,int):
            raise TypeError(
                "Exitpriority ({0!r}) must be None or int, not {1!s}".format(
                    exitpriority, type(exitpriority)))

        if obj is not None:
            # Run this finalizer automatically when obj is garbage collected
            # (the weakref callback is this Finalize instance itself).
            self._weakref = weakref.ref(obj, self)
        elif exitpriority is None:
            raise ValueError("Without object, exitpriority cannot be None")

        self._callback = callback
        self._args = args
        self._kwargs = kwargs or {}
        # Key orders finalizers: (priority, creation counter).
        self._key = (exitpriority, next(_finalizer_counter))
        # Remember the creating process so the callback never fires in a
        # forked child.
        self._pid = os.getpid()

        _finalizer_registry[self._key] = self

    def __call__(self, wr=None,
                 # Need to bind these locally because the globals can have
                 # been cleared at shutdown
                 _finalizer_registry=_finalizer_registry,
                 sub_debug=sub_debug, getpid=os.getpid):
        '''
        Run the callback unless it has already been called or cancelled
        '''
        try:
            del _finalizer_registry[self._key]
        except KeyError:
            # Already run or cancelled.
            sub_debug('finalizer no longer registered')
        else:
            if self._pid != getpid():
                sub_debug('finalizer ignored because different process')
                res = None
            else:
                sub_debug('finalizer calling %s with args %s and kwargs %s',
                          self._callback, self._args, self._kwargs)
                res = self._callback(*self._args, **self._kwargs)
            # Drop all references so the callback and its args can be freed.
            self._weakref = self._callback = self._args = \
                            self._kwargs = self._key = None
            return res

    def cancel(self):
        '''
        Cancel finalization of the object
        '''
        try:
            del _finalizer_registry[self._key]
        except KeyError:
            # Already run or cancelled — nothing to do.
            pass
        else:
            self._weakref = self._callback = self._args = \
                            self._kwargs = self._key = None

    def still_active(self):
        '''
        Return whether this finalizer is still waiting to invoke callback
        '''
        return self._key in _finalizer_registry

    def __repr__(self):
        try:
            obj = self._weakref()
        except (AttributeError, TypeError):
            # _weakref may be None (spent finalizer) or absent (obj=None).
            obj = None

        if obj is None:
            return '<%s object, dead>' % self.__class__.__name__

        x = '<%s object, callback=%s' % (
                self.__class__.__name__,
                getattr(self._callback, '__name__', self._callback))
        if self._args:
            x += ', args=' + str(self._args)
        if self._kwargs:
            x += ', kwargs=' + str(self._kwargs)
        if self._key[0] is not None:
            x += ', exitpriority=' + str(self._key[0])
        return x + '>'
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def _run_finalizers(minpriority=None):
    '''
    Run all finalizers whose exit priority is not None and at least minpriority

    Finalizers with highest priority are called first; finalizers with
    the same priority will be called in reverse order of creation.
    '''
    if _finalizer_registry is None:
        # This function may be called after this module's globals are
        # destroyed. See the _exit_function function in this module for more
        # notes.
        return

    # Predicate selecting which registry keys (priority, counter) to run.
    if minpriority is None:
        f = lambda p : p[0] is not None
    else:
        f = lambda p : p[0] is not None and p[0] >= minpriority

    # Careful: _finalizer_registry may be mutated while this function
    # is running (either by a GC run or by another thread).

    # list(_finalizer_registry) should be atomic, while
    # list(_finalizer_registry.items()) is not.
    keys = [key for key in list(_finalizer_registry) if f(key)]
    # Descending sort => highest priority first, then newest first.
    keys.sort(reverse=True)

    for key in keys:
        finalizer = _finalizer_registry.get(key)
        # key may have been removed from the registry
        if finalizer is not None:
            sub_debug('calling %s', finalizer)
            try:
                finalizer()
            except Exception:
                # One failing finalizer must not stop the rest.
                import traceback
                traceback.print_exc()

    if minpriority is None:
        # Everything eligible has run; drop the remaining (priority None)
        # entries as well.
        _finalizer_registry.clear()
|
| 307 |
+
|
| 308 |
+
#
|
| 309 |
+
# Clean up on exit
|
| 310 |
+
#
|
| 311 |
+
|
| 312 |
+
def is_exiting():
    '''
    Returns true if the process is shutting down
    '''
    # _exiting can be None late in interpreter shutdown, once module
    # globals have been cleared — treat that as "exiting" too.
    return bool(_exiting) or _exiting is None
|
| 317 |
+
|
| 318 |
+
_exiting = False
|
| 319 |
+
|
| 320 |
+
def _exit_function(info=info, debug=debug, _run_finalizers=_run_finalizers,
                   active_children=process.active_children,
                   current_process=process.current_process):
    """Atexit handler: run finalizers and reap child processes at shutdown."""
    # We hold on to references to functions in the arglist due to the
    # situation described below, where this function is called after this
    # module's globals are destroyed.

    global _exiting

    if not _exiting:
        _exiting = True

        info('process shutting down')
        debug('running all "atexit" finalizers with priority >= 0')
        # High-priority finalizers run before children are terminated/joined.
        _run_finalizers(0)

        if current_process() is not None:
            # We check if the current process is None here because if
            # it's None, any call to ``active_children()`` will raise
            # an AttributeError (active_children winds up trying to
            # get attributes from util._current_process). One
            # situation where this can happen is if someone has
            # manipulated sys.modules, causing this module to be
            # garbage collected. The destructor for the module type
            # then replaces all values in the module dict with None.
            # For instance, after setuptools runs a test it replaces
            # sys.modules with a copy created earlier. See issues
            # #9775 and #15881. Also related: #4106, #9205, and
            # #9207.

            # Terminate daemon children first, then join everything.
            for p in active_children():
                if p.daemon:
                    info('calling terminate() for daemon %s', p.name)
                    p._popen.terminate()

            for p in active_children():
                info('calling join() for process %s', p.name)
                p.join()

        debug('running the remaining "atexit" finalizers')
        _run_finalizers()
|
| 361 |
+
|
| 362 |
+
atexit.register(_exit_function)
|
| 363 |
+
|
| 364 |
+
#
|
| 365 |
+
# Some fork aware types
|
| 366 |
+
#
|
| 367 |
+
|
| 368 |
+
class ForkAwareThreadLock(object):
    """A threading.Lock wrapper that reinitializes itself after a fork.

    A lock held at fork time would deadlock the child; re-creating the
    underlying lock in the child guarantees it starts unlocked.
    """

    def __init__(self):
        lock = threading.Lock()
        self._lock = lock
        self.acquire = lock.acquire
        self.release = lock.release
        register_after_fork(self, ForkAwareThreadLock._at_fork_reinit)

    def _at_fork_reinit(self):
        # Reset the lock's internal state in the forked child.
        self._lock._at_fork_reinit()

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, *args):
        return self._lock.__exit__(*args)
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
class ForkAwareLocal(threading.local):
    """threading.local whose per-instance state is wiped in forked children."""

    def __init__(self):
        register_after_fork(self, lambda obj : obj.__dict__.clear())

    def __reduce__(self):
        # Pickle as a brand-new, empty instance of the same type.
        return type(self), ()
|
| 390 |
+
|
| 391 |
+
#
|
| 392 |
+
# Close fds except those specified
|
| 393 |
+
#
|
| 394 |
+
|
| 395 |
+
try:
|
| 396 |
+
MAXFD = os.sysconf("SC_OPEN_MAX")
|
| 397 |
+
except Exception:
|
| 398 |
+
MAXFD = 256
|
| 399 |
+
|
| 400 |
+
def close_all_fds_except(fds):
    """Close every file descriptor below MAXFD except those listed in *fds*."""
    # Add sentinels so each gap between kept fds (and the ends of the
    # range) is closed with one os.closerange() call.
    keep = sorted(list(fds) + [-1, MAXFD])
    assert keep[-1] == MAXFD, 'fd too large'
    for low, high in zip(keep, keep[1:]):
        os.closerange(low + 1, high)
|
| 406 |
+
#
|
| 407 |
+
# Close sys.stdin and replace stdin with os.devnull
|
| 408 |
+
#
|
| 409 |
+
|
| 410 |
+
def _close_stdin():
    """Close sys.stdin and rebind it to os.devnull (best effort)."""
    if sys.stdin is None:
        return

    try:
        sys.stdin.close()
    except (OSError, ValueError):
        # Already closed or otherwise unusable — ignore.
        pass

    try:
        fd = os.open(os.devnull, os.O_RDONLY)
        try:
            # closefd=False: the file object must not close fd itself;
            # on failure we close it explicitly below.
            sys.stdin = open(fd, encoding="utf-8", closefd=False)
        except:
            os.close(fd)
            raise
    except (OSError, ValueError):
        # Best effort: leave stdin closed if devnull can't be opened.
        pass
|
| 428 |
+
|
| 429 |
+
#
|
| 430 |
+
# Flush standard streams, if any
|
| 431 |
+
#
|
| 432 |
+
|
| 433 |
+
def _flush_std_streams():
    """Best-effort flush of sys.stdout and sys.stderr."""
    for stream in (sys.stdout, sys.stderr):
        try:
            stream.flush()
        except (AttributeError, ValueError):
            # Stream may be None (AttributeError) or already closed
            # (ValueError) — ignore either way.
            pass
|
| 442 |
+
|
| 443 |
+
#
|
| 444 |
+
# Start a program with only specified fds kept open
|
| 445 |
+
#
|
| 446 |
+
|
| 447 |
+
def spawnv_passfds(path, args, passfds):
    """Fork and exec *path* with *args*, keeping only *passfds* open.

    Uses the private _posixsubprocess.fork_exec primitive; the long
    positional argument list matches its C signature and must not be
    reordered.  Returns fork_exec's result (presumably the child pid —
    TODO confirm against _posixsubprocess for this Python version).
    """
    import _posixsubprocess
    # fork_exec requires a sorted tuple of integer fds.
    passfds = tuple(sorted(map(int, passfds)))
    errpipe_read, errpipe_write = os.pipe()
    try:
        return _posixsubprocess.fork_exec(
            args, [os.fsencode(path)], True, passfds, None, None,
            -1, -1, -1, -1, -1, -1, errpipe_read, errpipe_write,
            False, False, None, None, None, -1, None)
    finally:
        # The error pipe is only needed across the fork/exec window.
        os.close(errpipe_read)
        os.close(errpipe_write)
|
| 459 |
+
|
| 460 |
+
|
| 461 |
+
def close_fds(*fds):
    """Close each file descriptor given as an argument"""
    for descriptor in fds:
        os.close(descriptor)
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def _cleanup_tests():
    """Cleanup multiprocessing resources when multiprocessing tests
    completed."""

    from test import support

    # cleanup multiprocessing
    process._cleanup()

    # Stop the ForkServer process if it's running
    from multiprocessing import forkserver
    forkserver._forkserver._stop()

    # Stop the ResourceTracker process if it's running
    from multiprocessing import resource_tracker
    resource_tracker._resource_tracker._stop()

    # bpo-37421: Explicitly call _run_finalizers() to remove immediately
    # temporary directories created by multiprocessing.util.get_temp_dir().
    _run_finalizers()
    support.gc_collect()

    support.reap_children()
|
evalkit_llava/lib/python3.10/reprlib.py
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Redo the builtin repr() (representation) but with limits on most sizes."""
|
| 2 |
+
|
| 3 |
+
__all__ = ["Repr", "repr", "recursive_repr"]
|
| 4 |
+
|
| 5 |
+
import builtins
|
| 6 |
+
from itertools import islice
|
| 7 |
+
from _thread import get_ident
|
| 8 |
+
|
| 9 |
+
def recursive_repr(fillvalue='...'):
    'Decorator to make a repr function return fillvalue for a recursive call'

    def decorating_function(user_function):
        # (object id, thread id) pairs currently being repr'ed.
        active = set()

        def wrapper(self):
            key = id(self), get_ident()
            if key in active:
                # Re-entered for the same object on the same thread.
                return fillvalue
            active.add(key)
            try:
                return user_function(self)
            finally:
                active.discard(key)

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__qualname__ = getattr(user_function, '__qualname__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function
|
| 35 |
+
|
| 36 |
+
class Repr:
    """Produce repr()-like strings with per-type limits on output size."""

    def __init__(self):
        # Maximum nesting depth before collapsing to '...'.
        self.maxlevel = 6
        # Per-container element limits.
        self.maxtuple = 6
        self.maxlist = 6
        self.maxarray = 5
        self.maxdict = 4
        self.maxset = 6
        self.maxfrozenset = 6
        self.maxdeque = 6
        # Character limits for scalar reprs.
        self.maxstring = 30
        self.maxlong = 40
        self.maxother = 30

    def repr(self, x):
        """Return the size-limited repr of *x*."""
        return self.repr1(x, self.maxlevel)

    def repr1(self, x, level):
        """Dispatch to repr_<typename>() if defined, else repr_instance()."""
        typename = type(x).__name__
        if ' ' in typename:
            # e.g. 'wrapper descriptor' -> 'wrapper_descriptor'
            parts = typename.split()
            typename = '_'.join(parts)
        if hasattr(self, 'repr_' + typename):
            return getattr(self, 'repr_' + typename)(x, level)
        else:
            return self.repr_instance(x, level)

    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        # Common helper: render up to maxiter elements of x between the
        # given delimiters, appending trail for one-element tuples.
        n = len(x)
        if level <= 0 and n:
            # Depth exhausted — collapse the contents.
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter: pieces.append('...')
            s = ', '.join(pieces)
            if n == 1 and trail: right = trail + right
        return '%s%s%s' % (left, s, right)

    def repr_tuple(self, x, level):
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')

    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)

    def repr_array(self, x, level):
        if not x:
            return "array('%s')" % x.typecode
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)

    def repr_set(self, x, level):
        if not x:
            return 'set()'
        # Sort when possible so output is deterministic.
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, '{', '}', self.maxset)

    def repr_frozenset(self, x, level):
        if not x:
            return 'frozenset()'
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'frozenset({', '})',
                                   self.maxfrozenset)

    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)

    def repr_dict(self, x, level):
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)

    def repr_str(self, x, level):
        s = builtins.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            # Keep roughly equal head and tail around a '...' gap.
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = builtins.repr(x[:i] + x[len(x)-j:])
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_int(self, x, level):
        s = builtins.repr(x) # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            i = max(0, (self.maxlong-3)//2)
            j = max(0, self.maxlong-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_instance(self, x, level):
        try:
            s = builtins.repr(x)
        # Bugs in x.__repr__() can cause arbitrary
        # exceptions -- then make up something
        except Exception:
            return '<%s instance at %#x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxother:
            i = max(0, (self.maxother-3)//2)
            j = max(0, self.maxother-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def _possibly_sorted(x):
    """Return sorted(x) when the items are sortable, else list(x).

    Comparison functions may raise arbitrary exceptions, so any failure
    falls back to the unsorted iteration order.
    """
    try:
        return sorted(x)
    except Exception:
        return list(x)
|
| 159 |
+
|
| 160 |
+
# Module-level shared Repr instance, and a drop-in replacement for
# builtins.repr that applies its size limits.
aRepr = Repr()
repr = aRepr.repr
|
evalkit_llava/lib/python3.10/threading.py
ADDED
|
@@ -0,0 +1,1645 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Thread module emulating a subset of Java's threading model."""
|
| 2 |
+
|
| 3 |
+
import os as _os
|
| 4 |
+
import sys as _sys
|
| 5 |
+
import _thread
|
| 6 |
+
import functools
|
| 7 |
+
|
| 8 |
+
from time import monotonic as _time
|
| 9 |
+
from _weakrefset import WeakSet
|
| 10 |
+
from itertools import islice as _islice, count as _count
|
| 11 |
+
try:
|
| 12 |
+
from _collections import deque as _deque
|
| 13 |
+
except ImportError:
|
| 14 |
+
from collections import deque as _deque
|
| 15 |
+
|
| 16 |
+
# Note regarding PEP 8 compliant names
|
| 17 |
+
# This threading model was originally inspired by Java, and inherited
|
| 18 |
+
# the convention of camelCase function and method names from that
|
| 19 |
+
# language. Those original names are not in any imminent danger of
|
| 20 |
+
# being deprecated (even for Py3k),so this module provides them as an
|
| 21 |
+
# alias for the PEP 8 compliant names
|
| 22 |
+
# Note that using the new PEP 8 compliant names facilitates substitution
|
| 23 |
+
# with the multiprocessing module, which doesn't provide the old
|
| 24 |
+
# Java inspired names.
|
| 25 |
+
|
| 26 |
+
__all__ = ['get_ident', 'active_count', 'Condition', 'current_thread',
|
| 27 |
+
'enumerate', 'main_thread', 'TIMEOUT_MAX',
|
| 28 |
+
'Event', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
|
| 29 |
+
'Barrier', 'BrokenBarrierError', 'Timer', 'ThreadError',
|
| 30 |
+
'setprofile', 'settrace', 'local', 'stack_size',
|
| 31 |
+
'excepthook', 'ExceptHookArgs', 'gettrace', 'getprofile']
|
| 32 |
+
|
| 33 |
+
# Rename some stuff so "from threading import *" is safe
|
| 34 |
+
_start_new_thread = _thread.start_new_thread
|
| 35 |
+
_allocate_lock = _thread.allocate_lock
|
| 36 |
+
_set_sentinel = _thread._set_sentinel
|
| 37 |
+
get_ident = _thread.get_ident
|
| 38 |
+
try:
|
| 39 |
+
get_native_id = _thread.get_native_id
|
| 40 |
+
_HAVE_THREAD_NATIVE_ID = True
|
| 41 |
+
__all__.append('get_native_id')
|
| 42 |
+
except AttributeError:
|
| 43 |
+
_HAVE_THREAD_NATIVE_ID = False
|
| 44 |
+
ThreadError = _thread.error
|
| 45 |
+
try:
|
| 46 |
+
_CRLock = _thread.RLock
|
| 47 |
+
except AttributeError:
|
| 48 |
+
_CRLock = None
|
| 49 |
+
TIMEOUT_MAX = _thread.TIMEOUT_MAX
|
| 50 |
+
del _thread
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
# Support for profile and trace hooks
|
| 54 |
+
|
| 55 |
+
# Module-level hooks handed to sys.setprofile()/sys.settrace() in every
# thread started through this module, just before the thread's run() method
# is invoked.  None means no hook is installed.
_profile_hook = None
_trace_hook = None
def setprofile(func):
    """Install *func* as the profile hook for threads started by this module.

    Each thread created through this module passes *func* to sys.setprofile()
    immediately before invoking its run() method.
    """
    global _profile_hook
    _profile_hook = func
def getprofile():
    """Return the profile hook installed via threading.setprofile(), or None."""
    return _profile_hook
def settrace(func):
    """Install *func* as the trace hook for threads started by this module.

    Each thread created through this module passes *func* to sys.settrace()
    immediately before invoking its run() method.
    """
    global _trace_hook
    _trace_hook = func
def gettrace():
    """Return the trace hook installed via threading.settrace(), or None."""
    return _trace_hook
| 86 |
+
# Synchronization classes
|
| 87 |
+
|
| 88 |
+
Lock = _allocate_lock
|
| 89 |
+
|
| 90 |
+
def RLock(*args, **kwargs):
    """Factory function that returns a new reentrant lock.

    A reentrant lock must be released by the thread that acquired it.  The
    owning thread may acquire it again without blocking, and must release it
    once for every time it has acquired it.
    """
    # Prefer the C implementation when the _thread module provides one;
    # otherwise fall back to the pure-Python _RLock.
    if _CRLock is not None:
        return _CRLock(*args, **kwargs)
    return _PyRLock(*args, **kwargs)
class _RLock:
    """This class implements reentrant lock objects.

    A reentrant lock must be released by the thread that acquired it. Once a
    thread has acquired a reentrant lock, the same thread may acquire it
    again without blocking; the thread must release it once for each time it
    has acquired it.

    """

    def __init__(self):
        # Underlying non-reentrant lock; held for the whole time any thread
        # owns this RLock.
        self._block = _allocate_lock()
        # Thread ident of the current owner, or None while unlocked.
        self._owner = None
        # Recursion depth: number of unmatched acquire() calls by the owner.
        self._count = 0

    def __repr__(self):
        owner = self._owner
        try:
            # Map the owner's ident to a thread name when possible; falls
            # back to the raw ident if the thread is no longer registered.
            owner = _active[owner].name
        except KeyError:
            pass
        return "<%s %s.%s object owner=%r count=%d at %s>" % (
            "locked" if self._block.locked() else "unlocked",
            self.__class__.__module__,
            self.__class__.__qualname__,
            owner,
            self._count,
            hex(id(self))
        )

    def _at_fork_reinit(self):
        # Reset to a pristine unlocked state in the child after os.fork();
        # the parent's owner/count are meaningless in the child process.
        self._block._at_fork_reinit()
        self._owner = None
        self._count = 0

    def acquire(self, blocking=True, timeout=-1):
        """Acquire a lock, blocking or non-blocking.

        When invoked without arguments: if this thread already owns the lock,
        increment the recursion level by one, and return immediately. Otherwise,
        if another thread owns the lock, block until the lock is unlocked. Once
        the lock is unlocked (not owned by any thread), then grab ownership, set
        the recursion level to one, and return. If more than one thread is
        blocked waiting until the lock is unlocked, only one at a time will be
        able to grab ownership of the lock. There is no return value in this
        case.

        When invoked with the blocking argument set to true, do the same thing
        as when called without arguments, and return true.

        When invoked with the blocking argument set to false, do not block. If a
        call without an argument would block, return false immediately;
        otherwise, do the same thing as when called without arguments, and
        return true.

        When invoked with the floating-point timeout argument set to a positive
        value, block for at most the number of seconds specified by timeout
        and as long as the lock cannot be acquired. Return true if the lock has
        been acquired, false if the timeout has elapsed.

        """
        me = get_ident()
        if self._owner == me:
            # Reentrant path: already the owner, just deepen the recursion.
            self._count += 1
            return 1
        rc = self._block.acquire(blocking, timeout)
        if rc:
            # First acquisition by this thread: record ownership.
            self._owner = me
            self._count = 1
        return rc

    __enter__ = acquire

    def release(self):
        """Release a lock, decrementing the recursion level.

        If after the decrement it is zero, reset the lock to unlocked (not owned
        by any thread), and if any other threads are blocked waiting for the
        lock to become unlocked, allow exactly one of them to proceed. If after
        the decrement the recursion level is still nonzero, the lock remains
        locked and owned by the calling thread.

        Only call this method when the calling thread owns the lock. A
        RuntimeError is raised if this method is called when the lock is
        unlocked.

        There is no return value.

        """
        if self._owner != get_ident():
            raise RuntimeError("cannot release un-acquired lock")
        self._count = count = self._count - 1
        if not count:
            # Recursion fully unwound: drop ownership before releasing the
            # underlying lock so a waiter sees a consistent unlocked state.
            self._owner = None
            self._block.release()

    def __exit__(self, t, v, tb):
        self.release()

    # Internal methods used by condition variables

    def _acquire_restore(self, state):
        # Reacquire the underlying lock, then restore the (count, owner)
        # pair saved by _release_save().
        self._block.acquire()
        self._count, self._owner = state

    def _release_save(self):
        # Fully unlock regardless of recursion depth and return the state
        # needed to restore it later (used by Condition.wait()).
        if self._count == 0:
            raise RuntimeError("cannot release un-acquired lock")
        count = self._count
        self._count = 0
        owner = self._owner
        self._owner = None
        self._block.release()
        return (count, owner)

    def _is_owned(self):
        # True if the calling thread currently owns this lock.
        return self._owner == get_ident()
_PyRLock = _RLock
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
class Condition:
    """Class that implements a condition variable.

    A condition variable allows one or more threads to wait until they are
    notified by another thread.

    If the lock argument is given and not None, it must be a Lock or RLock
    object, and it is used as the underlying lock. Otherwise, a new RLock object
    is created and used as the underlying lock.

    """

    def __init__(self, lock=None):
        if lock is None:
            lock = RLock()
        self._lock = lock
        # Export the lock's acquire() and release() methods
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock defines _release_save() and/or _acquire_restore(),
        # these override the default implementations (which just call
        # release() and acquire() on the lock). Ditto for _is_owned().
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        # One waiter lock per thread currently blocked in wait().
        self._waiters = _deque()

    def _at_fork_reinit(self):
        # Reset after os.fork(): the child has no waiting threads.
        self._lock._at_fork_reinit()
        self._waiters.clear()

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, *args):
        return self._lock.__exit__(*args)

    def __repr__(self):
        return "<Condition(%s, %d)>" % (self._lock, len(self._waiters))

    def _release_save(self):
        self._lock.release()           # No state to save

    def _acquire_restore(self, x):
        self._lock.acquire()           # Ignore saved state

    def _is_owned(self):
        # Return True if lock is owned by current_thread.
        # This method is called only if _lock doesn't have _is_owned().
        if self._lock.acquire(False):
            self._lock.release()
            return False
        else:
            return True

    def wait(self, timeout=None):
        """Wait until notified or until a timeout occurs.

        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.

        This method releases the underlying lock, and then blocks until it is
        awakened by a notify() or notify_all() call for the same condition
        variable in another thread, or until the optional timeout occurs. Once
        awakened or timed out, it re-acquires the lock and returns.

        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof).

        When the underlying lock is an RLock, it is not released using its
        release() method, since this may not actually unlock the lock when it
        was acquired multiple times recursively. Instead, an internal interface
        of the RLock class is used, which really unlocks it even when it has
        been recursively acquired several times. Another internal interface is
        then used to restore the recursion level when the lock is reacquired.

        """
        if not self._is_owned():
            raise RuntimeError("cannot wait on un-acquired lock")
        # A fresh, already-held lock per waiter: notify() releases it to
        # wake exactly this thread.
        waiter = _allocate_lock()
        waiter.acquire()
        self._waiters.append(waiter)
        saved_state = self._release_save()
        gotit = False
        try:    # restore state no matter what (e.g., KeyboardInterrupt)
            if timeout is None:
                waiter.acquire()
                gotit = True
            else:
                if timeout > 0:
                    gotit = waiter.acquire(True, timeout)
                else:
                    gotit = waiter.acquire(False)
            return gotit
        finally:
            self._acquire_restore(saved_state)
            if not gotit:
                # Timed out: withdraw our waiter so a future notify() does
                # not waste a wakeup on it.
                try:
                    self._waiters.remove(waiter)
                except ValueError:
                    pass

    def wait_for(self, predicate, timeout=None):
        """Wait until a condition evaluates to True.

        predicate should be a callable which result will be interpreted as a
        boolean value. A timeout may be provided giving the maximum time to
        wait.

        """
        endtime = None
        waittime = timeout
        result = predicate()
        while not result:
            if waittime is not None:
                if endtime is None:
                    # First iteration: fix the absolute deadline.
                    endtime = _time() + waittime
                else:
                    # Later iterations: wait only for the remaining time.
                    waittime = endtime - _time()
                    if waittime <= 0:
                        break
            self.wait(waittime)
            result = predicate()
        return result

    def notify(self, n=1):
        """Wake up one or more threads waiting on this condition, if any.

        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.

        This method wakes up at most n of the threads waiting for the condition
        variable; it is a no-op if no threads are waiting.

        """
        if not self._is_owned():
            raise RuntimeError("cannot notify on un-acquired lock")
        waiters = self._waiters
        while waiters and n > 0:
            waiter = waiters[0]
            try:
                waiter.release()
            except RuntimeError:
                # gh-92530: The previous call of notify() released the lock,
                # but was interrupted before removing it from the queue.
                # It can happen if a signal handler raises an exception,
                # like CTRL+C which raises KeyboardInterrupt.
                pass
            else:
                n -= 1
            try:
                waiters.remove(waiter)
            except ValueError:
                pass

    def notify_all(self):
        """Wake up all threads waiting on this condition.

        If the calling thread has not acquired the lock when this method
        is called, a RuntimeError is raised.

        """
        self.notify(len(self._waiters))

    def notifyAll(self):
        """Wake up all threads waiting on this condition.

        This method is deprecated, use notify_all() instead.

        """
        import warnings
        warnings.warn('notifyAll() is deprecated, use notify_all() instead',
                      DeprecationWarning, stacklevel=2)
        self.notify_all()
class Semaphore:
    """This class implements semaphore objects.

    Semaphores manage a counter representing the number of release() calls minus
    the number of acquire() calls, plus an initial value. The acquire() method
    blocks if necessary until it can return without making the counter
    negative. If not given, value defaults to 1.

    """

    # After Tim Peters' semaphore class, but not quite the same (no maximum)

    def __init__(self, value=1):
        if value < 0:
            raise ValueError("semaphore initial value must be >= 0")
        # All waiting/waking is funneled through a condition guarding _value.
        self._cond = Condition(Lock())
        self._value = value

    def acquire(self, blocking=True, timeout=None):
        """Acquire a semaphore, decrementing the internal counter by one.

        When invoked without arguments: if the internal counter is larger than
        zero on entry, decrement it by one and return immediately. If it is zero
        on entry, block, waiting until some other thread has called release() to
        make it larger than zero. This is done with proper interlocking so that
        if multiple acquire() calls are blocked, release() will wake exactly one
        of them up. The implementation may pick one at random, so the order in
        which blocked threads are awakened should not be relied on. There is no
        return value in this case.

        When invoked with blocking set to true, do the same thing as when called
        without arguments, and return true.

        When invoked with blocking set to false, do not block. If a call without
        an argument would block, return false immediately; otherwise, do the
        same thing as when called without arguments, and return true.

        When invoked with a timeout other than None, it will block for at
        most timeout seconds. If acquire does not complete successfully in
        that interval, return false. Return true otherwise.

        """
        if not blocking and timeout is not None:
            raise ValueError("can't specify timeout for non-blocking acquire")
        rc = False
        endtime = None
        with self._cond:
            # Loop until the counter is positive; the else-clause below runs
            # only when the while-condition turns false (counter > 0), not
            # when we break out on timeout / non-blocking failure.
            while self._value == 0:
                if not blocking:
                    break
                if timeout is not None:
                    if endtime is None:
                        # First pass: pin the absolute deadline.
                        endtime = _time() + timeout
                    else:
                        # Subsequent passes: shrink the remaining wait.
                        timeout = endtime - _time()
                        if timeout <= 0:
                            break
                self._cond.wait(timeout)
            else:
                self._value -= 1
                rc = True
        return rc

    __enter__ = acquire

    def release(self, n=1):
        """Release a semaphore, incrementing the internal counter by one or more.

        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.

        """
        if n < 1:
            raise ValueError('n must be one or more')
        with self._cond:
            self._value += n
            # Wake one waiter per released unit.
            for i in range(n):
                self._cond.notify()

    def __exit__(self, t, v, tb):
        self.release()
class BoundedSemaphore(Semaphore):
    """A semaphore whose counter may never exceed its initial value.

    Behaves like a regular Semaphore, except that release() raises ValueError
    when it would push the internal counter above the value the semaphore was
    created with.  Releasing more often than acquiring is almost always a
    bug, and bounded semaphores surface it immediately.  In most situations
    semaphores are used to guard resources with limited capacity.

    Like regular semaphores, bounded semaphores manage a counter representing
    the number of release() calls minus the number of acquire() calls, plus an
    initial value.  The acquire() method blocks if necessary until it can
    return without making the counter negative.  If not given, value defaults
    to 1.
    """

    def __init__(self, value=1):
        Semaphore.__init__(self, value)
        # Remember the ceiling so release() can enforce it.
        self._initial_value = value

    def release(self, n=1):
        """Release the semaphore, incrementing the counter by *n* (default 1).

        Wakes up to *n* threads blocked in acquire().  Raises ValueError if
        the release would lift the counter above the initial value, i.e. when
        releases outnumber acquires.
        """
        if n < 1:
            raise ValueError('n must be one or more')
        with self._cond:
            new_value = self._value + n
            if new_value > self._initial_value:
                raise ValueError("Semaphore released too many times")
            self._value = new_value
            for _ in range(n):
                self._cond.notify()
class Event:
    """Class implementing event objects.

    An event manages an internal boolean flag: set() turns it true,
    clear() turns it false, and wait() blocks until it is true.  The flag
    starts out false.
    """

    # After Tim Peters' event class (without is_posted())

    def __init__(self):
        self._cond = Condition(Lock())
        self._flag = False

    def _at_fork_reinit(self):
        # Private method called by Thread._reset_internal_locks()
        self._cond._at_fork_reinit()

    def is_set(self):
        """Return true if and only if the internal flag is true."""
        return self._flag

    def isSet(self):
        """Return true if and only if the internal flag is true.

        This method is deprecated, use is_set() instead.
        """
        import warnings
        warnings.warn('isSet() is deprecated, use is_set() instead',
                      DeprecationWarning, stacklevel=2)
        return self.is_set()

    def set(self):
        """Set the internal flag to true.

        Every thread blocked in wait() is awakened, and later wait() calls
        return immediately until clear() is invoked.
        """
        with self._cond:
            self._flag = True
            self._cond.notify_all()

    def clear(self):
        """Reset the internal flag to false.

        Subsequent wait() calls block until set() is invoked again.
        """
        with self._cond:
            self._flag = False

    def wait(self, timeout=None):
        """Block until the internal flag is true.

        Returns immediately when the flag is already true; otherwise blocks
        until another thread calls set(), or until the optional *timeout*
        (a float number of seconds) elapses.

        Returns the internal flag on exit: always True unless a timeout was
        given and the wait timed out.
        """
        with self._cond:
            if self._flag:
                return True
            return self._cond.wait(timeout)
# A barrier class. Inspired in part by the pthread_barrier_* api and
|
| 612 |
+
# the CyclicBarrier class from Java. See
|
| 613 |
+
# http://sourceware.org/pthreads-win32/manual/pthread_barrier_init.html and
|
| 614 |
+
# http://java.sun.com/j2se/1.5.0/docs/api/java/util/concurrent/
|
| 615 |
+
# CyclicBarrier.html
|
| 616 |
+
# for information.
|
| 617 |
+
# We maintain two main states, 'filling' and 'draining' enabling the barrier
|
| 618 |
+
# to be cyclic. Threads are not allowed into it until it has fully drained
|
| 619 |
+
# since the previous cycle. In addition, a 'resetting' state exists which is
|
| 620 |
+
# similar to 'draining' except that threads leave with a BrokenBarrierError,
|
| 621 |
+
# and a 'broken' state in which all threads get the exception.
|
| 622 |
+
class Barrier:
|
| 623 |
+
"""Implements a Barrier.
|
| 624 |
+
|
| 625 |
+
Useful for synchronizing a fixed number of threads at known synchronization
|
| 626 |
+
points. Threads block on 'wait()' and are simultaneously awoken once they
|
| 627 |
+
have all made that call.
|
| 628 |
+
|
| 629 |
+
"""
|
| 630 |
+
|
| 631 |
+
def __init__(self, parties, action=None, timeout=None):
|
| 632 |
+
"""Create a barrier, initialised to 'parties' threads.
|
| 633 |
+
|
| 634 |
+
'action' is a callable which, when supplied, will be called by one of
|
| 635 |
+
the threads after they have all entered the barrier and just prior to
|
| 636 |
+
releasing them all. If a 'timeout' is provided, it is used as the
|
| 637 |
+
default for all subsequent 'wait()' calls.
|
| 638 |
+
|
| 639 |
+
"""
|
| 640 |
+
self._cond = Condition(Lock())
|
| 641 |
+
self._action = action
|
| 642 |
+
self._timeout = timeout
|
| 643 |
+
self._parties = parties
|
| 644 |
+
self._state = 0 # 0 filling, 1 draining, -1 resetting, -2 broken
|
| 645 |
+
self._count = 0
|
| 646 |
+
|
| 647 |
+
def wait(self, timeout=None):
|
| 648 |
+
"""Wait for the barrier.
|
| 649 |
+
|
| 650 |
+
When the specified number of threads have started waiting, they are all
|
| 651 |
+
simultaneously awoken. If an 'action' was provided for the barrier, one
|
| 652 |
+
of the threads will have executed that callback prior to returning.
|
| 653 |
+
Returns an individual index number from 0 to 'parties-1'.
|
| 654 |
+
|
| 655 |
+
"""
|
| 656 |
+
if timeout is None:
|
| 657 |
+
timeout = self._timeout
|
| 658 |
+
with self._cond:
|
| 659 |
+
self._enter() # Block while the barrier drains.
|
| 660 |
+
index = self._count
|
| 661 |
+
self._count += 1
|
| 662 |
+
try:
|
| 663 |
+
if index + 1 == self._parties:
|
| 664 |
+
# We release the barrier
|
| 665 |
+
self._release()
|
| 666 |
+
else:
|
| 667 |
+
# We wait until someone releases us
|
| 668 |
+
self._wait(timeout)
|
| 669 |
+
return index
|
| 670 |
+
finally:
|
| 671 |
+
self._count -= 1
|
| 672 |
+
# Wake up any threads waiting for barrier to drain.
|
| 673 |
+
self._exit()
|
| 674 |
+
|
| 675 |
+
# Block until the barrier is ready for us, or raise an exception
|
| 676 |
+
# if it is broken.
|
| 677 |
+
def _enter(self):
|
| 678 |
+
while self._state in (-1, 1):
|
| 679 |
+
# It is draining or resetting, wait until done
|
| 680 |
+
self._cond.wait()
|
| 681 |
+
#see if the barrier is in a broken state
|
| 682 |
+
if self._state < 0:
|
| 683 |
+
raise BrokenBarrierError
|
| 684 |
+
assert self._state == 0
|
| 685 |
+
|
| 686 |
+
# Optionally run the 'action' and release the threads waiting
|
| 687 |
+
# in the barrier.
|
| 688 |
+
def _release(self):
|
| 689 |
+
try:
|
| 690 |
+
if self._action:
|
| 691 |
+
self._action()
|
| 692 |
+
# enter draining state
|
| 693 |
+
self._state = 1
|
| 694 |
+
self._cond.notify_all()
|
| 695 |
+
except:
|
| 696 |
+
#an exception during the _action handler. Break and reraise
|
| 697 |
+
self._break()
|
| 698 |
+
raise
|
| 699 |
+
|
| 700 |
+
# Wait in the barrier until we are released. Raise an exception
|
| 701 |
+
# if the barrier is reset or broken.
|
| 702 |
+
def _wait(self, timeout):
|
| 703 |
+
if not self._cond.wait_for(lambda : self._state != 0, timeout):
|
| 704 |
+
#timed out. Break the barrier
|
| 705 |
+
self._break()
|
| 706 |
+
raise BrokenBarrierError
|
| 707 |
+
if self._state < 0:
|
| 708 |
+
raise BrokenBarrierError
|
| 709 |
+
assert self._state == 1
|
| 710 |
+
|
| 711 |
+
# If we are the last thread to exit the barrier, signal any threads
|
| 712 |
+
# waiting for the barrier to drain.
|
| 713 |
+
def _exit(self):
|
| 714 |
+
if self._count == 0:
|
| 715 |
+
if self._state in (-1, 1):
|
| 716 |
+
#resetting or draining
|
| 717 |
+
self._state = 0
|
| 718 |
+
self._cond.notify_all()
|
| 719 |
+
|
| 720 |
+
def reset(self):
|
| 721 |
+
"""Reset the barrier to the initial state.
|
| 722 |
+
|
| 723 |
+
Any threads currently waiting will get the BrokenBarrier exception
|
| 724 |
+
raised.
|
| 725 |
+
|
| 726 |
+
"""
|
| 727 |
+
with self._cond:
|
| 728 |
+
if self._count > 0:
|
| 729 |
+
if self._state == 0:
|
| 730 |
+
#reset the barrier, waking up threads
|
| 731 |
+
self._state = -1
|
| 732 |
+
elif self._state == -2:
|
| 733 |
+
#was broken, set it to reset state
|
| 734 |
+
#which clears when the last thread exits
|
| 735 |
+
self._state = -1
|
| 736 |
+
else:
|
| 737 |
+
self._state = 0
|
| 738 |
+
self._cond.notify_all()
|
| 739 |
+
|
| 740 |
+
def abort(self):
|
| 741 |
+
"""Place the barrier into a 'broken' state.
|
| 742 |
+
|
| 743 |
+
Useful in case of error. Any currently waiting threads and threads
|
| 744 |
+
attempting to 'wait()' will have BrokenBarrierError raised.
|
| 745 |
+
|
| 746 |
+
"""
|
| 747 |
+
with self._cond:
|
| 748 |
+
self._break()
|
| 749 |
+
|
| 750 |
+
def _break(self):
|
| 751 |
+
# An internal error was detected. The barrier is set to
|
| 752 |
+
# a broken state all parties awakened.
|
| 753 |
+
self._state = -2
|
| 754 |
+
self._cond.notify_all()
|
| 755 |
+
|
| 756 |
+
@property
|
| 757 |
+
def parties(self):
|
| 758 |
+
"""Return the number of threads required to trip the barrier."""
|
| 759 |
+
return self._parties
|
| 760 |
+
|
| 761 |
+
@property
|
| 762 |
+
def n_waiting(self):
|
| 763 |
+
"""Return the number of threads currently waiting at the barrier."""
|
| 764 |
+
# We don't need synchronization here since this is an ephemeral result
|
| 765 |
+
# anyway. It returns the correct value in the steady state.
|
| 766 |
+
if self._state == 0:
|
| 767 |
+
return self._count
|
| 768 |
+
return 0
|
| 769 |
+
|
| 770 |
+
@property
|
| 771 |
+
def broken(self):
|
| 772 |
+
"""Return True if the barrier is in a broken state."""
|
| 773 |
+
return self._state == -2
|
| 774 |
+
|
| 775 |
+
# exception raised by the Barrier class
|
| 776 |
+
class BrokenBarrierError(RuntimeError):
|
| 777 |
+
pass
|
| 778 |
+
|
| 779 |
+
|
| 780 |
+
# Helper to generate new thread names
|
| 781 |
+
_counter = _count(1).__next__
|
| 782 |
+
def _newname(name_template):
|
| 783 |
+
return name_template % _counter()
|
| 784 |
+
|
| 785 |
+
# Active thread administration.
|
| 786 |
+
#
|
| 787 |
+
# bpo-44422: Use a reentrant lock to allow reentrant calls to functions like
|
| 788 |
+
# threading.enumerate().
|
| 789 |
+
_active_limbo_lock = RLock()
|
| 790 |
+
_active = {} # maps thread id to Thread object
|
| 791 |
+
_limbo = {}
|
| 792 |
+
_dangling = WeakSet()
|
| 793 |
+
|
| 794 |
+
# Set of Thread._tstate_lock locks of non-daemon threads used by _shutdown()
|
| 795 |
+
# to wait until all Python thread states get deleted:
|
| 796 |
+
# see Thread._set_tstate_lock().
|
| 797 |
+
_shutdown_locks_lock = _allocate_lock()
|
| 798 |
+
_shutdown_locks = set()
|
| 799 |
+
|
| 800 |
+
def _maintain_shutdown_locks():
|
| 801 |
+
"""
|
| 802 |
+
Drop any shutdown locks that don't correspond to running threads anymore.
|
| 803 |
+
|
| 804 |
+
Calling this from time to time avoids an ever-growing _shutdown_locks
|
| 805 |
+
set when Thread objects are not joined explicitly. See bpo-37788.
|
| 806 |
+
|
| 807 |
+
This must be called with _shutdown_locks_lock acquired.
|
| 808 |
+
"""
|
| 809 |
+
# If a lock was released, the corresponding thread has exited
|
| 810 |
+
to_remove = [lock for lock in _shutdown_locks if not lock.locked()]
|
| 811 |
+
_shutdown_locks.difference_update(to_remove)
|
| 812 |
+
|
| 813 |
+
|
| 814 |
+
# Main class for threads
|
| 815 |
+
|
| 816 |
+
class Thread:
|
| 817 |
+
"""A class that represents a thread of control.
|
| 818 |
+
|
| 819 |
+
This class can be safely subclassed in a limited fashion. There are two ways
|
| 820 |
+
to specify the activity: by passing a callable object to the constructor, or
|
| 821 |
+
by overriding the run() method in a subclass.
|
| 822 |
+
|
| 823 |
+
"""
|
| 824 |
+
|
| 825 |
+
_initialized = False
|
| 826 |
+
|
| 827 |
+
def __init__(self, group=None, target=None, name=None,
|
| 828 |
+
args=(), kwargs=None, *, daemon=None):
|
| 829 |
+
"""This constructor should always be called with keyword arguments. Arguments are:
|
| 830 |
+
|
| 831 |
+
*group* should be None; reserved for future extension when a ThreadGroup
|
| 832 |
+
class is implemented.
|
| 833 |
+
|
| 834 |
+
*target* is the callable object to be invoked by the run()
|
| 835 |
+
method. Defaults to None, meaning nothing is called.
|
| 836 |
+
|
| 837 |
+
*name* is the thread name. By default, a unique name is constructed of
|
| 838 |
+
the form "Thread-N" where N is a small decimal number.
|
| 839 |
+
|
| 840 |
+
*args* is the argument tuple for the target invocation. Defaults to ().
|
| 841 |
+
|
| 842 |
+
*kwargs* is a dictionary of keyword arguments for the target
|
| 843 |
+
invocation. Defaults to {}.
|
| 844 |
+
|
| 845 |
+
If a subclass overrides the constructor, it must make sure to invoke
|
| 846 |
+
the base class constructor (Thread.__init__()) before doing anything
|
| 847 |
+
else to the thread.
|
| 848 |
+
|
| 849 |
+
"""
|
| 850 |
+
assert group is None, "group argument must be None for now"
|
| 851 |
+
if kwargs is None:
|
| 852 |
+
kwargs = {}
|
| 853 |
+
if name:
|
| 854 |
+
name = str(name)
|
| 855 |
+
else:
|
| 856 |
+
name = _newname("Thread-%d")
|
| 857 |
+
if target is not None:
|
| 858 |
+
try:
|
| 859 |
+
target_name = target.__name__
|
| 860 |
+
name += f" ({target_name})"
|
| 861 |
+
except AttributeError:
|
| 862 |
+
pass
|
| 863 |
+
|
| 864 |
+
self._target = target
|
| 865 |
+
self._name = name
|
| 866 |
+
self._args = args
|
| 867 |
+
self._kwargs = kwargs
|
| 868 |
+
if daemon is not None:
|
| 869 |
+
self._daemonic = daemon
|
| 870 |
+
else:
|
| 871 |
+
self._daemonic = current_thread().daemon
|
| 872 |
+
self._ident = None
|
| 873 |
+
if _HAVE_THREAD_NATIVE_ID:
|
| 874 |
+
self._native_id = None
|
| 875 |
+
self._tstate_lock = None
|
| 876 |
+
self._started = Event()
|
| 877 |
+
self._is_stopped = False
|
| 878 |
+
self._initialized = True
|
| 879 |
+
# Copy of sys.stderr used by self._invoke_excepthook()
|
| 880 |
+
self._stderr = _sys.stderr
|
| 881 |
+
self._invoke_excepthook = _make_invoke_excepthook()
|
| 882 |
+
# For debugging and _after_fork()
|
| 883 |
+
_dangling.add(self)
|
| 884 |
+
|
| 885 |
+
def _reset_internal_locks(self, is_alive):
|
| 886 |
+
# private! Called by _after_fork() to reset our internal locks as
|
| 887 |
+
# they may be in an invalid state leading to a deadlock or crash.
|
| 888 |
+
self._started._at_fork_reinit()
|
| 889 |
+
if is_alive:
|
| 890 |
+
# bpo-42350: If the fork happens when the thread is already stopped
|
| 891 |
+
# (ex: after threading._shutdown() has been called), _tstate_lock
|
| 892 |
+
# is None. Do nothing in this case.
|
| 893 |
+
if self._tstate_lock is not None:
|
| 894 |
+
self._tstate_lock._at_fork_reinit()
|
| 895 |
+
self._tstate_lock.acquire()
|
| 896 |
+
else:
|
| 897 |
+
# The thread isn't alive after fork: it doesn't have a tstate
|
| 898 |
+
# anymore.
|
| 899 |
+
self._is_stopped = True
|
| 900 |
+
self._tstate_lock = None
|
| 901 |
+
|
| 902 |
+
def __repr__(self):
|
| 903 |
+
assert self._initialized, "Thread.__init__() was not called"
|
| 904 |
+
status = "initial"
|
| 905 |
+
if self._started.is_set():
|
| 906 |
+
status = "started"
|
| 907 |
+
self.is_alive() # easy way to get ._is_stopped set when appropriate
|
| 908 |
+
if self._is_stopped:
|
| 909 |
+
status = "stopped"
|
| 910 |
+
if self._daemonic:
|
| 911 |
+
status += " daemon"
|
| 912 |
+
if self._ident is not None:
|
| 913 |
+
status += " %s" % self._ident
|
| 914 |
+
return "<%s(%s, %s)>" % (self.__class__.__name__, self._name, status)
|
| 915 |
+
|
| 916 |
+
def start(self):
|
| 917 |
+
"""Start the thread's activity.
|
| 918 |
+
|
| 919 |
+
It must be called at most once per thread object. It arranges for the
|
| 920 |
+
object's run() method to be invoked in a separate thread of control.
|
| 921 |
+
|
| 922 |
+
This method will raise a RuntimeError if called more than once on the
|
| 923 |
+
same thread object.
|
| 924 |
+
|
| 925 |
+
"""
|
| 926 |
+
if not self._initialized:
|
| 927 |
+
raise RuntimeError("thread.__init__() not called")
|
| 928 |
+
|
| 929 |
+
if self._started.is_set():
|
| 930 |
+
raise RuntimeError("threads can only be started once")
|
| 931 |
+
|
| 932 |
+
with _active_limbo_lock:
|
| 933 |
+
_limbo[self] = self
|
| 934 |
+
try:
|
| 935 |
+
_start_new_thread(self._bootstrap, ())
|
| 936 |
+
except Exception:
|
| 937 |
+
with _active_limbo_lock:
|
| 938 |
+
del _limbo[self]
|
| 939 |
+
raise
|
| 940 |
+
self._started.wait()
|
| 941 |
+
|
| 942 |
+
def run(self):
|
| 943 |
+
"""Method representing the thread's activity.
|
| 944 |
+
|
| 945 |
+
You may override this method in a subclass. The standard run() method
|
| 946 |
+
invokes the callable object passed to the object's constructor as the
|
| 947 |
+
target argument, if any, with sequential and keyword arguments taken
|
| 948 |
+
from the args and kwargs arguments, respectively.
|
| 949 |
+
|
| 950 |
+
"""
|
| 951 |
+
try:
|
| 952 |
+
if self._target is not None:
|
| 953 |
+
self._target(*self._args, **self._kwargs)
|
| 954 |
+
finally:
|
| 955 |
+
# Avoid a refcycle if the thread is running a function with
|
| 956 |
+
# an argument that has a member that points to the thread.
|
| 957 |
+
del self._target, self._args, self._kwargs
|
| 958 |
+
|
| 959 |
+
def _bootstrap(self):
|
| 960 |
+
# Wrapper around the real bootstrap code that ignores
|
| 961 |
+
# exceptions during interpreter cleanup. Those typically
|
| 962 |
+
# happen when a daemon thread wakes up at an unfortunate
|
| 963 |
+
# moment, finds the world around it destroyed, and raises some
|
| 964 |
+
# random exception *** while trying to report the exception in
|
| 965 |
+
# _bootstrap_inner() below ***. Those random exceptions
|
| 966 |
+
# don't help anybody, and they confuse users, so we suppress
|
| 967 |
+
# them. We suppress them only when it appears that the world
|
| 968 |
+
# indeed has already been destroyed, so that exceptions in
|
| 969 |
+
# _bootstrap_inner() during normal business hours are properly
|
| 970 |
+
# reported. Also, we only suppress them for daemonic threads;
|
| 971 |
+
# if a non-daemonic encounters this, something else is wrong.
|
| 972 |
+
try:
|
| 973 |
+
self._bootstrap_inner()
|
| 974 |
+
except:
|
| 975 |
+
if self._daemonic and _sys is None:
|
| 976 |
+
return
|
| 977 |
+
raise
|
| 978 |
+
|
| 979 |
+
def _set_ident(self):
|
| 980 |
+
self._ident = get_ident()
|
| 981 |
+
|
| 982 |
+
if _HAVE_THREAD_NATIVE_ID:
|
| 983 |
+
def _set_native_id(self):
|
| 984 |
+
self._native_id = get_native_id()
|
| 985 |
+
|
| 986 |
+
def _set_tstate_lock(self):
|
| 987 |
+
"""
|
| 988 |
+
Set a lock object which will be released by the interpreter when
|
| 989 |
+
the underlying thread state (see pystate.h) gets deleted.
|
| 990 |
+
"""
|
| 991 |
+
self._tstate_lock = _set_sentinel()
|
| 992 |
+
self._tstate_lock.acquire()
|
| 993 |
+
|
| 994 |
+
if not self.daemon:
|
| 995 |
+
with _shutdown_locks_lock:
|
| 996 |
+
_maintain_shutdown_locks()
|
| 997 |
+
_shutdown_locks.add(self._tstate_lock)
|
| 998 |
+
|
| 999 |
+
def _bootstrap_inner(self):
|
| 1000 |
+
try:
|
| 1001 |
+
self._set_ident()
|
| 1002 |
+
self._set_tstate_lock()
|
| 1003 |
+
if _HAVE_THREAD_NATIVE_ID:
|
| 1004 |
+
self._set_native_id()
|
| 1005 |
+
self._started.set()
|
| 1006 |
+
with _active_limbo_lock:
|
| 1007 |
+
_active[self._ident] = self
|
| 1008 |
+
del _limbo[self]
|
| 1009 |
+
|
| 1010 |
+
if _trace_hook:
|
| 1011 |
+
_sys.settrace(_trace_hook)
|
| 1012 |
+
if _profile_hook:
|
| 1013 |
+
_sys.setprofile(_profile_hook)
|
| 1014 |
+
|
| 1015 |
+
try:
|
| 1016 |
+
self.run()
|
| 1017 |
+
except:
|
| 1018 |
+
self._invoke_excepthook(self)
|
| 1019 |
+
finally:
|
| 1020 |
+
with _active_limbo_lock:
|
| 1021 |
+
try:
|
| 1022 |
+
# We don't call self._delete() because it also
|
| 1023 |
+
# grabs _active_limbo_lock.
|
| 1024 |
+
del _active[get_ident()]
|
| 1025 |
+
except:
|
| 1026 |
+
pass
|
| 1027 |
+
|
| 1028 |
+
def _stop(self):
|
| 1029 |
+
# After calling ._stop(), .is_alive() returns False and .join() returns
|
| 1030 |
+
# immediately. ._tstate_lock must be released before calling ._stop().
|
| 1031 |
+
#
|
| 1032 |
+
# Normal case: C code at the end of the thread's life
|
| 1033 |
+
# (release_sentinel in _threadmodule.c) releases ._tstate_lock, and
|
| 1034 |
+
# that's detected by our ._wait_for_tstate_lock(), called by .join()
|
| 1035 |
+
# and .is_alive(). Any number of threads _may_ call ._stop()
|
| 1036 |
+
# simultaneously (for example, if multiple threads are blocked in
|
| 1037 |
+
# .join() calls), and they're not serialized. That's harmless -
|
| 1038 |
+
# they'll just make redundant rebindings of ._is_stopped and
|
| 1039 |
+
# ._tstate_lock. Obscure: we rebind ._tstate_lock last so that the
|
| 1040 |
+
# "assert self._is_stopped" in ._wait_for_tstate_lock() always works
|
| 1041 |
+
# (the assert is executed only if ._tstate_lock is None).
|
| 1042 |
+
#
|
| 1043 |
+
# Special case: _main_thread releases ._tstate_lock via this
|
| 1044 |
+
# module's _shutdown() function.
|
| 1045 |
+
lock = self._tstate_lock
|
| 1046 |
+
if lock is not None:
|
| 1047 |
+
assert not lock.locked()
|
| 1048 |
+
self._is_stopped = True
|
| 1049 |
+
self._tstate_lock = None
|
| 1050 |
+
if not self.daemon:
|
| 1051 |
+
with _shutdown_locks_lock:
|
| 1052 |
+
# Remove our lock and other released locks from _shutdown_locks
|
| 1053 |
+
_maintain_shutdown_locks()
|
| 1054 |
+
|
| 1055 |
+
def _delete(self):
|
| 1056 |
+
"Remove current thread from the dict of currently running threads."
|
| 1057 |
+
with _active_limbo_lock:
|
| 1058 |
+
del _active[get_ident()]
|
| 1059 |
+
# There must not be any python code between the previous line
|
| 1060 |
+
# and after the lock is released. Otherwise a tracing function
|
| 1061 |
+
# could try to acquire the lock again in the same thread, (in
|
| 1062 |
+
# current_thread()), and would block.
|
| 1063 |
+
|
| 1064 |
+
def join(self, timeout=None):
|
| 1065 |
+
"""Wait until the thread terminates.
|
| 1066 |
+
|
| 1067 |
+
This blocks the calling thread until the thread whose join() method is
|
| 1068 |
+
called terminates -- either normally or through an unhandled exception
|
| 1069 |
+
or until the optional timeout occurs.
|
| 1070 |
+
|
| 1071 |
+
When the timeout argument is present and not None, it should be a
|
| 1072 |
+
floating point number specifying a timeout for the operation in seconds
|
| 1073 |
+
(or fractions thereof). As join() always returns None, you must call
|
| 1074 |
+
is_alive() after join() to decide whether a timeout happened -- if the
|
| 1075 |
+
thread is still alive, the join() call timed out.
|
| 1076 |
+
|
| 1077 |
+
When the timeout argument is not present or None, the operation will
|
| 1078 |
+
block until the thread terminates.
|
| 1079 |
+
|
| 1080 |
+
A thread can be join()ed many times.
|
| 1081 |
+
|
| 1082 |
+
join() raises a RuntimeError if an attempt is made to join the current
|
| 1083 |
+
thread as that would cause a deadlock. It is also an error to join() a
|
| 1084 |
+
thread before it has been started and attempts to do so raises the same
|
| 1085 |
+
exception.
|
| 1086 |
+
|
| 1087 |
+
"""
|
| 1088 |
+
if not self._initialized:
|
| 1089 |
+
raise RuntimeError("Thread.__init__() not called")
|
| 1090 |
+
if not self._started.is_set():
|
| 1091 |
+
raise RuntimeError("cannot join thread before it is started")
|
| 1092 |
+
if self is current_thread():
|
| 1093 |
+
raise RuntimeError("cannot join current thread")
|
| 1094 |
+
|
| 1095 |
+
if timeout is None:
|
| 1096 |
+
self._wait_for_tstate_lock()
|
| 1097 |
+
else:
|
| 1098 |
+
# the behavior of a negative timeout isn't documented, but
|
| 1099 |
+
# historically .join(timeout=x) for x<0 has acted as if timeout=0
|
| 1100 |
+
self._wait_for_tstate_lock(timeout=max(timeout, 0))
|
| 1101 |
+
|
| 1102 |
+
def _wait_for_tstate_lock(self, block=True, timeout=-1):
|
| 1103 |
+
# Issue #18808: wait for the thread state to be gone.
|
| 1104 |
+
# At the end of the thread's life, after all knowledge of the thread
|
| 1105 |
+
# is removed from C data structures, C code releases our _tstate_lock.
|
| 1106 |
+
# This method passes its arguments to _tstate_lock.acquire().
|
| 1107 |
+
# If the lock is acquired, the C code is done, and self._stop() is
|
| 1108 |
+
# called. That sets ._is_stopped to True, and ._tstate_lock to None.
|
| 1109 |
+
lock = self._tstate_lock
|
| 1110 |
+
if lock is None:
|
| 1111 |
+
# already determined that the C code is done
|
| 1112 |
+
assert self._is_stopped
|
| 1113 |
+
return
|
| 1114 |
+
|
| 1115 |
+
try:
|
| 1116 |
+
if lock.acquire(block, timeout):
|
| 1117 |
+
lock.release()
|
| 1118 |
+
self._stop()
|
| 1119 |
+
except:
|
| 1120 |
+
if lock.locked():
|
| 1121 |
+
# bpo-45274: lock.acquire() acquired the lock, but the function
|
| 1122 |
+
# was interrupted with an exception before reaching the
|
| 1123 |
+
# lock.release(). It can happen if a signal handler raises an
|
| 1124 |
+
# exception, like CTRL+C which raises KeyboardInterrupt.
|
| 1125 |
+
lock.release()
|
| 1126 |
+
self._stop()
|
| 1127 |
+
raise
|
| 1128 |
+
|
| 1129 |
+
@property
|
| 1130 |
+
def name(self):
|
| 1131 |
+
"""A string used for identification purposes only.
|
| 1132 |
+
|
| 1133 |
+
It has no semantics. Multiple threads may be given the same name. The
|
| 1134 |
+
initial name is set by the constructor.
|
| 1135 |
+
|
| 1136 |
+
"""
|
| 1137 |
+
assert self._initialized, "Thread.__init__() not called"
|
| 1138 |
+
return self._name
|
| 1139 |
+
|
| 1140 |
+
@name.setter
|
| 1141 |
+
def name(self, name):
|
| 1142 |
+
assert self._initialized, "Thread.__init__() not called"
|
| 1143 |
+
self._name = str(name)
|
| 1144 |
+
|
| 1145 |
+
@property
|
| 1146 |
+
def ident(self):
|
| 1147 |
+
"""Thread identifier of this thread or None if it has not been started.
|
| 1148 |
+
|
| 1149 |
+
This is a nonzero integer. See the get_ident() function. Thread
|
| 1150 |
+
identifiers may be recycled when a thread exits and another thread is
|
| 1151 |
+
created. The identifier is available even after the thread has exited.
|
| 1152 |
+
|
| 1153 |
+
"""
|
| 1154 |
+
assert self._initialized, "Thread.__init__() not called"
|
| 1155 |
+
return self._ident
|
| 1156 |
+
|
| 1157 |
+
if _HAVE_THREAD_NATIVE_ID:
|
| 1158 |
+
@property
|
| 1159 |
+
def native_id(self):
|
| 1160 |
+
"""Native integral thread ID of this thread, or None if it has not been started.
|
| 1161 |
+
|
| 1162 |
+
This is a non-negative integer. See the get_native_id() function.
|
| 1163 |
+
This represents the Thread ID as reported by the kernel.
|
| 1164 |
+
|
| 1165 |
+
"""
|
| 1166 |
+
assert self._initialized, "Thread.__init__() not called"
|
| 1167 |
+
return self._native_id
|
| 1168 |
+
|
| 1169 |
+
def is_alive(self):
|
| 1170 |
+
"""Return whether the thread is alive.
|
| 1171 |
+
|
| 1172 |
+
This method returns True just before the run() method starts until just
|
| 1173 |
+
after the run() method terminates. See also the module function
|
| 1174 |
+
enumerate().
|
| 1175 |
+
|
| 1176 |
+
"""
|
| 1177 |
+
assert self._initialized, "Thread.__init__() not called"
|
| 1178 |
+
if self._is_stopped or not self._started.is_set():
|
| 1179 |
+
return False
|
| 1180 |
+
self._wait_for_tstate_lock(False)
|
| 1181 |
+
return not self._is_stopped
|
| 1182 |
+
|
| 1183 |
+
@property
|
| 1184 |
+
def daemon(self):
|
| 1185 |
+
"""A boolean value indicating whether this thread is a daemon thread.
|
| 1186 |
+
|
| 1187 |
+
This must be set before start() is called, otherwise RuntimeError is
|
| 1188 |
+
raised. Its initial value is inherited from the creating thread; the
|
| 1189 |
+
main thread is not a daemon thread and therefore all threads created in
|
| 1190 |
+
the main thread default to daemon = False.
|
| 1191 |
+
|
| 1192 |
+
The entire Python program exits when only daemon threads are left.
|
| 1193 |
+
|
| 1194 |
+
"""
|
| 1195 |
+
assert self._initialized, "Thread.__init__() not called"
|
| 1196 |
+
return self._daemonic
|
| 1197 |
+
|
| 1198 |
+
@daemon.setter
|
| 1199 |
+
def daemon(self, daemonic):
|
| 1200 |
+
if not self._initialized:
|
| 1201 |
+
raise RuntimeError("Thread.__init__() not called")
|
| 1202 |
+
if self._started.is_set():
|
| 1203 |
+
raise RuntimeError("cannot set daemon status of active thread")
|
| 1204 |
+
self._daemonic = daemonic
|
| 1205 |
+
|
| 1206 |
+
def isDaemon(self):
|
| 1207 |
+
"""Return whether this thread is a daemon.
|
| 1208 |
+
|
| 1209 |
+
This method is deprecated, use the daemon attribute instead.
|
| 1210 |
+
|
| 1211 |
+
"""
|
| 1212 |
+
import warnings
|
| 1213 |
+
warnings.warn('isDaemon() is deprecated, get the daemon attribute instead',
|
| 1214 |
+
DeprecationWarning, stacklevel=2)
|
| 1215 |
+
return self.daemon
|
| 1216 |
+
|
| 1217 |
+
def setDaemon(self, daemonic):
|
| 1218 |
+
"""Set whether this thread is a daemon.
|
| 1219 |
+
|
| 1220 |
+
This method is deprecated, use the .daemon property instead.
|
| 1221 |
+
|
| 1222 |
+
"""
|
| 1223 |
+
import warnings
|
| 1224 |
+
warnings.warn('setDaemon() is deprecated, set the daemon attribute instead',
|
| 1225 |
+
DeprecationWarning, stacklevel=2)
|
| 1226 |
+
self.daemon = daemonic
|
| 1227 |
+
|
| 1228 |
+
def getName(self):
|
| 1229 |
+
"""Return a string used for identification purposes only.
|
| 1230 |
+
|
| 1231 |
+
This method is deprecated, use the name attribute instead.
|
| 1232 |
+
|
| 1233 |
+
"""
|
| 1234 |
+
import warnings
|
| 1235 |
+
warnings.warn('getName() is deprecated, get the name attribute instead',
|
| 1236 |
+
DeprecationWarning, stacklevel=2)
|
| 1237 |
+
return self.name
|
| 1238 |
+
|
| 1239 |
+
def setName(self, name):
|
| 1240 |
+
"""Set the name string for this thread.
|
| 1241 |
+
|
| 1242 |
+
This method is deprecated, use the name attribute instead.
|
| 1243 |
+
|
| 1244 |
+
"""
|
| 1245 |
+
import warnings
|
| 1246 |
+
warnings.warn('setName() is deprecated, set the name attribute instead',
|
| 1247 |
+
DeprecationWarning, stacklevel=2)
|
| 1248 |
+
self.name = name
|
| 1249 |
+
|
| 1250 |
+
|
| 1251 |
+
try:
|
| 1252 |
+
from _thread import (_excepthook as excepthook,
|
| 1253 |
+
_ExceptHookArgs as ExceptHookArgs)
|
| 1254 |
+
except ImportError:
|
| 1255 |
+
# Simple Python implementation if _thread._excepthook() is not available
|
| 1256 |
+
from traceback import print_exception as _print_exception
|
| 1257 |
+
from collections import namedtuple
|
| 1258 |
+
|
| 1259 |
+
_ExceptHookArgs = namedtuple(
|
| 1260 |
+
'ExceptHookArgs',
|
| 1261 |
+
'exc_type exc_value exc_traceback thread')
|
| 1262 |
+
|
| 1263 |
+
def ExceptHookArgs(args):
|
| 1264 |
+
return _ExceptHookArgs(*args)
|
| 1265 |
+
|
| 1266 |
+
def excepthook(args, /):
|
| 1267 |
+
"""
|
| 1268 |
+
Handle uncaught Thread.run() exception.
|
| 1269 |
+
"""
|
| 1270 |
+
if args.exc_type == SystemExit:
|
| 1271 |
+
# silently ignore SystemExit
|
| 1272 |
+
return
|
| 1273 |
+
|
| 1274 |
+
if _sys is not None and _sys.stderr is not None:
|
| 1275 |
+
stderr = _sys.stderr
|
| 1276 |
+
elif args.thread is not None:
|
| 1277 |
+
stderr = args.thread._stderr
|
| 1278 |
+
if stderr is None:
|
| 1279 |
+
# do nothing if sys.stderr is None and sys.stderr was None
|
| 1280 |
+
# when the thread was created
|
| 1281 |
+
return
|
| 1282 |
+
else:
|
| 1283 |
+
# do nothing if sys.stderr is None and args.thread is None
|
| 1284 |
+
return
|
| 1285 |
+
|
| 1286 |
+
if args.thread is not None:
|
| 1287 |
+
name = args.thread.name
|
| 1288 |
+
else:
|
| 1289 |
+
name = get_ident()
|
| 1290 |
+
print(f"Exception in thread {name}:",
|
| 1291 |
+
file=stderr, flush=True)
|
| 1292 |
+
_print_exception(args.exc_type, args.exc_value, args.exc_traceback,
|
| 1293 |
+
file=stderr)
|
| 1294 |
+
stderr.flush()
|
| 1295 |
+
|
| 1296 |
+
|
| 1297 |
+
# Original value of threading.excepthook
|
| 1298 |
+
__excepthook__ = excepthook
|
| 1299 |
+
|
| 1300 |
+
|
| 1301 |
+
def _make_invoke_excepthook():
|
| 1302 |
+
# Create a local namespace to ensure that variables remain alive
|
| 1303 |
+
# when _invoke_excepthook() is called, even if it is called late during
|
| 1304 |
+
# Python shutdown. It is mostly needed for daemon threads.
|
| 1305 |
+
|
| 1306 |
+
old_excepthook = excepthook
|
| 1307 |
+
old_sys_excepthook = _sys.excepthook
|
| 1308 |
+
if old_excepthook is None:
|
| 1309 |
+
raise RuntimeError("threading.excepthook is None")
|
| 1310 |
+
if old_sys_excepthook is None:
|
| 1311 |
+
raise RuntimeError("sys.excepthook is None")
|
| 1312 |
+
|
| 1313 |
+
sys_exc_info = _sys.exc_info
|
| 1314 |
+
local_print = print
|
| 1315 |
+
local_sys = _sys
|
| 1316 |
+
|
| 1317 |
+
def invoke_excepthook(thread):
|
| 1318 |
+
global excepthook
|
| 1319 |
+
try:
|
| 1320 |
+
hook = excepthook
|
| 1321 |
+
if hook is None:
|
| 1322 |
+
hook = old_excepthook
|
| 1323 |
+
|
| 1324 |
+
args = ExceptHookArgs([*sys_exc_info(), thread])
|
| 1325 |
+
|
| 1326 |
+
hook(args)
|
| 1327 |
+
except Exception as exc:
|
| 1328 |
+
exc.__suppress_context__ = True
|
| 1329 |
+
del exc
|
| 1330 |
+
|
| 1331 |
+
if local_sys is not None and local_sys.stderr is not None:
|
| 1332 |
+
stderr = local_sys.stderr
|
| 1333 |
+
else:
|
| 1334 |
+
stderr = thread._stderr
|
| 1335 |
+
|
| 1336 |
+
local_print("Exception in threading.excepthook:",
|
| 1337 |
+
file=stderr, flush=True)
|
| 1338 |
+
|
| 1339 |
+
if local_sys is not None and local_sys.excepthook is not None:
|
| 1340 |
+
sys_excepthook = local_sys.excepthook
|
| 1341 |
+
else:
|
| 1342 |
+
sys_excepthook = old_sys_excepthook
|
| 1343 |
+
|
| 1344 |
+
sys_excepthook(*sys_exc_info())
|
| 1345 |
+
finally:
|
| 1346 |
+
# Break reference cycle (exception stored in a variable)
|
| 1347 |
+
args = None
|
| 1348 |
+
|
| 1349 |
+
return invoke_excepthook
|
| 1350 |
+
|
| 1351 |
+
|
| 1352 |
+
# The timer class was contributed by Itamar Shtull-Trauring
|
| 1353 |
+
|
| 1354 |
+
class Timer(Thread):
|
| 1355 |
+
"""Call a function after a specified number of seconds:
|
| 1356 |
+
|
| 1357 |
+
t = Timer(30.0, f, args=None, kwargs=None)
|
| 1358 |
+
t.start()
|
| 1359 |
+
t.cancel() # stop the timer's action if it's still waiting
|
| 1360 |
+
|
| 1361 |
+
"""
|
| 1362 |
+
|
| 1363 |
+
def __init__(self, interval, function, args=None, kwargs=None):
|
| 1364 |
+
Thread.__init__(self)
|
| 1365 |
+
self.interval = interval
|
| 1366 |
+
self.function = function
|
| 1367 |
+
self.args = args if args is not None else []
|
| 1368 |
+
self.kwargs = kwargs if kwargs is not None else {}
|
| 1369 |
+
self.finished = Event()
|
| 1370 |
+
|
| 1371 |
+
def cancel(self):
|
| 1372 |
+
"""Stop the timer if it hasn't finished yet."""
|
| 1373 |
+
self.finished.set()
|
| 1374 |
+
|
| 1375 |
+
def run(self):
|
| 1376 |
+
self.finished.wait(self.interval)
|
| 1377 |
+
if not self.finished.is_set():
|
| 1378 |
+
self.function(*self.args, **self.kwargs)
|
| 1379 |
+
self.finished.set()
|
| 1380 |
+
|
| 1381 |
+
|
| 1382 |
+
# Special thread class to represent the main thread
|
| 1383 |
+
|
| 1384 |
+
class _MainThread(Thread):
|
| 1385 |
+
|
| 1386 |
+
def __init__(self):
|
| 1387 |
+
Thread.__init__(self, name="MainThread", daemon=False)
|
| 1388 |
+
self._set_tstate_lock()
|
| 1389 |
+
self._started.set()
|
| 1390 |
+
self._set_ident()
|
| 1391 |
+
if _HAVE_THREAD_NATIVE_ID:
|
| 1392 |
+
self._set_native_id()
|
| 1393 |
+
with _active_limbo_lock:
|
| 1394 |
+
_active[self._ident] = self
|
| 1395 |
+
|
| 1396 |
+
|
| 1397 |
+
# Dummy thread class to represent threads not started here.
|
| 1398 |
+
# These aren't garbage collected when they die, nor can they be waited for.
|
| 1399 |
+
# If they invoke anything in threading.py that calls current_thread(), they
|
| 1400 |
+
# leave an entry in the _active dict forever after.
|
| 1401 |
+
# Their purpose is to return *something* from current_thread().
|
| 1402 |
+
# They are marked as daemon threads so we won't wait for them
|
| 1403 |
+
# when we exit (conform previous semantics).
|
| 1404 |
+
|
| 1405 |
+
class _DummyThread(Thread):
|
| 1406 |
+
|
| 1407 |
+
def __init__(self):
|
| 1408 |
+
Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True)
|
| 1409 |
+
|
| 1410 |
+
self._started.set()
|
| 1411 |
+
self._set_ident()
|
| 1412 |
+
if _HAVE_THREAD_NATIVE_ID:
|
| 1413 |
+
self._set_native_id()
|
| 1414 |
+
with _active_limbo_lock:
|
| 1415 |
+
_active[self._ident] = self
|
| 1416 |
+
|
| 1417 |
+
def _stop(self):
|
| 1418 |
+
pass
|
| 1419 |
+
|
| 1420 |
+
def is_alive(self):
|
| 1421 |
+
assert not self._is_stopped and self._started.is_set()
|
| 1422 |
+
return True
|
| 1423 |
+
|
| 1424 |
+
def join(self, timeout=None):
|
| 1425 |
+
assert False, "cannot join a dummy thread"
|
| 1426 |
+
|
| 1427 |
+
|
| 1428 |
+
# Global API functions
|
| 1429 |
+
|
| 1430 |
+
def current_thread():
    """Return the Thread object for the caller's thread of control.

    Threads of control that were not created through the threading module
    are represented by a dummy thread object with limited functionality.
    """
    thread = _active.get(get_ident())
    if thread is not None:
        return thread
    # Unknown ident: this thread was started outside the threading module,
    # so hand back (and register) a placeholder for it.
    return _DummyThread()
|
| 1441 |
+
|
| 1442 |
+
def currentThread():
    """Return the Thread object for the caller's thread of control.

    Deprecated camelCase alias kept for backward compatibility; use
    current_thread() instead.
    """
    # Imported lazily so merely loading threading does not pull in warnings.
    import warnings
    warn = warnings.warn
    warn('currentThread() is deprecated, use current_thread() instead',
         DeprecationWarning, stacklevel=2)
    return current_thread()
|
| 1452 |
+
|
| 1453 |
+
def active_count():
    """Return how many Thread objects are currently alive.

    The count equals the length of the list returned by enumerate():
    started threads plus those still starting (the limbo set).
    """
    with _active_limbo_lock:
        started = len(_active)
        starting = len(_limbo)
    return started + starting
|
| 1462 |
+
|
| 1463 |
+
def activeCount():
    """Return how many Thread objects are currently alive.

    Deprecated camelCase alias kept for backward compatibility; use
    active_count() instead.
    """
    # Imported lazily so merely loading threading does not pull in warnings.
    import warnings
    warn = warnings.warn
    warn('activeCount() is deprecated, use active_count() instead',
         DeprecationWarning, stacklevel=2)
    return active_count()
|
| 1473 |
+
|
| 1474 |
+
def _enumerate():
    # Lock-free version of enumerate(); internal use only, for callers
    # that already hold _active_limbo_lock or accept a racy snapshot.
    return [*_active.values(), *_limbo.values()]
|
| 1477 |
+
|
| 1478 |
+
def enumerate():
    """Return a list of every Thread object currently alive.

    The list covers daemonic threads, dummy thread objects created by
    current_thread(), and the main thread; terminated threads and threads
    that have not yet been started are excluded.
    """
    with _active_limbo_lock:
        return [*_active.values(), *_limbo.values()]
|
| 1488 |
+
|
| 1489 |
+
|
| 1490 |
+
# Callables registered via _register_atexit(); _shutdown() runs them in
# reverse order just before joining non-daemon threads.
_threading_atexits = []
# Flipped to True by _shutdown(); further atexit registration then fails.
_SHUTTING_DOWN = False
|
| 1492 |
+
|
| 1493 |
+
def _register_atexit(func, *arg, **kwargs):
    """CPython internal: register *func* to be called before joining threads.

    Analogous to atexit.register(), except that the registered callables
    run inside _shutdown(), just before all non-daemon threads are joined
    -- i.e. at threading shutdown rather than interpreter shutdown.  As
    with atexit, they are invoked in reverse registration order.

    Raises RuntimeError if threading shutdown has already begun.
    """
    if _SHUTTING_DOWN:
        raise RuntimeError("can't register atexit after shutdown")
    # Freeze the arguments now; _shutdown() invokes the partial with none.
    _threading_atexits.append(functools.partial(func, *arg, **kwargs))
|
| 1508 |
+
|
| 1509 |
+
|
| 1510 |
+
from _thread import stack_size

# Create the main thread object, and make it available for the interpreter
# (Py_Main) as threading._shutdown.
_main_thread = _MainThread()
|
| 1517 |
+
|
| 1518 |
+
def _shutdown():
    """
    Wait until the Python thread state of all non-daemon threads get deleted.

    Called by the interpreter at exit: runs the registered threading
    atexit callables, releases the main thread's tstate lock, then waits
    on every lock in _shutdown_locks (one per live non-daemon thread).
    """
    # Obscure: other threads may be waiting to join _main_thread.  That's
    # dubious, but some code does it.  We can't wait for C code to release
    # the main thread's tstate_lock - that won't happen until the interpreter
    # is nearly dead.  So we release it here.  Note that just calling _stop()
    # isn't enough:  other threads may already be waiting on _tstate_lock.
    if _main_thread._is_stopped:
        # _shutdown() was already called
        return

    global _SHUTTING_DOWN
    _SHUTTING_DOWN = True

    # Call registered threading atexit functions before threads are joined.
    # Order is reversed, similar to atexit.
    for atexit_call in reversed(_threading_atexits):
        atexit_call()

    # Main thread
    if _main_thread.ident == get_ident():
        tlock = _main_thread._tstate_lock
        # The main thread isn't finished yet, so its thread state lock can't
        # have been released.
        assert tlock is not None
        assert tlock.locked()
        tlock.release()
        _main_thread._stop()
    else:
        # bpo-1596321: _shutdown() must be called in the main thread.
        # If the threading module was not imported by the main thread,
        # _main_thread is the thread which imported the threading module.
        # In this case, ignore _main_thread, similar behavior to threads
        # spawned by C libraries or using _thread.start_new_thread().
        pass

    # Join all non-daemon threads
    while True:
        with _shutdown_locks_lock:
            locks = list(_shutdown_locks)
            _shutdown_locks.clear()

        if not locks:
            break

        for lock in locks:
            # mimic Thread.join(): each lock is released when its thread's
            # state is deleted, so acquiring it waits for that thread.
            lock.acquire()
            lock.release()

        # new threads can be spawned while we were waiting for the other
        # threads to complete, so loop until the set stays empty
|
| 1572 |
+
|
| 1573 |
+
|
| 1574 |
+
def main_thread():
    """Return the main thread object.

    Under normal conditions this is the thread from which the Python
    interpreter was started.
    """
    return _main_thread
|
| 1581 |
+
|
| 1582 |
+
# get thread-local implementation, either from the thread
|
| 1583 |
+
# module, or from the python fallback
|
| 1584 |
+
|
| 1585 |
+
try:
    from _thread import _local as local
except ImportError:
    # C implementation unavailable; use the pure-Python fallback.
    from _threading_local import local
|
| 1589 |
+
|
| 1590 |
+
|
| 1591 |
+
def _after_fork():
    """
    Cleanup threading module state that should not exist after a fork.

    Runs in the child process: only the forking thread survives fork(),
    so every other Thread object is stopped and the module-level locks
    and registries are reinitialized.
    """
    # Reset _active_limbo_lock, in case we forked while the lock was held
    # by another (non-forked) thread. http://bugs.python.org/issue874900
    global _active_limbo_lock, _main_thread
    global _shutdown_locks_lock, _shutdown_locks
    _active_limbo_lock = RLock()

    # fork() only copied the current thread; clear references to others.
    new_active = {}

    try:
        current = _active[get_ident()]
    except KeyError:
        # fork() was called in a thread which was not spawned
        # by threading.Thread. For example, a thread spawned
        # by thread.start_new_thread().
        current = _MainThread()

    # The surviving thread becomes the child's main thread.
    _main_thread = current

    # reset _shutdown() locks: threads re-register their _tstate_lock below
    _shutdown_locks_lock = _allocate_lock()
    _shutdown_locks = set()

    with _active_limbo_lock:
        # Dangling thread instances must still have their locks reset,
        # because someone may join() them.
        threads = set(_enumerate())
        threads.update(_dangling)
        for thread in threads:
            # Any lock/condition variable may be currently locked or in an
            # invalid state, so we reinitialize them.
            if thread is current:
                # There is only one active thread. We reset the ident to
                # its new value since it can have changed.
                thread._reset_internal_locks(True)
                ident = get_ident()
                thread._ident = ident
                new_active[ident] = thread
            else:
                # All the others are already stopped.
                thread._reset_internal_locks(False)
                thread._stop()

    _limbo.clear()
    _active.clear()
    _active.update(new_active)
    # Only the forking thread may remain registered in the child.
    assert len(_active) == 1
|
| 1642 |
+
|
| 1643 |
+
|
| 1644 |
+
# Install the fork cleanup hook where os.fork() exists (POSIX only);
# hasattr guards platforms without register_at_fork (e.g. Windows).
if hasattr(_os, "register_at_fork"):
    _os.register_at_fork(after_in_child=_after_fork)
|