Add files using upload-large-folder tool
- .gitattributes +1 -0
- evalkit_tf437/lib/python3.10/site-packages/__pycache__/decorator.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/__pycache__/ffmpy.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/__pycache__/isympy.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/__pycache__/pylab.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/__pycache__/launch.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/__pycache__/test.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/cursor.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/helpers.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/input.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/keymap.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/cursor.py +65 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/input.py +86 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/keymap.py +134 -0
- evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/selection_menu.py +143 -0
- evalkit_tf437/lib/python3.10/site-packages/nvidia_curand_cu12-10.3.2.106.dist-info/RECORD +33 -0
- evalkit_tf437/lib/python3.10/site-packages/nvidia_curand_cu12-10.3.2.106.dist-info/WHEEL +5 -0
- evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/INSTALLER +1 -0
- evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/LICENSE +1250 -0
- evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/METADATA +1573 -0
- evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/RECORD +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/REQUESTED +0 -0
- evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/WHEEL +6 -0
- evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/entry_points.txt +3 -0
- evalkit_tf437/lib/python3.10/site-packages/pyarrow/libarrow_substrait.so.1800 +3 -0
- evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/INSTALLER +1 -0
- evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/LICENSE +175 -0
- evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/RECORD +43 -0
- evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/REQUESTED +0 -0
- evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/WHEEL +5 -0
- evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/top_level.txt +1 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/__init__.py +162 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/_built_with_meson.py +0 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/_config.py +376 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/_distributor_init.py +13 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/_isotonic.pyx +115 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/_min_dependencies.py +75 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/base.py +1393 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/calibration.py +1423 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/conftest.py +358 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/decomposition/tests/__init__.py +0 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/discriminant_analysis.py +1129 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/dummy.py +702 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/exceptions.py +249 -0
- evalkit_tf437/lib/python3.10/site-packages/sklearn/feature_extraction/__init__.py +18 -0
    	
.gitattributes CHANGED

@@ -1630,3 +1630,4 @@ evalkit_tf437/lib/python3.10/site-packages/orjson/orjson.cpython-310-x86_64-linu
 evalkit_internvl/lib/python3.10/site-packages/sympy/printing/tests/__pycache__/test_latex.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 evalkit_internvl/lib/python3.10/site-packages/sympy/printing/__pycache__/latex.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
 evalkit_internvl/lib/python3.10/site-packages/sympy/printing/pretty/tests/__pycache__/test_pretty.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
+evalkit_tf437/lib/python3.10/site-packages/pyarrow/libarrow_substrait.so.1800 filter=lfs diff=lfs merge=lfs -text
    	
evalkit_tf437/lib/python3.10/site-packages/__pycache__/decorator.cpython-310.pyc ADDED
Binary file (12.8 kB)

evalkit_tf437/lib/python3.10/site-packages/__pycache__/ffmpy.cpython-310.pyc ADDED
Binary file (9.93 kB)

evalkit_tf437/lib/python3.10/site-packages/__pycache__/isympy.cpython-310.pyc ADDED
Binary file (9.42 kB)

evalkit_tf437/lib/python3.10/site-packages/__pycache__/pylab.cpython-310.pyc ADDED
Binary file (236 Bytes)

evalkit_tf437/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc ADDED
Binary file (27.6 kB)

evalkit_tf437/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc ADDED
Binary file (43.8 kB)

evalkit_tf437/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc ADDED
Binary file (87.8 kB)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (178 Bytes)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/__pycache__/launch.cpython-310.pyc ADDED
Binary file (25.9 kB)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/__pycache__/test.cpython-310.pyc ADDED
Binary file (1.71 kB)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (232 Bytes)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/cursor.cpython-310.pyc ADDED
Binary file (1.42 kB)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/helpers.cpython-310.pyc ADDED
Binary file (1.64 kB)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/input.cpython-310.pyc ADDED
Binary file (2.41 kB)

evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/__pycache__/keymap.cpython-310.pyc ADDED
Binary file (2.39 kB)
    	
evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/cursor.py ADDED

@@ -0,0 +1,65 @@
+# Copyright 2022 The HuggingFace Team and Brian Chao. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A utility for showing and hiding the terminal cursor on Windows and Linux, based on https://github.com/bchao1/bullet
+"""
+
+import os
+import sys
+from contextlib import contextmanager
+
+
+# Windows only
+if os.name == "nt":
+    import ctypes
+    import msvcrt  # noqa
+
+    class CursorInfo(ctypes.Structure):
+        # _fields is a specific attr expected by ctypes
+        _fields_ = [("size", ctypes.c_int), ("visible", ctypes.c_byte)]
+
+
+def hide_cursor():
+    if os.name == "nt":
+        ci = CursorInfo()
+        handle = ctypes.windll.kernel32.GetStdHandle(-11)
+        ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci))
+        ci.visible = False
+        ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci))
+    elif os.name == "posix":
+        sys.stdout.write("\033[?25l")
+        sys.stdout.flush()
+
+
+def show_cursor():
+    if os.name == "nt":
+        ci = CursorInfo()
+        handle = ctypes.windll.kernel32.GetStdHandle(-11)
+        ctypes.windll.kernel32.GetConsoleCursorInfo(handle, ctypes.byref(ci))
+        ci.visible = True
+        ctypes.windll.kernel32.SetConsoleCursorInfo(handle, ctypes.byref(ci))
+    elif os.name == "posix":
+        sys.stdout.write("\033[?25h")
+        sys.stdout.flush()
+
+
+@contextmanager
+def hide():
+    "Context manager to hide the terminal cursor"
+    try:
+        hide_cursor()
+        yield
+    finally:
+        show_cursor()
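For reference, a minimal usage sketch of the module above (not part of the diff; it assumes the accelerate package vendored in this environment is importable):

# Sketch: hide the terminal cursor while drawing, restore it on exit.
from accelerate.commands.menu import cursor

with cursor.hide():
    print("cursor is hidden while this block runs")
# show_cursor() has been called again by the time we get here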
    	
evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/input.py ADDED

@@ -0,0 +1,86 @@
+# Copyright 2022 The HuggingFace Team and Brian Chao. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This file contains utilities for handling input from the user and registering specific keys to specific functions,
+based on https://github.com/bchao1/bullet
+"""
+
+from typing import List
+
+from .keymap import KEYMAP, get_character
+
+
+def mark(key: str):
+    """
+    Mark the function with the key code so it can be handled in the register
+    """
+
+    def decorator(func):
+        handle = getattr(func, "handle_key", [])
+        handle += [key]
+        setattr(func, "handle_key", handle)
+        return func
+
+    return decorator
+
+
+def mark_multiple(*keys: List[str]):
+    """
+    Mark the function with the key codes so it can be handled in the register
+    """
+
+    def decorator(func):
+        handle = getattr(func, "handle_key", [])
+        handle += keys
+        setattr(func, "handle_key", handle)
+        return func
+
+    return decorator
+
+
+class KeyHandler(type):
+    """
+    Metaclass that adds the key handlers to the class
+    """
+
+    def __new__(cls, name, bases, attrs):
+        new_cls = super().__new__(cls, name, bases, attrs)
+        if not hasattr(new_cls, "key_handler"):
+            setattr(new_cls, "key_handler", {})
+        setattr(new_cls, "handle_input", KeyHandler.handle_input)
+
+        for value in attrs.values():
+            handled_keys = getattr(value, "handle_key", [])
+            for key in handled_keys:
+                new_cls.key_handler[key] = value
+        return new_cls
+
+    @staticmethod
+    def handle_input(cls):
+        "Finds and returns the selected character if it exists in the handler"
+        char = get_character()
+        if char != KEYMAP["undefined"]:
+            char = ord(char)
+        handler = cls.key_handler.get(char)
+        if handler:
+            cls.current_selection = char
+            return handler(cls)
+        else:
+            return None
+
+
+def register(cls):
+    """Adds KeyHandler metaclass to the class"""
+    return KeyHandler(cls.__name__, cls.__bases__, cls.__dict__.copy())
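A hedged sketch of how the register/mark decorators above fit together (it mirrors the BulletMenu class added later in this commit; the Demo class and its key binding are illustrative only):

from accelerate.commands.menu import input
from accelerate.commands.menu.keymap import KEYMAP

@input.register          # rebuilds the class through the KeyHandler metaclass
class Demo:
    current_selection = None  # filled in by KeyHandler.handle_input

    @input.mark(KEYMAP["newline"])
    def select(self):
        return "enter pressed"

demo = Demo()
# Blocks on stdin; returns the bound handler's result, or None for unmapped keys.
result = demo.handle_input()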
    	
evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/keymap.py ADDED

@@ -0,0 +1,134 @@
+# Copyright 2022 The HuggingFace Team and Brian Chao. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Utilities relating to parsing raw characters from the keyboard, based on https://github.com/bchao1/bullet
+"""
+
+
+import os
+import string
+import sys
+
+
+ARROW_KEY_FLAG = 1 << 8
+
+KEYMAP = {
+    "tab": ord("\t"),
+    "newline": ord("\r"),
+    "esc": 27,
+    "up": 65 + ARROW_KEY_FLAG,
+    "down": 66 + ARROW_KEY_FLAG,
+    "right": 67 + ARROW_KEY_FLAG,
+    "left": 68 + ARROW_KEY_FLAG,
+    "mod_int": 91,
+    "undefined": sys.maxsize,
+    "interrupt": 3,
+    "insert": 50,
+    "delete": 51,
+    "pg_up": 53,
+    "pg_down": 54,
+}
+
+KEYMAP["arrow_begin"] = KEYMAP["up"]
+KEYMAP["arrow_end"] = KEYMAP["left"]
+
+if sys.platform == "win32":
+    WIN_CH_BUFFER = []
+    WIN_KEYMAP = {
+        b"\xe0H": KEYMAP["up"] - ARROW_KEY_FLAG,
+        b"\x00H": KEYMAP["up"] - ARROW_KEY_FLAG,
+        b"\xe0P": KEYMAP["down"] - ARROW_KEY_FLAG,
+        b"\x00P": KEYMAP["down"] - ARROW_KEY_FLAG,
+        b"\xe0M": KEYMAP["right"] - ARROW_KEY_FLAG,
+        b"\x00M": KEYMAP["right"] - ARROW_KEY_FLAG,
+        b"\xe0K": KEYMAP["left"] - ARROW_KEY_FLAG,
+        b"\x00K": KEYMAP["left"] - ARROW_KEY_FLAG,
+    }
+
+for i in range(10):
+    KEYMAP[str(i)] = ord(str(i))
+
+
+def get_raw_chars():
+    "Gets raw characters from inputs"
+    if os.name == "nt":
+        import msvcrt
+
+        encoding = "mbcs"
+        # Flush the keyboard buffer
+        while msvcrt.kbhit():
+            msvcrt.getch()
+        if len(WIN_CH_BUFFER) == 0:
+            # Read the keystroke
+            ch = msvcrt.getch()
+
+            # If it is a prefix char, get second part
+            if ch in (b"\x00", b"\xe0"):
+                ch2 = ch + msvcrt.getch()
+                # Translate actual Win chars to bullet char types
+                try:
+                    chx = chr(WIN_KEYMAP[ch2])
+                    WIN_CH_BUFFER.append(chr(KEYMAP["mod_int"]))
+                    WIN_CH_BUFFER.append(chx)
+                    if ord(chx) in (
+                        KEYMAP["insert"] - 1 << 9,
+                        KEYMAP["delete"] - 1 << 9,
+                        KEYMAP["pg_up"] - 1 << 9,
+                        KEYMAP["pg_down"] - 1 << 9,
+                    ):
+                        WIN_CH_BUFFER.append(chr(126))
+                    ch = chr(KEYMAP["esc"])
+                except KeyError:
+                    ch = ch2[1]
+            else:
+                ch = ch.decode(encoding)
+        else:
+            ch = WIN_CH_BUFFER.pop(0)
+    elif os.name == "posix":
+        import termios
+        import tty
+
+        fd = sys.stdin.fileno()
+        old_settings = termios.tcgetattr(fd)
+        try:
+            tty.setraw(fd)
+            ch = sys.stdin.read(1)
+        finally:
+            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+    return ch
+
+
+def get_character():
+    "Gets a character from the keyboard and returns the key code"
+    char = get_raw_chars()
+    if ord(char) in [KEYMAP["interrupt"], KEYMAP["newline"]]:
+        return char
+
+    elif ord(char) == KEYMAP["esc"]:
+        combo = get_raw_chars()
+        if ord(combo) == KEYMAP["mod_int"]:
+            key = get_raw_chars()
+            if ord(key) >= KEYMAP["arrow_begin"] - ARROW_KEY_FLAG and ord(key) <= KEYMAP["arrow_end"] - ARROW_KEY_FLAG:
+                return chr(ord(key) + ARROW_KEY_FLAG)
+            else:
+                return KEYMAP["undefined"]
+        else:
+            return get_raw_chars()
+
+    else:
+        if char in string.printable:
+            return char
+        else:
+            return KEYMAP["undefined"]
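A short, hedged sketch of reading one keypress with the helpers above (requires a real TTY; arrow keys come back offset by ARROW_KEY_FLAG):

from accelerate.commands.menu.keymap import KEYMAP, get_character

key = get_character()               # blocks until a key is pressed
if key == KEYMAP["undefined"]:      # unprintable key with no mapping
    print("unrecognized key")
elif ord(key) == KEYMAP["up"]:      # arrows are returned as chr(code + ARROW_KEY_FLAG)
    print("up arrow")
else:
    print(f"plain character: {key!r}")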
    	
evalkit_tf437/lib/python3.10/site-packages/accelerate/commands/menu/selection_menu.py ADDED

@@ -0,0 +1,143 @@
+# Copyright 2022 The HuggingFace Team and Brian Chao. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Main driver for the selection menu, based on https://github.com/bchao1/bullet
+"""
+import builtins
+import sys
+
+from ...utils.imports import _is_package_available
+from . import cursor, input
+from .helpers import Direction, clear_line, forceWrite, linebreak, move_cursor, reset_cursor, writeColor
+from .keymap import KEYMAP
+
+
+in_colab = False
+try:
+    in_colab = _is_package_available("google.colab")
+except ModuleNotFoundError:
+    pass
+
+
+@input.register
+class BulletMenu:
+    """
+    A CLI menu to select a choice from a list of choices using the keyboard.
+    """
+
+    def __init__(self, prompt: str = None, choices: list = []):
+        self.position = 0
+        self.choices = choices
+        self.prompt = prompt
+        if sys.platform == "win32":
+            self.arrow_char = "*"
+        else:
+            self.arrow_char = "➔ "
+
+    def write_choice(self, index, end: str = ""):
+        if sys.platform != "win32":
+            writeColor(self.choices[index], 32, end)
+        else:
+            forceWrite(self.choices[index], end)
+
+    def print_choice(self, index: int):
+        "Prints the choice at the given index"
+        if index == self.position:
+            forceWrite(f" {self.arrow_char} ")
+            self.write_choice(index)
+        else:
+            forceWrite(f"    {self.choices[index]}")
+        reset_cursor()
+
+    def move_direction(self, direction: Direction, num_spaces: int = 1):
+        "Should not be directly called, used to move a direction of either up or down"
+        old_position = self.position
+        if direction == Direction.DOWN:
+            if self.position + 1 >= len(self.choices):
+                return
+            self.position += num_spaces
+        else:
+            if self.position - 1 < 0:
+                return
+            self.position -= num_spaces
+        clear_line()
+        self.print_choice(old_position)
+        move_cursor(num_spaces, direction.name)
+        self.print_choice(self.position)
+
+    @input.mark(KEYMAP["up"])
+    def move_up(self):
+        self.move_direction(Direction.UP)
+
+    @input.mark(KEYMAP["down"])
+    def move_down(self):
+        self.move_direction(Direction.DOWN)
+
+    @input.mark(KEYMAP["newline"])
+    def select(self):
+        move_cursor(len(self.choices) - self.position, "DOWN")
+        return self.position
+
+    @input.mark(KEYMAP["interrupt"])
+    def interrupt(self):
+        move_cursor(len(self.choices) - self.position, "DOWN")
+        raise KeyboardInterrupt
+
+    @input.mark_multiple(*[KEYMAP[str(number)] for number in range(10)])
+    def select_row(self):
+        index = int(chr(self.current_selection))
+        movement = index - self.position
+        if index == self.position:
+            return
+        if index < len(self.choices):
+            if self.position > index:
+                self.move_direction(Direction.UP, -movement)
+            elif self.position < index:
+                self.move_direction(Direction.DOWN, movement)
+            else:
+                return
+        else:
+            return
+
+    def run(self, default_choice: int = 0):
+        "Start the menu and return the selected choice"
+        if self.prompt:
+            linebreak()
+            forceWrite(self.prompt, "\n")
+            if in_colab:
+                forceWrite("Please input a choice index (starting from 0), and press enter", "\n")
+            else:
+                forceWrite("Please select a choice using the arrow or number keys, and selecting with enter", "\n")
+        self.position = default_choice
+        for i in range(len(self.choices)):
+            self.print_choice(i)
+            forceWrite("\n")
+        move_cursor(len(self.choices) - self.position, "UP")
+        with cursor.hide():
+            while True:
+                if in_colab:
+                    try:
+                        choice = int(builtins.input())
+                    except ValueError:
+                        choice = default_choice
+                else:
+                    choice = self.handle_input()
+                if choice is not None:
+                    reset_cursor()
+                    for _ in range(len(self.choices) + 1):
+                        move_cursor(1, "UP")
+                        clear_line()
+                    self.write_choice(choice, "\n")
+                    return choice
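A hedged sketch of driving this menu directly from a terminal (the prompt and choices below are made up for illustration):

from accelerate.commands.menu.selection_menu import BulletMenu

menu = BulletMenu("Pick an option:", ["first", "second", "third"])
index = menu.run(default_choice=0)   # arrow/number keys, confirm with enter
print(menu.choices[index])           # run() returns the selected index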
    	
evalkit_tf437/lib/python3.10/site-packages/nvidia_curand_cu12-10.3.2.106.dist-info/RECORD ADDED

@@ -0,0 +1,33 @@
+nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/__pycache__/__init__.cpython-310.pyc,,
+nvidia/curand/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/curand/__pycache__/__init__.cpython-310.pyc,,
+nvidia/curand/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/curand/include/__pycache__/__init__.cpython-310.pyc,,
+nvidia/curand/include/curand.h,sha256=y393HddG5_OdV-3cK2l2Q_fxzGbEc1cyCcYjkOE3oIk,43966
+nvidia/curand/include/curand_discrete.h,sha256=2qD3BkI622XEu0444wVP7HeYkKAx0Rjr2HDhqU4SA7E,3486
+nvidia/curand/include/curand_discrete2.h,sha256=ZrQTO5R9x83AMX88uq7M8M94DLSC5VEz0PAkfcwtQeg,10883
+nvidia/curand/include/curand_globals.h,sha256=bES1Kx0NrATXk1DReMMkqWrB062nOnaAp39y22wViXU,3717
+nvidia/curand/include/curand_kernel.h,sha256=SjfAeh13ybXIxiekcgczzua02kIAqETopJKRhYvCat8,53133
+nvidia/curand/include/curand_lognormal.h,sha256=-X-iNkJSzWpAYYjogm689EJTZfzore9sxU7ObddljLk,28142
+nvidia/curand/include/curand_mrg32k3a.h,sha256=ZVVREjGNsJQJ-3IzZZ_LKGtGteslicb8E0Aly49BKPs,170296
+nvidia/curand/include/curand_mtgp32.h,sha256=Qhrmx0pHWF-P2Uu5bKwYE9ymEWq3c7qBzCITVMaKMfI,7845
+nvidia/curand/include/curand_mtgp32_host.h,sha256=SXqzmSQkzTLSRJ4pojTg_TNCC3T-G89HdBK-boSDqr4,18274
+nvidia/curand/include/curand_mtgp32_kernel.h,sha256=ajZnXr5ZXnQExElf6LPpigrrKPTmMIZbRyTEnJ-BDhw,13731
+nvidia/curand/include/curand_mtgp32dc_p_11213.h,sha256=7_gGYUH47UugIAEt60vYH5nFa-QUwTpDwSEgLg9cZts,276889
+nvidia/curand/include/curand_normal.h,sha256=lnmYVk2fn0oEVWOytdKhXrHL36GLCjMnB8OnZeCaYcA,26953
+nvidia/curand/include/curand_normal_static.h,sha256=5K4iTC9AuSWCe1LVxuj_0y3BVjtp0bxO6hndv2rbmiw,4727
+nvidia/curand/include/curand_philox4x32_x.h,sha256=T21IP-Rdg3_tSVU9Je4dLKuwEqE4ovfwi7r1hOY92Dw,7166
+nvidia/curand/include/curand_poisson.h,sha256=KrhXOmO_D7aclnj8geIyHqdpSQwWHurS9V_pVtgzodM,25461
+nvidia/curand/include/curand_precalc.h,sha256=I6NZdgT42fMm9qSCtP-rlOAqt4Zsqgal0ajktcPmEak,1392393
+nvidia/curand/include/curand_uniform.h,sha256=gpmRgQu5r6ppgLTg60NXoDdVJS6wMUy6jC5bh8l04e8,17472
+nvidia/curand/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia/curand/lib/__pycache__/__init__.cpython-310.pyc,,
+nvidia/curand/lib/libcurand.so.10,sha256=Qah4DXdgqpHMpyMtCF9VGDx-bPrsq8LzLnGSYIMQNfw,96681392
+nvidia_curand_cu12-10.3.2.106.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+nvidia_curand_cu12-10.3.2.106.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262
+nvidia_curand_cu12-10.3.2.106.dist-info/METADATA,sha256=TTSGosdrLNURQYusjHa9N5vwOD1zz1DnnZV0im74NT4,1507
+nvidia_curand_cu12-10.3.2.106.dist-info/RECORD,,
+nvidia_curand_cu12-10.3.2.106.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nvidia_curand_cu12-10.3.2.106.dist-info/WHEEL,sha256=-kQi_VMfvRQozZJT7HUPMfY-5vLo0LVTmAylNJ3Ft98,106
+nvidia_curand_cu12-10.3.2.106.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7
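As an aside, the second field of each RECORD row is the urlsafe-base64 SHA-256 digest of the file with padding stripped (per the wheel spec); a quick Python sketch reproduces the hash shared by the empty __init__.py files above:

import base64
import hashlib

digest = hashlib.sha256(b"").digest()   # the empty nvidia/__init__.py files
print("sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode())
# -> sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU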
    	
evalkit_tf437/lib/python3.10/site-packages/nvidia_curand_cu12-10.3.2.106.dist-info/WHEEL ADDED

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-manylinux1_x86_64
+
    	
evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/INSTALLER ADDED

@@ -0,0 +1 @@
+pip
    	
evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/LICENSE ADDED

@@ -0,0 +1,1250 @@
+BSD 3-Clause License
+
+Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team
+All rights reserved.
+
+Copyright (c) 2011-2023, Open source contributors.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
              this list of conditions and the following disclaimer in the documentation
         | 
| 16 | 
            +
              and/or other materials provided with the distribution.
         | 
| 17 | 
            +
             | 
| 18 | 
            +
            * Neither the name of the copyright holder nor the names of its
         | 
| 19 | 
            +
              contributors may be used to endorse or promote products derived from
         | 
| 20 | 
            +
              this software without specific prior written permission.
         | 
| 21 | 
            +
             | 
| 22 | 
            +
            THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
         | 
| 23 | 
            +
            AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
         | 
| 24 | 
            +
            IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
         | 
| 25 | 
            +
            DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
         | 
| 26 | 
            +
            FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
         | 
| 27 | 
            +
            DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
         | 
| 28 | 
            +
            SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
         | 
| 29 | 
            +
            CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
         | 
| 30 | 
            +
            OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
         | 
| 31 | 
            +
            OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         | 
| 32 | 
            +
            Copyright (c) 2010-2019 Keith Goodman
         | 
| 33 | 
            +
            Copyright (c) 2019 Bottleneck Developers
         | 
| 34 | 
            +
            All rights reserved.
         | 
| 35 | 
            +
             | 
| 36 | 
            +
            Redistribution and use in source and binary forms, with or without
         | 
| 37 | 
            +
            modification, are permitted provided that the following conditions are met:
         | 
| 38 | 
            +
             | 
| 39 | 
            +
                * Redistributions of source code must retain the above copyright notice,
         | 
| 40 | 
            +
                  this list of conditions and the following disclaimer.
         | 
| 41 | 
            +
             | 
| 42 | 
            +
                * Redistributions in binary form must reproduce the above copyright
         | 
| 43 | 
            +
                  notice, this list of conditions and the following disclaimer in the
         | 
| 44 | 
            +
                  documentation and/or other materials provided with the distribution.
         | 
| 45 | 
            +
             | 
| 46 | 
            +
            THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
         | 
| 47 | 
            +
            AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
         | 
| 48 | 
            +
            IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
         | 
| 49 | 
            +
            ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
         | 
| 50 | 
            +
            LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
         | 
| 51 | 
            +
            CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
         | 
| 52 | 
            +
            SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
         | 
| 53 | 
            +
            INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
         | 
| 54 | 
            +
            CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
         | 
| 55 | 
            +
            ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
         | 
| 56 | 
            +
            POSSIBILITY OF SUCH DAMAGE.Copyright 2017- Paul Ganssle <[email protected]>
         | 
| 57 | 
            +
            Copyright 2017- dateutil contributors (see AUTHORS file)
         | 
| 58 | 
            +
             | 
| 59 | 
            +
               Licensed under the Apache License, Version 2.0 (the "License");
         | 
| 60 | 
            +
               you may not use this file except in compliance with the License.
         | 
| 61 | 
            +
               You may obtain a copy of the License at
         | 
| 62 | 
            +
             | 
| 63 | 
            +
                   http://www.apache.org/licenses/LICENSE-2.0
         | 
| 64 | 
            +
             | 
| 65 | 
            +
               Unless required by applicable law or agreed to in writing, software
         | 
| 66 | 
            +
               distributed under the License is distributed on an "AS IS" BASIS,
         | 
| 67 | 
            +
               WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
         | 
| 68 | 
            +
               See the License for the specific language governing permissions and
         | 
| 69 | 
            +
               limitations under the License.
         | 
| 70 | 
            +
             | 
| 71 | 
            +
            The above license applies to all contributions after 2017-12-01, as well as
         | 
| 72 | 
            +
            all contributions that have been re-licensed (see AUTHORS file for the list of
         | 
| 73 | 
            +
            contributors who have re-licensed their code).
         | 
| 74 | 
            +
            --------------------------------------------------------------------------------
         | 
| 75 | 
            +
            dateutil - Extensions to the standard Python datetime module.
         | 
| 76 | 
            +
             | 
| 77 | 
            +
            Copyright (c) 2003-2011 - Gustavo Niemeyer <[email protected]>
         | 
| 78 | 
            +
            Copyright (c) 2012-2014 - Tomi Pieviläinen <[email protected]>
         | 
| 79 | 
            +
            Copyright (c) 2014-2016 - Yaron de Leeuw <[email protected]>
         | 
| 80 | 
            +
            Copyright (c) 2015-     - Paul Ganssle <[email protected]>
         | 
| 81 | 
            +
            Copyright (c) 2015-     - dateutil contributors (see AUTHORS file)
         | 
| 82 | 
            +
             | 
| 83 | 
            +
            All rights reserved.
         | 
| 84 | 
            +
             | 
| 85 | 
            +
            Redistribution and use in source and binary forms, with or without
         | 
| 86 | 
            +
            modification, are permitted provided that the following conditions are met:
         | 
| 87 | 
            +
             | 
| 88 | 
            +
                * Redistributions of source code must retain the above copyright notice,
         | 
| 89 | 
            +
                  this list of conditions and the following disclaimer.
         | 
| 90 | 
            +
                * Redistributions in binary form must reproduce the above copyright notice,
         | 
| 91 | 
            +
                  this list of conditions and the following disclaimer in the documentation
         | 
| 92 | 
            +
                  and/or other materials provided with the distribution.
         | 
| 93 | 
            +
                * Neither the name of the copyright holder nor the names of its
         | 
| 94 | 
            +
                  contributors may be used to endorse or promote products derived from
         | 
| 95 | 
            +
                  this software without specific prior written permission.
         | 
| 96 | 
            +
             | 
| 97 | 
            +
            THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
         | 
| 98 | 
            +
            "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
         | 
| 99 | 
            +
            LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
         | 
| 100 | 
            +
            A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
         | 
| 101 | 
            +
            CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
         | 
| 102 | 
            +
            EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
         | 
| 103 | 
            +
            PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
         | 
| 104 | 
            +
            PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
         | 
| 105 | 
            +
            LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
         | 
| 106 | 
            +
            NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
         | 
| 107 | 
            +
            SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         | 
| 108 | 
            +
             | 
| 109 | 
            +
            The above BSD License Applies to all code, even that also covered by Apache 2.0.# MIT License
         | 
| 110 | 
            +
             | 
| 111 | 
            +
            Copyright (c) 2019 Hadley Wickham; RStudio; and Evan Miller
         | 
| 112 | 
            +
             | 
| 113 | 
            +
            Permission is hereby granted, free of charge, to any person obtaining a copy
         | 
| 114 | 
            +
            of this software and associated documentation files (the "Software"), to deal
         | 
| 115 | 
            +
            in the Software without restriction, including without limitation the rights
         | 
| 116 | 
            +
            to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
         | 
| 117 | 
            +
            copies of the Software, and to permit persons to whom the Software is
         | 
| 118 | 
            +
            furnished to do so, subject to the following conditions:
         | 
| 119 | 
            +
             | 
| 120 | 
            +
            The above copyright notice and this permission notice shall be included in all
         | 
| 121 | 
            +
            copies or substantial portions of the Software.
         | 
| 122 | 
            +
             | 
| 123 | 
            +
            THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
         | 
| 124 | 
            +
            IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
         | 
| 125 | 
            +
            FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
         | 
| 126 | 
            +
            AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
         | 
| 127 | 
            +
            LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
         | 
| 128 | 
            +
            OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
         | 
| 129 | 
            +
            SOFTWARE.
         | 
| 130 | 
            +
            Based on http://opensource.org/licenses/MIT
         | 
| 131 | 
            +
             | 
| 132 | 
            +
            This is a template. Complete and ship as file LICENSE the following 2
         | 
| 133 | 
            +
            lines (only)
         | 
| 134 | 
            +
             | 
| 135 | 
            +
            YEAR:
         | 
| 136 | 
            +
            COPYRIGHT HOLDER:
         | 
| 137 | 
            +
             | 
| 138 | 
            +
            and specify as
         | 
| 139 | 
            +
             | 
| 140 | 
            +
            License: MIT + file LICENSE
         | 
| 141 | 
            +
             | 
| 142 | 
            +
            Copyright (c) <YEAR>, <COPYRIGHT HOLDER>
         | 
| 143 | 
            +
             | 
| 144 | 
            +
            Permission is hereby granted, free of charge, to any person obtaining
         | 
| 145 | 
            +
            a copy of this software and associated documentation files (the
         | 
| 146 | 
            +
            "Software"), to deal in the Software without restriction, including
         | 
| 147 | 
            +
            without limitation the rights to use, copy, modify, merge, publish,
         | 
| 148 | 
            +
            distribute, sublicense, and/or sell copies of the Software, and to
         | 
| 149 | 
            +
            permit persons to whom the Software is furnished to do so, subject to
         | 
| 150 | 
            +
            the following conditions:
         | 
| 151 | 
            +
             | 
| 152 | 
            +
            The above copyright notice and this permission notice shall be
         | 
| 153 | 
            +
            included in all copies or substantial portions of the Software.
         | 
| 154 | 
            +
             | 
| 155 | 
            +
            THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
         | 
| 156 | 
            +
            EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
         | 
| 157 | 
            +
            MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
         | 
| 158 | 
            +
            NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
         | 
| 159 | 
            +
            LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
         | 
| 160 | 
            +
            OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
         | 
| 161 | 
            +
            WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
         | 
| 162 | 
            +
            The MIT License
         | 
| 163 | 
            +
             | 
| 164 | 
            +
            Copyright (c) 2008-     Attractive Chaos <[email protected]>
         | 
| 165 | 
            +
             | 
| 166 | 
            +
            Permission is hereby granted, free of charge, to any person obtaining
         | 
| 167 | 
            +
            a copy of this software and associated documentation files (the
         | 
| 168 | 
            +
            "Software"), to deal in the Software without restriction, including
         | 
| 169 | 
            +
            without limitation the rights to use, copy, modify, merge, publish,
         | 
| 170 | 
            +
            distribute, sublicense, and/or sell copies of the Software, and to
         | 
| 171 | 
            +
            permit persons to whom the Software is furnished to do so, subject to
         | 
| 172 | 
            +
            the following conditions:
         | 
| 173 | 
            +
             | 
| 174 | 
            +
            The above copyright notice and this permission notice shall be
         | 
| 175 | 
            +
            included in all copies or substantial portions of the Software.
         | 
| 176 | 
            +
             | 
| 177 | 
            +
            THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
         | 
| 178 | 
            +
            EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
         | 
| 179 | 
            +
            MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
         | 
| 180 | 
            +
            NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
         | 
| 181 | 
            +
            BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
         | 
| 182 | 
            +
            ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
         | 
| 183 | 
            +
            CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
         | 
| 184 | 
            +
            SOFTWARE.musl as a whole is licensed under the following standard MIT license:
         | 
| 185 | 
            +
             | 
| 186 | 
            +
            ----------------------------------------------------------------------
         | 
| 187 | 
            +
            Copyright © 2005-2020 Rich Felker, et al.
         | 
| 188 | 
            +
             | 
| 189 | 
            +
            Permission is hereby granted, free of charge, to any person obtaining
         | 
| 190 | 
            +
            a copy of this software and associated documentation files (the
         | 
| 191 | 
            +
            "Software"), to deal in the Software without restriction, including
         | 
| 192 | 
            +
            without limitation the rights to use, copy, modify, merge, publish,
         | 
| 193 | 
            +
            distribute, sublicense, and/or sell copies of the Software, and to
         | 
| 194 | 
            +
            permit persons to whom the Software is furnished to do so, subject to
         | 
| 195 | 
            +
            the following conditions:
         | 
| 196 | 
            +
             | 
| 197 | 
            +
            The above copyright notice and this permission notice shall be
         | 
| 198 | 
            +
            included in all copies or substantial portions of the Software.
         | 
| 199 | 
            +
             | 
| 200 | 
            +
            THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
         | 
| 201 | 
            +
            EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
         | 
| 202 | 
            +
            MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
         | 
| 203 | 
            +
            IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
         | 
| 204 | 
            +
            CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
         | 
| 205 | 
            +
            TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
         | 
| 206 | 
            +
            SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
         | 
| 207 | 
            +
            ----------------------------------------------------------------------
         | 
| 208 | 
            +
             | 
| 209 | 
            +
            Authors/contributors include:
         | 
| 210 | 
            +
             | 
| 211 | 
            +
            A. Wilcox
         | 
| 212 | 
            +
            Ada Worcester
         | 
| 213 | 
            +
            Alex Dowad
         | 
| 214 | 
            +
            Alex Suykov
         | 
| 215 | 
            +
            Alexander Monakov
         | 
| 216 | 
            +
            Andre McCurdy
         | 
| 217 | 
            +
            Andrew Kelley
         | 
| 218 | 
            +
            Anthony G. Basile
         | 
| 219 | 
            +
            Aric Belsito
         | 
| 220 | 
            +
            Arvid Picciani
         | 
| 221 | 
            +
            Bartosz Brachaczek
         | 
| 222 | 
            +
            Benjamin Peterson
         | 
| 223 | 
            +
            Bobby Bingham
         | 
| 224 | 
            +
            Boris Brezillon
         | 
| 225 | 
            +
            Brent Cook
         | 
| 226 | 
            +
            Chris Spiegel
         | 
| 227 | 
            +
            Clément Vasseur
         | 
| 228 | 
            +
            Daniel Micay
         | 
| 229 | 
            +
            Daniel Sabogal
         | 
| 230 | 
            +
            Daurnimator
         | 
| 231 | 
            +
            David Carlier
         | 
| 232 | 
            +
            David Edelsohn
         | 
| 233 | 
            +
            Denys Vlasenko
         | 
| 234 | 
            +
            Dmitry Ivanov
         | 
| 235 | 
            +
            Dmitry V. Levin
         | 
| 236 | 
            +
            Drew DeVault
         | 
| 237 | 
            +
            Emil Renner Berthing
         | 
| 238 | 
            +
            Fangrui Song
         | 
| 239 | 
            +
            Felix Fietkau
         | 
| 240 | 
            +
            Felix Janda
         | 
| 241 | 
            +
            Gianluca Anzolin
         | 
| 242 | 
            +
            Hauke Mehrtens
         | 
| 243 | 
            +
            He X
         | 
| 244 | 
            +
            Hiltjo Posthuma
         | 
| 245 | 
            +
            Isaac Dunham
         | 
| 246 | 
            +
            Jaydeep Patil
         | 
| 247 | 
            +
            Jens Gustedt
         | 
| 248 | 
            +
            Jeremy Huntwork
         | 
| 249 | 
            +
            Jo-Philipp Wich
         | 
| 250 | 
            +
            Joakim Sindholt
         | 
| 251 | 
            +
            John Spencer
         | 
| 252 | 
            +
            Julien Ramseier
         | 
| 253 | 
            +
            Justin Cormack
         | 
| 254 | 
            +
            Kaarle Ritvanen
         | 
| 255 | 
            +
            Khem Raj
         | 
| 256 | 
            +
            Kylie McClain
         | 
| 257 | 
            +
            Leah Neukirchen
         | 
| 258 | 
            +
            Luca Barbato
         | 
| 259 | 
            +
            Luka Perkov
         | 
| 260 | 
            +
            M Farkas-Dyck (Strake)
         | 
| 261 | 
            +
            Mahesh Bodapati
         | 
| 262 | 
            +
            Markus Wichmann
         | 
| 263 | 
            +
            Masanori Ogino
         | 
| 264 | 
            +
            Michael Clark
         | 
| 265 | 
            +
            Michael Forney
         | 
| 266 | 
            +
            Mikhail Kremnyov
         | 
| 267 | 
            +
            Natanael Copa
         | 
| 268 | 
            +
            Nicholas J. Kain
         | 
| 269 | 
            +
            orc
         | 
| 270 | 
            +
            Pascal Cuoq
         | 
| 271 | 
            +
            Patrick Oppenlander
         | 
| 272 | 
            +
            Petr Hosek
         | 
| 273 | 
            +
            Petr Skocik
         | 
| 274 | 
            +
            Pierre Carrier
         | 
| 275 | 
            +
            Reini Urban
         | 
| 276 | 
            +
            Rich Felker
         | 
| 277 | 
            +
            Richard Pennington
         | 
| 278 | 
            +
            Ryan Fairfax
         | 
| 279 | 
            +
            Samuel Holland
         | 
| 280 | 
            +
            Segev Finer
         | 
| 281 | 
            +
            Shiz
         | 
| 282 | 
            +
            sin
         | 
| 283 | 
            +
            Solar Designer
         | 
| 284 | 
            +
            Stefan Kristiansson
         | 
| 285 | 
            +
            Stefan O'Rear
         | 
| 286 | 
            +
            Szabolcs Nagy
         | 
| 287 | 
            +
            Timo Teräs
         | 
| 288 | 
            +
            Trutz Behn
         | 
| 289 | 
            +
            Valentin Ochs
         | 
| 290 | 
            +
            Will Dietz
         | 
| 291 | 
            +
            William Haddon
         | 
| 292 | 
            +
            William Pitcock
         | 
| 293 | 
            +
             | 
| 294 | 
            +
            Portions of this software are derived from third-party works licensed
         | 
| 295 | 
            +
            under terms compatible with the above MIT license:
         | 
| 296 | 
            +
             | 
| 297 | 
            +
            The TRE regular expression implementation (src/regex/reg* and
         | 
| 298 | 
            +
            src/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed
         | 
| 299 | 
            +
            under a 2-clause BSD license (license text in the source files). The
         | 
| 300 | 
            +
            included version has been heavily modified by Rich Felker in 2012, in
         | 
| 301 | 
            +
            the interests of size, simplicity, and namespace cleanliness.
         | 
| 302 | 
            +
             | 
| 303 | 
            +
            Much of the math library code (src/math/* and src/complex/*) is
         | 
| 304 | 
            +
            Copyright © 1993,2004 Sun Microsystems or
         | 
| 305 | 
            +
            Copyright © 2003-2011 David Schultz or
         | 
| 306 | 
            +
            Copyright © 2003-2009 Steven G. Kargl or
         | 
| 307 | 
            +
            Copyright © 2003-2009 Bruce D. Evans or
         | 
| 308 | 
            +
            Copyright © 2008 Stephen L. Moshier or
         | 
| 309 | 
            +
            Copyright © 2017-2018 Arm Limited
         | 
| 310 | 
            +
            and labelled as such in comments in the individual source files. All
         | 
| 311 | 
            +
            have been licensed under extremely permissive terms.
         | 
| 312 | 
            +
             | 
| 313 | 
            +
            The ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008
         | 
| 314 | 
            +
            The Android Open Source Project and is licensed under a two-clause BSD
         | 
| 315 | 
            +
            license. It was taken from Bionic libc, used on Android.
         | 
| 316 | 
            +
             | 
| 317 | 
            +
            The AArch64 memcpy and memset code (src/string/aarch64/*) are
         | 
| 318 | 
            +
            Copyright © 1999-2019, Arm Limited.
         | 
| 319 | 
            +
             | 
| 320 | 
            +
            The implementation of DES for crypt (src/crypt/crypt_des.c) is
         | 
| 321 | 
            +
            Copyright © 1994 David Burren. It is licensed under a BSD license.
         | 
| 322 | 
            +
             | 
| 323 | 
            +
            The implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was
         | 
| 324 | 
            +
            originally written by Solar Designer and placed into the public
         | 
| 325 | 
            +
            domain. The code also comes with a fallback permissive license for use
         | 
| 326 | 
            +
            in jurisdictions that may not recognize the public domain.
         | 
| 327 | 
            +
             | 
| 328 | 
            +
            The smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011
         | 
| 329 | 
            +
            Valentin Ochs and is licensed under an MIT-style license.
         | 
| 330 | 
            +
             | 
| 331 | 
            +
            The x86_64 port was written by Nicholas J. Kain and is licensed under
         | 
| 332 | 
            +
            the standard MIT terms.
         | 
| 333 | 
            +
             | 
| 334 | 
            +
            The mips and microblaze ports were originally written by Richard
         | 
| 335 | 
            +
            Pennington for use in the ellcc project. The original code was adapted
         | 
| 336 | 
            +
            by Rich Felker for build system and code conventions during upstream
         | 
| 337 | 
            +
            integration. It is licensed under the standard MIT terms.
         | 
| 338 | 
            +
             | 
| 339 | 
            +
            The mips64 port was contributed by Imagination Technologies and is
         | 
| 340 | 
            +
            licensed under the standard MIT terms.
         | 
| 341 | 
            +
             | 
| 342 | 
            +
            The powerpc port was also originally written by Richard Pennington,
         | 
| 343 | 
            +
            and later supplemented and integrated by John Spencer. It is licensed
         | 
| 344 | 
            +
            under the standard MIT terms.
         | 
| 345 | 
            +
             | 
| 346 | 
            +
            All other files which have no copyright comments are original works
         | 
| 347 | 
            +
            produced specifically for use as part of this library, written either
         | 
| 348 | 
            +
            by Rich Felker, the main author of the library, or by one or more
         | 
| 349 | 
            +
            contibutors listed above. Details on authorship of individual files
         | 
| 350 | 
            +
            can be found in the git version control history of the project. The
         | 
| 351 | 
            +
            omission of copyright and license comments in each file is in the
         | 
| 352 | 
            +
            interest of source tree size.
         | 
| 353 | 
            +
             | 
| 354 | 
            +
            In addition, permission is hereby granted for all public header files
         | 
| 355 | 
            +
            (include/* and arch/*/bits/*) and crt files intended to be linked into
         | 
| 356 | 
            +
            applications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit
         | 
| 357 | 
            +
            the copyright notice and permission notice otherwise required by the
         | 
| 358 | 
            +
            license, and to use these files without any requirement of
         | 
| 359 | 
            +
            attribution. These files include substantial contributions from:
         | 
| 360 | 
            +
             | 
| 361 | 
            +
            Bobby Bingham
         | 
| 362 | 
            +
            John Spencer
         | 
| 363 | 
            +
            Nicholas J. Kain
         | 
| 364 | 
            +
            Rich Felker
         | 
| 365 | 
            +
            Richard Pennington
         | 
| 366 | 
            +
            Stefan Kristiansson
         | 
| 367 | 
            +
            Szabolcs Nagy
         | 
| 368 | 
            +
             | 
| 369 | 
            +
            all of whom have explicitly granted such permission.
         | 
| 370 | 
            +
             | 
| 371 | 
            +
            This file previously contained text expressing a belief that most of
         | 
| 372 | 
            +
            the files covered by the above exception were sufficiently trivial not
         | 
| 373 | 
            +
            to be subject to copyright, resulting in confusion over whether it
         | 
| 374 | 
            +
            negated the permissions granted in the license. In the spirit of
         | 
| 375 | 
            +
            permissive licensing, and of not having licensing issues being an
         | 
| 376 | 
            +
            obstacle to adoption, that text has been removed.Copyright (c) 2005-2023, NumPy Developers.
         | 
| 377 | 
            +
            All rights reserved.
         | 
| 378 | 
            +
             | 
| 379 | 
            +
            Redistribution and use in source and binary forms, with or without
         | 
| 380 | 
            +
            modification, are permitted provided that the following conditions are
         | 
| 381 | 
            +
            met:
         | 
| 382 | 
            +
             | 
| 383 | 
            +
                * Redistributions of source code must retain the above copyright
         | 
| 384 | 
            +
                   notice, this list of conditions and the following disclaimer.
         | 
| 385 | 
            +
             | 
| 386 | 
            +
                * Redistributions in binary form must reproduce the above
         | 
| 387 | 
            +
                   copyright notice, this list of conditions and the following
         | 
| 388 | 
            +
                   disclaimer in the documentation and/or other materials provided
         | 
| 389 | 
            +
                   with the distribution.
         | 
| 390 | 
            +
             | 
| 391 | 
            +
                * Neither the name of the NumPy Developers nor the names of any
         | 
| 392 | 
            +
                   contributors may be used to endorse or promote products derived
         | 
| 393 | 
            +
                   from this software without specific prior written permission.
         | 
| 394 | 
            +
             | 
| 395 | 
            +
            THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
         | 
| 396 | 
            +
            "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
         | 
| 397 | 
            +
            LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
         | 
| 398 | 
            +
            A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
         | 
| 399 | 
            +
            OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
         | 
| 400 | 
            +
            SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
         | 
| 401 | 
            +
            LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
         | 
| 402 | 
            +
            DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
         | 
| 403 | 
            +
            THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
         | 
| 404 | 
            +
            (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
         | 
| 405 | 
            +
            OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         | 
| 406 | 
            +
                                             Apache License
         | 
| 407 | 
            +
                                       Version 2.0, January 2004
         | 
| 408 | 
            +
                                    http://www.apache.org/licenses/
         | 
| 409 | 
            +
             | 
| 410 | 
            +
               TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
         | 
| 411 | 
            +
             | 
| 412 | 
            +
               1. Definitions.
         | 
| 413 | 
            +
             | 
| 414 | 
            +
                  "License" shall mean the terms and conditions for use, reproduction,
         | 
| 415 | 
            +
                  and distribution as defined by Sections 1 through 9 of this document.
         | 
| 416 | 
            +
             | 
| 417 | 
            +
                  "Licensor" shall mean the copyright owner or entity authorized by
         | 
| 418 | 
            +
                  the copyright owner that is granting the License.
         | 
| 419 | 
            +
             | 
| 420 | 
            +
                  "Legal Entity" shall mean the union of the acting entity and all
         | 
| 421 | 
            +
                  other entities that control, are controlled by, or are under common
         | 
| 422 | 
            +
                  control with that entity. For the purposes of this definition,
         | 
| 423 | 
            +
                  "control" means (i) the power, direct or indirect, to cause the
         | 
| 424 | 
            +
                  direction or management of such entity, whether by contract or
         | 
| 425 | 
            +
                  otherwise, or (ii) ownership of fifty percent (50%) or more of the
         | 
| 426 | 
            +
                  outstanding shares, or (iii) beneficial ownership of such entity.
         | 
| 427 | 
            +
             | 
| 428 | 
            +
                  "You" (or "Your") shall mean an individual or Legal Entity
         | 
| 429 | 
            +
                  exercising permissions granted by this License.
         | 
| 430 | 
            +
             | 
| 431 | 
            +
                  "Source" form shall mean the preferred form for making modifications,
         | 
| 432 | 
            +
                  including but not limited to software source code, documentation
         | 
| 433 | 
            +
                  source, and configuration files.
         | 
| 434 | 
            +
             | 
| 435 | 
            +
                  "Object" form shall mean any form resulting from mechanical
         | 
| 436 | 
            +
                  transformation or translation of a Source form, including but
         | 
| 437 | 
            +
                  not limited to compiled object code, generated documentation,
         | 
| 438 | 
            +
                  and conversions to other media types.
         | 
| 439 | 
            +
             | 
| 440 | 
            +
                  "Work" shall mean the work of authorship, whether in Source or
         | 
| 441 | 
            +
                  Object form, made available under the License, as indicated by a
         | 
| 442 | 
            +
                  copyright notice that is included in or attached to the work
         | 
| 443 | 
            +
                  (an example is provided in the Appendix below).
         | 
| 444 | 
            +
             | 
| 445 | 
            +
                  "Derivative Works" shall mean any work, whether in Source or Object
         | 
| 446 | 
            +
                  form, that is based on (or derived from) the Work and for which the
         | 
| 447 | 
            +
                  editorial revisions, annotations, elaborations, or other modifications
         | 
| 448 | 
            +
                  represent, as a whole, an original work of authorship. For the purposes
         | 
| 449 | 
            +
                  of this License, Derivative Works shall not include works that remain
         | 
| 450 | 
            +
                  separable from, or merely link (or bind by name) to the interfaces of,
         | 
| 451 | 
            +
                  the Work and Derivative Works thereof.
         | 
| 452 | 
            +
             | 
| 453 | 
            +
                  "Contribution" shall mean any work of authorship, including
         | 
| 454 | 
            +
                  the original version of the Work and any modifications or additions
         | 
| 455 | 
            +
                  to that Work or Derivative Works thereof, that is intentionally
         | 
| 456 | 
            +
                  submitted to Licensor for inclusion in the Work by the copyright owner
         | 
| 457 | 
            +
                  or by an individual or Legal Entity authorized to submit on behalf of
         | 
| 458 | 
            +
                  the copyright owner. For the purposes of this definition, "submitted"
         | 
| 459 | 
            +
                  means any form of electronic, verbal, or written communication sent
         | 
| 460 | 
            +
                  to the Licensor or its representatives, including but not limited to
         | 
| 461 | 
            +
                  communication on electronic mailing lists, source code control systems,
         | 
| 462 | 
            +
                  and issue tracking systems that are managed by, or on behalf of, the
         | 
| 463 | 
            +
                  Licensor for the purpose of discussing and improving the Work, but
         | 
| 464 | 
            +
                  excluding communication that is conspicuously marked or otherwise
         | 
| 465 | 
            +
                  designated in writing by the copyright owner as "Not a Contribution."
         | 
| 466 | 
            +
             | 
| 467 | 
            +
                  "Contributor" shall mean Licensor and any individual or Legal Entity
         | 
| 468 | 
            +
                  on behalf of whom a Contribution has been received by Licensor and
         | 
| 469 | 
            +
                  subsequently incorporated within the Work.
         | 
| 470 | 
            +
             | 
| 471 | 
            +
               2. Grant of Copyright License. Subject to the terms and conditions of
         | 
| 472 | 
            +
                  this License, each Contributor hereby grants to You a perpetual,
         | 
| 473 | 
            +
                  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
         | 
| 474 | 
            +
                  copyright license to reproduce, prepare Derivative Works of,
         | 
| 475 | 
            +
                  publicly display, publicly perform, sublicense, and distribute the
         | 
| 476 | 
            +
                  Work and such Derivative Works in Source or Object form.
         | 
| 477 | 
            +
             | 
| 478 | 
            +
               3. Grant of Patent License. Subject to the terms and conditions of
         | 
| 479 | 
            +
                  this License, each Contributor hereby grants to You a perpetual,
         | 
| 480 | 
            +
                  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
         | 
| 481 | 
            +
                  (except as stated in this section) patent license to make, have made,
         | 
| 482 | 
            +
                  use, offer to sell, sell, import, and otherwise transfer the Work,
         | 
| 483 | 
            +
                  where such license applies only to those patent claims licensable
         | 
| 484 | 
            +
                  by such Contributor that are necessarily infringed by their
         | 
| 485 | 
            +
                  Contribution(s) alone or by combination of their Contribution(s)
         | 
| 486 | 
            +
                  with the Work to which such Contribution(s) was submitted. If You
         | 
| 487 | 
            +
                  institute patent litigation against any entity (including a
         | 
| 488 | 
            +
                  cross-claim or counterclaim in a lawsuit) alleging that the Work
         | 
| 489 | 
            +
                  or a Contribution incorporated within the Work constitutes direct
         | 
| 490 | 
            +
                  or contributory patent infringement, then any patent licenses
         | 
| 491 | 
            +
                  granted to You under this License for that Work shall terminate
         | 
| 492 | 
            +
                  as of the date such litigation is filed.
         | 
| 493 | 
            +
             | 
| 494 | 
            +
               4. Redistribution. You may reproduce and distribute copies of the
         | 
| 495 | 
            +
                  Work or Derivative Works thereof in any medium, with or without
         | 
| 496 | 
            +
                  modifications, and in Source or Object form, provided that You
         | 
| 497 | 
            +
                  meet the following conditions:
         | 
| 498 | 
            +
             | 
| 499 | 
            +
                  (a) You must give any other recipients of the Work or
         | 
| 500 | 
            +
                      Derivative Works a copy of this License; and
         | 
| 501 | 
            +
             | 
| 502 | 
            +
                  (b) You must cause any modified files to carry prominent notices
         | 
| 503 | 
            +
                      stating that You changed the files; and
         | 
| 504 | 
            +
             | 
| 505 | 
            +
                  (c) You must retain, in the Source form of any Derivative Works
         | 
| 506 | 
            +
                      that You distribute, all copyright, patent, trademark, and
         | 
| 507 | 
            +
                      attribution notices from the Source form of the Work,
         | 
| 508 | 
            +
                      excluding those notices that do not pertain to any part of
         | 
| 509 | 
            +
                      the Derivative Works; and
         | 
| 510 | 
            +
             | 
| 511 | 
            +
                  (d) If the Work includes a "NOTICE" text file as part of its
         | 
| 512 | 
            +
                      distribution, then any Derivative Works that You distribute must
         | 
| 513 | 
            +
                      include a readable copy of the attribution notices contained
         | 
| 514 | 
            +
                      within such NOTICE file, excluding those notices that do not
         | 
| 515 | 
            +
                      pertain to any part of the Derivative Works, in at least one
         | 
| 516 | 
            +
                      of the following places: within a NOTICE text file distributed
         | 
| 517 | 
            +
                      as part of the Derivative Works; within the Source form or
         | 
| 518 | 
            +
                      documentation, if provided along with the Derivative Works; or,
         | 
| 519 | 
            +
                      within a display generated by the Derivative Works, if and
         | 
| 520 | 
            +
                      wherever such third-party notices normally appear. The contents
         | 
| 521 | 
            +
                      of the NOTICE file are for informational purposes only and
         | 
| 522 | 
            +
                      do not modify the License. You may add Your own attribution
         | 
| 523 | 
            +
                      notices within Derivative Works that You distribute, alongside
         | 
| 524 | 
            +
                      or as an addendum to the NOTICE text from the Work, provided
         | 
| 525 | 
            +
                      that such additional attribution notices cannot be construed
         | 
| 526 | 
            +
                      as modifying the License.
         | 
| 527 | 
            +
             | 
| 528 | 
            +
                  You may add Your own copyright statement to Your modifications and
         | 
| 529 | 
            +
                  may provide additional or different license terms and conditions
         | 
| 530 | 
            +
                  for use, reproduction, or distribution of Your modifications, or
         | 
| 531 | 
            +
                  for any such Derivative Works as a whole, provided Your use,
         | 
| 532 | 
            +
                  reproduction, and distribution of the Work otherwise complies with
         | 
| 533 | 
            +
                  the conditions stated in this License.
         | 
| 534 | 
            +
             | 
| 535 | 
            +
               5. Submission of Contributions. Unless You explicitly state otherwise,
         | 
| 536 | 
            +
                  any Contribution intentionally submitted for inclusion in the Work
         | 
| 537 | 
            +
                  by You to the Licensor shall be under the terms and conditions of
         | 
| 538 | 
            +
                  this License, without any additional terms or conditions.
         | 
| 539 | 
            +
                  Notwithstanding the above, nothing herein shall supersede or modify
         | 
| 540 | 
            +
                  the terms of any separate license agreement you may have executed
         | 
| 541 | 
            +
                  with Licensor regarding such Contributions.
         | 
| 542 | 
            +
             | 
| 543 | 
            +
               6. Trademarks. This License does not grant permission to use the trade
         | 
| 544 | 
            +
                  names, trademarks, service marks, or product names of the Licensor,
         | 
| 545 | 
            +
                  except as required for reasonable and customary use in describing the
         | 
| 546 | 
            +
                  origin of the Work and reproducing the content of the NOTICE file.
         | 
| 547 | 
            +
             | 
| 548 | 
            +
               7. Disclaimer of Warranty. Unless required by applicable law or
         | 
| 549 | 
            +
                  agreed to in writing, Licensor provides the Work (and each
         | 
| 550 | 
            +
                  Contributor provides its Contributions) on an "AS IS" BASIS,
         | 
| 551 | 
            +
                  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
         | 
| 552 | 
            +
                  implied, including, without limitation, any warranties or conditions
         | 
| 553 | 
            +
                  of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
         | 
| 554 | 
            +
                  PARTICULAR PURPOSE. You are solely responsible for determining the
         | 
| 555 | 
            +
                  appropriateness of using or redistributing the Work and assume any
         | 
| 556 | 
            +
                  risks associated with Your exercise of permissions under this License.
         | 
| 557 | 
            +
             | 
| 558 | 
            +
               8. Limitation of Liability. In no event and under no legal theory,
         | 
| 559 | 
            +
                  whether in tort (including negligence), contract, or otherwise,
         | 
| 560 | 
            +
                  unless required by applicable law (such as deliberate and grossly
         | 
| 561 | 
            +
                  negligent acts) or agreed to in writing, shall any Contributor be
         | 
| 562 | 
            +
                  liable to You for damages, including any direct, indirect, special,
         | 
| 563 | 
            +
                  incidental, or consequential damages of any character arising as a
         | 
| 564 | 
            +
                  result of this License or out of the use or inability to use the
         | 
| 565 | 
            +
                  Work (including but not limited to damages for loss of goodwill,
         | 
| 566 | 
            +
                  work stoppage, computer failure or malfunction, or any and all
         | 
| 567 | 
            +
                  other commercial damages or losses), even if such Contributor
         | 
| 568 | 
            +
                  has been advised of the possibility of such damages.
         | 
| 569 | 
            +
             | 
| 570 | 
            +
               9. Accepting Warranty or Additional Liability. While redistributing
         | 
| 571 | 
            +
                  the Work or Derivative Works thereof, You may choose to offer,
         | 
| 572 | 
            +
                  and charge a fee for, acceptance of support, warranty, indemnity,
         | 
| 573 | 
            +
                  or other liability obligations and/or rights consistent with this
         | 
| 574 | 
            +
                  License. However, in accepting such obligations, You may act only
         | 
| 575 | 
            +
                  on Your own behalf and on Your sole responsibility, not on behalf
         | 
| 576 | 
            +
                  of any other Contributor, and only if You agree to indemnify,
         | 
| 577 | 
            +
                  defend, and hold each Contributor harmless for any liability
         | 
| 578 | 
            +
                  incurred by, or claims asserted against, such Contributor by reason
         | 
| 579 | 
            +
                  of your accepting any such warranty or additional liability.
         | 
| 580 | 
            +
             | 
| 581 | 
            +
               END OF TERMS AND CONDITIONS
         | 
| 582 | 
            +
             | 
| 583 | 
            +
             | 
| 584 | 
            +
Copyright (c) Donald Stufft and individual contributors.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice,
       this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

A. HISTORY OF THE SOFTWARE
==========================

Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
as a successor of a language called ABC.  Guido remains Python's
principal author, although it includes many contributions from others.

In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.

In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
year, the PythonLabs team moved to Digital Creations, which became
Zope Corporation.  In 2001, the Python Software Foundation (PSF, see
https://www.python.org/psf/) was formed, a non-profit organization
created specifically to own Python-related Intellectual Property.
Zope Corporation was a sponsoring member of the PSF.

All Python releases are Open Source (see https://opensource.org for
the Open Source Definition).  Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.

    Release         Derived     Year        Owner       GPL-
                    from                                compatible? (1)

    0.9.0 thru 1.2              1991-1995   CWI         yes
    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
    1.6             1.5.2       2000        CNRI        no
    2.0             1.6         2000        BeOpen.com  no
    1.6.1           1.6         2001        CNRI        yes (2)
    2.1             2.0+1.6.1   2001        PSF         no
    2.0.1           2.0+1.6.1   2001        PSF         yes
    2.1.1           2.1+2.0.1   2001        PSF         yes
    2.1.2           2.1.1       2002        PSF         yes
    2.1.3           2.1.2       2002        PSF         yes
    2.2 and above   2.1.1       2001-now    PSF         yes

Footnotes:

(1) GPL-compatible doesn't mean that we're distributing Python under
    the GPL.  All Python licenses, unlike the GPL, let you distribute
    a modified version without making your changes open source.  The
    GPL-compatible licenses make it possible to combine Python with
    other software that is released under the GPL; the others don't.

(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
    because its license has a choice of law clause.  According to
    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
    is "not incompatible" with the GPL.

Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.


B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================

Python software and documentation are licensed under the
Python Software Foundation License Version 2.

Starting with Python 3.8.6, examples, recipes, and other code in
the documentation are dual licensed under the PSF License Version 2
and the Zero-Clause BSD license.

Some software incorporated into Python is under different licenses.
The licenses are listed with code falling under that license.

PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------

1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.

2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
All Rights Reserved" are retained in Python alone or in any derivative version
prepared by Licensee.

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.

4. PSF is making Python available to Licensee on an "AS IS"
basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee.  This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.

8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.

BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------

BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1

1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").

2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.

3. BeOpen is making the Software available to Licensee on an "AS IS"
basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions.  Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee.  This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party.  As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.

7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.

CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------

1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.

2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee.  Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement.  This Agreement together with
Python 1.6.1 may be located on the internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013.  This
Agreement may also be obtained from a proxy server on the internet
using the following URL: http://hdl.handle.net/1895.22/1013".

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.

4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee.  This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.

8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.

        ACCEPT

CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------

Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands.  All rights reserved.

Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.

STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
----------------------------------------------------------------------

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

Copyright (c) 2014, Al Sweigart
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the {organization} nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Copyright (c) 2017 Anthony Sottile

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

Copyright (c) 2015-2019 Jared Hobbs

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Developed by ESN, an Electronic Arts Inc. studio.
Copyright (c) 2014, Electronic Arts Inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of ESN, Electronic Arts Inc. nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS INC. BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

----

Portions of code from MODP_ASCII - Ascii transformations (upper/lower, etc)
https://github.com/client9/stringencoders

  Copyright 2005, 2006, 2007
  Nick Galbreath -- nickg [at] modp [dot] com
  All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions are
  met:

    Redistributions of source code must retain the above copyright
    notice, this list of conditions and the following disclaimer.

    Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.

    Neither the name of the modp.com nor the names of its
    contributors may be used to endorse or promote products derived from
    this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

  This is the standard "new" BSD license:
  http://www.opensource.org/licenses/bsd-license.php

https://github.com/client9/stringencoders/blob/cfd5c1507325ae497ea9bacdacba12c0ffd79d30/COPYING

----

Numeric decoder derived from TCL library
https://opensource.apple.com/source/tcl/tcl-14/tcl/license.terms
 * Copyright (c) 1988-1993 The Regents of the University of California.
 * Copyright (c) 1994 Sun Microsystems, Inc.

  This software is copyrighted by the Regents of the University of
  California, Sun Microsystems, Inc., Scriptics Corporation, ActiveState
  Corporation and other parties.  The following terms apply to all files
  associated with the software unless explicitly disclaimed in
  individual files.

  The authors hereby grant permission to use, copy, modify, distribute,
  and license this software and its documentation for any purpose, provided
  that existing copyright notices are retained in all copies and that this
  notice is included verbatim in any distributions. No written agreement,
  license, or royalty fee is required for any of the authorized uses.
  Modifications to this software may be copyrighted by their authors
  and need not follow the licensing terms described here, provided that
  the new terms are clearly indicated on the first page of each file where
  they apply.

  IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
  FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
  DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
  POSSIBILITY OF SUCH DAMAGE.

  THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
  INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT.  THIS SOFTWARE
  IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
  NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
  MODIFICATIONS.

  GOVERNMENT USE: If you are acquiring this software on behalf of the
  U.S. government, the Government shall have only "Restricted Rights"
  in the software and related documentation as defined in the Federal
  Acquisition Regulations (FARs) in Clause 52.227.19 (c) (2).  If you
  are acquiring the software on behalf of the Department of Defense, the
  software shall be classified as "Commercial Computer Software" and the
  Government shall have only "Restricted Rights" as defined in Clause
  252.227-7013 (c) (1) of DFARs.  Notwithstanding the foregoing, the
  authors grant the U.S. Government and others acting in its behalf
  permission to use and distribute the software in accordance with the
  terms specified in this license.

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and
distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright
owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities
that control, are controlled by, or are under common control with that entity.
For the purposes of this definition, "control" means (i) the power, direct or
indirect, to cause the direction or management of such entity, whether by
contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising
permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including
but not limited to software source code, documentation source, and configuration
files.

"Object" form shall mean any form resulting from mechanical transformation or
translation of a Source form, including but not limited to compiled object code,
generated documentation, and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or Object form, made
available under the License, as indicated by a copyright notice that is included
in or attached to the work (an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object form, that
is based on (or derived from) the Work and for which the editorial revisions,
annotations, elaborations, or other modifications represent, as a whole, an
original work of authorship. For the purposes of this License, Derivative Works
shall not include works that remain separable from, or merely link (or bind by
name) to the interfaces of, the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including the original version
of the Work and any modifications or additions to that Work or Derivative Works
thereof, that is intentionally submitted to Licensor for inclusion in the Work
by the copyright owner or by an individual or Legal Entity authorized to submit
on behalf of the copyright owner. For the purposes of this definition,
"submitted" means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems, and
issue tracking systems that are managed by, or on behalf of, the Licensor for
the purpose of discussing and improving the Work, but excluding communication
that is conspicuously marked or otherwise designated in writing by the copyright
owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity on behalf
of whom a Contribution has been received by Licensor and subsequently
incorporated within the Work.

2. Grant of Copyright License.

Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the Work and such
Derivative Works in Source or Object form.

3. Grant of Patent License.

Subject to the terms and conditions of this License, each Contributor hereby
grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable (except as stated in this section) patent license to make, have
made, use, offer to sell, sell, import, and otherwise transfer the Work, where
such license applies only to those patent claims licensable by such Contributor
that are necessarily infringed by their Contribution(s) alone or by combination
of their Contribution(s) with the Work to which such Contribution(s) was
submitted. If You institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work or a
Contribution incorporated within the Work constitutes direct or contributory
patent infringement, then any patent licenses granted to You under this License
for that Work shall terminate as of the date such litigation is filed.

4. Redistribution.

You may reproduce and distribute copies of the Work or Derivative Works thereof
in any medium, with or without modifications, and in Source or Object form,
provided that You meet the following conditions:

You must give any other recipients of the Work or Derivative Works a copy of
this License; and
You must cause any modified files to carry prominent notices stating that You
changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute,
all copyright, patent, trademark, and attribution notices from the Source form
of the Work, excluding those notices that do not pertain to any part of the
Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any
Derivative Works that You distribute must include a readable copy of the
attribution notices contained within such NOTICE file, excluding those notices
that do not pertain to any part of the Derivative Works, in at least one of the
following places: within a NOTICE text file distributed as part of the
Derivative Works; within the Source form or documentation, if provided along
with the Derivative Works; or, within a display generated by the Derivative
Works, if and wherever such third-party notices normally appear. The contents of
the NOTICE file are for informational purposes only and do not modify the
License. You may add Your own attribution notices within Derivative Works that
You distribute, alongside or as an addendum to the NOTICE text from the Work,
provided that such additional attribution notices cannot be construed as
modifying the License.
You may add Your own copyright statement to Your modifications and may provide
additional or different license terms and conditions for use, reproduction, or
distribution of Your modifications, or for any such Derivative Works as a whole,
provided Your use, reproduction, and distribution of the Work otherwise complies
with the conditions stated in this License.

            5. Submission of Contributions.
         | 
| 1177 | 
            +
             | 
| 1178 | 
            +
            Unless You explicitly state otherwise, any Contribution intentionally submitted
         | 
| 1179 | 
            +
            for inclusion in the Work by You to the Licensor shall be under the terms and
         | 
| 1180 | 
            +
            conditions of this License, without any additional terms or conditions.
         | 
| 1181 | 
            +
            Notwithstanding the above, nothing herein shall supersede or modify the terms of
         | 
| 1182 | 
            +
            any separate license agreement you may have executed with Licensor regarding
         | 
| 1183 | 
            +
            such Contributions.
         | 
| 1184 | 
            +
             | 
| 1185 | 
            +
            6. Trademarks.
         | 
| 1186 | 
            +
             | 
| 1187 | 
            +
            This License does not grant permission to use the trade names, trademarks,
         | 
| 1188 | 
            +
            service marks, or product names of the Licensor, except as required for
         | 
| 1189 | 
            +
            reasonable and customary use in describing the origin of the Work and
         | 
| 1190 | 
            +
            reproducing the content of the NOTICE file.
         | 
| 1191 | 
            +
             | 
| 1192 | 
            +
            7. Disclaimer of Warranty.
         | 
| 1193 | 
            +
             | 
| 1194 | 
            +
            Unless required by applicable law or agreed to in writing, Licensor provides the
         | 
| 1195 | 
            +
            Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
         | 
| 1196 | 
            +
            WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
         | 
| 1197 | 
            +
            including, without limitation, any warranties or conditions of TITLE,
         | 
| 1198 | 
            +
            NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
         | 
| 1199 | 
            +
            solely responsible for determining the appropriateness of using or
         | 
| 1200 | 
            +
            redistributing the Work and assume any risks associated with Your exercise of
         | 
| 1201 | 
            +
            permissions under this License.
         | 
| 1202 | 
            +
             | 
| 1203 | 
            +
            8. Limitation of Liability.
         | 
| 1204 | 
            +
             | 
| 1205 | 
            +
            In no event and under no legal theory, whether in tort (including negligence),
         | 
| 1206 | 
            +
            contract, or otherwise, unless required by applicable law (such as deliberate
         | 
| 1207 | 
            +
            and grossly negligent acts) or agreed to in writing, shall any Contributor be
         | 
| 1208 | 
            +
            liable to You for damages, including any direct, indirect, special, incidental,
         | 
| 1209 | 
            +
            or consequential damages of any character arising as a result of this License or
         | 
| 1210 | 
            +
            out of the use or inability to use the Work (including but not limited to
         | 
| 1211 | 
            +
            damages for loss of goodwill, work stoppage, computer failure or malfunction, or
         | 
| 1212 | 
            +
            any and all other commercial damages or losses), even if such Contributor has
         | 
| 1213 | 
            +
            been advised of the possibility of such damages.
         | 
| 1214 | 
            +
             | 
| 1215 | 
            +
            9. Accepting Warranty or Additional Liability.
         | 
| 1216 | 
            +
             | 
| 1217 | 
            +
            While redistributing the Work or Derivative Works thereof, You may choose to
         | 
| 1218 | 
            +
            offer, and charge a fee for, acceptance of support, warranty, indemnity, or
         | 
| 1219 | 
            +
            other liability obligations and/or rights consistent with this License. However,
         | 
| 1220 | 
            +
            in accepting such obligations, You may act only on Your own behalf and on Your
         | 
| 1221 | 
            +
            sole responsibility, not on behalf of any other Contributor, and only if You
         | 
| 1222 | 
            +
            agree to indemnify, defend, and hold each Contributor harmless for any liability
         | 
| 1223 | 
            +
            incurred by, or claims asserted against, such Contributor by reason of your
         | 
| 1224 | 
            +
            accepting any such warranty or additional liability.
         | 
| 1225 | 
            +
             | 
| 1226 | 
            +
            END OF TERMS AND CONDITIONS
         | 
| 1227 | 
            +
             | 
| 1228 | 
            +
            APPENDIX: How to apply the Apache License to your work
         | 
| 1229 | 
            +
             | 
| 1230 | 
            +
            To apply the Apache License to your work, attach the following boilerplate
         | 
| 1231 | 
            +
            notice, with the fields enclosed by brackets "[]" replaced with your own
         | 
| 1232 | 
            +
            identifying information. (Don't include the brackets!) The text should be
         | 
| 1233 | 
            +
            enclosed in the appropriate comment syntax for the file format. We also
         | 
| 1234 | 
            +
            recommend that a file or class name and description of purpose be included on
         | 
| 1235 | 
            +
            the same "printed page" as the copyright notice for easier identification within
         | 
| 1236 | 
            +
            third-party archives.
         | 
| 1237 | 
            +
             | 
| 1238 | 
            +
               Copyright [yyyy] [name of copyright owner]
         | 
| 1239 | 
            +
             | 
| 1240 | 
            +
               Licensed under the Apache License, Version 2.0 (the "License");
         | 
| 1241 | 
            +
               you may not use this file except in compliance with the License.
         | 
| 1242 | 
            +
               You may obtain a copy of the License at
         | 
| 1243 | 
            +
             | 
| 1244 | 
            +
                 http://www.apache.org/licenses/LICENSE-2.0
         | 
| 1245 | 
            +
             | 
| 1246 | 
            +
               Unless required by applicable law or agreed to in writing, software
         | 
| 1247 | 
            +
               distributed under the License is distributed on an "AS IS" BASIS,
         | 
| 1248 | 
            +
               WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
         | 
| 1249 | 
            +
               See the License for the specific language governing permissions and
         | 
| 1250 | 
            +
               limitations under the License.
         | 
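As a concrete illustration of the appendix above: in a Python source file the same boilerplate is simply wrapped in hash comments, with the bracketed fields left as placeholders for the copyright owner to fill in (an example, not part of the license text itself):

    # Copyright [yyyy] [name of copyright owner]
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    #     http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.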
    	
        evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/METADATA
    ADDED
    
@@ -0,0 +1,1573 @@
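The hunk below is the wheel's core-metadata record (RFC 822-style headers, Metadata-Version 2.1). As an aside, the same fields can be read at runtime through the standard library's importlib.metadata module; a minimal sketch, assuming the pandas 2.2.3 wheel listed here is installed in the active environment:

    from importlib.metadata import metadata, version

    # Parse the installed distribution's METADATA record (the file shown below).
    meta = metadata("pandas")   # mapping with email-header semantics
    print(meta["Name"])         # pandas
    print(meta["Summary"])      # Powerful data structures for data analysis, ...
    print(version("pandas"))    # 2.2.3, if that is the installed wheel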
| 1 | 
            +
            Metadata-Version: 2.1
         | 
| 2 | 
            +
            Name: pandas
         | 
| 3 | 
            +
            Version: 2.2.3
         | 
| 4 | 
            +
            Summary: Powerful data structures for data analysis, time series, and statistics
         | 
| 5 | 
            +
            Home-page: https://pandas.pydata.org
         | 
| 6 | 
            +
            Author-Email: The Pandas Development Team <[email protected]>
         | 
| 7 | 
            +
            License: BSD 3-Clause License
         | 
| 8 | 
            +
                    
         | 
| 9 | 
            +
                    Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team
         | 
| 10 | 
            +
                    All rights reserved.
         | 
| 11 | 
            +
                    
         | 
| 12 | 
            +
                    Copyright (c) 2011-2023, Open source contributors.
         | 
| 13 | 
            +
                    
         | 
| 14 | 
            +
                    Redistribution and use in source and binary forms, with or without
         | 
| 15 | 
            +
                    modification, are permitted provided that the following conditions are met:
         | 
| 16 | 
            +
                    
         | 
| 17 | 
            +
                    * Redistributions of source code must retain the above copyright notice, this
         | 
| 18 | 
            +
                      list of conditions and the following disclaimer.
         | 
| 19 | 
            +
                    
         | 
| 20 | 
            +
                    * Redistributions in binary form must reproduce the above copyright notice,
         | 
| 21 | 
            +
                      this list of conditions and the following disclaimer in the documentation
         | 
| 22 | 
            +
                      and/or other materials provided with the distribution.
         | 
| 23 | 
            +
                    
         | 
| 24 | 
            +
                    * Neither the name of the copyright holder nor the names of its
         | 
| 25 | 
            +
                      contributors may be used to endorse or promote products derived from
         | 
| 26 | 
            +
                      this software without specific prior written permission.
         | 
| 27 | 
            +
                    
         | 
| 28 | 
            +
                    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
         | 
| 29 | 
            +
                    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
         | 
| 30 | 
            +
                    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
         | 
| 31 | 
            +
                    DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
         | 
| 32 | 
            +
                    FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
         | 
| 33 | 
            +
                    DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
         | 
| 34 | 
            +
                    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
         | 
| 35 | 
            +
                    CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
         | 
| 36 | 
            +
                    OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
         | 
| 37 | 
            +
                    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         | 
| 38 | 
            +
                    Copyright (c) 2010-2019 Keith Goodman
         | 
| 39 | 
            +
                    Copyright (c) 2019 Bottleneck Developers
         | 
| 40 | 
            +
                    All rights reserved.
         | 
| 41 | 
            +
                    
         | 
| 42 | 
            +
                    Redistribution and use in source and binary forms, with or without
         | 
| 43 | 
            +
                    modification, are permitted provided that the following conditions are met:
         | 
| 44 | 
            +
                    
         | 
| 45 | 
            +
                        * Redistributions of source code must retain the above copyright notice,
         | 
| 46 | 
            +
                          this list of conditions and the following disclaimer.
         | 
| 47 | 
            +
                    
         | 
| 48 | 
            +
                        * Redistributions in binary form must reproduce the above copyright
         | 
| 49 | 
            +
                          notice, this list of conditions and the following disclaimer in the
         | 
| 50 | 
            +
                          documentation and/or other materials provided with the distribution.
         | 
| 51 | 
            +
                    
         | 
| 52 | 
            +
                    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
         | 
| 53 | 
            +
                    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
         | 
| 54 | 
            +
                    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
         | 
| 55 | 
            +
                    ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
         | 
| 56 | 
            +
                    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
         | 
| 57 | 
            +
                    CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
         | 
| 58 | 
            +
                    SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
         | 
| 59 | 
            +
                    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
         | 
| 60 | 
            +
                    CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
         | 
| 61 | 
            +
                    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
         | 
| 62 | 
            +
                    POSSIBILITY OF SUCH DAMAGE.Copyright 2017- Paul Ganssle <[email protected]>
         | 
| 63 | 
            +
                    Copyright 2017- dateutil contributors (see AUTHORS file)
         | 
| 64 | 
            +
                    
         | 
| 65 | 
            +
                       Licensed under the Apache License, Version 2.0 (the "License");
         | 
| 66 | 
            +
                       you may not use this file except in compliance with the License.
         | 
| 67 | 
            +
                       You may obtain a copy of the License at
         | 
| 68 | 
            +
                    
         | 
| 69 | 
            +
                           http://www.apache.org/licenses/LICENSE-2.0
         | 
| 70 | 
            +
                    
         | 
| 71 | 
            +
                       Unless required by applicable law or agreed to in writing, software
         | 
| 72 | 
            +
                       distributed under the License is distributed on an "AS IS" BASIS,
         | 
| 73 | 
            +
                       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
         | 
| 74 | 
            +
                       See the License for the specific language governing permissions and
         | 
| 75 | 
            +
                       limitations under the License.
         | 
| 76 | 
            +
                    
         | 
| 77 | 
            +
                    The above license applies to all contributions after 2017-12-01, as well as
         | 
| 78 | 
            +
                    all contributions that have been re-licensed (see AUTHORS file for the list of
         | 
| 79 | 
            +
                    contributors who have re-licensed their code).
         | 
| 80 | 
            +
                    --------------------------------------------------------------------------------
         | 
| 81 | 
            +
                    dateutil - Extensions to the standard Python datetime module.
         | 
| 82 | 
            +
                    
         | 
| 83 | 
            +
                    Copyright (c) 2003-2011 - Gustavo Niemeyer <[email protected]>
         | 
| 84 | 
            +
                    Copyright (c) 2012-2014 - Tomi Pieviläinen <[email protected]>
         | 
| 85 | 
            +
                    Copyright (c) 2014-2016 - Yaron de Leeuw <[email protected]>
         | 
| 86 | 
            +
                    Copyright (c) 2015-     - Paul Ganssle <[email protected]>
         | 
| 87 | 
            +
                    Copyright (c) 2015-     - dateutil contributors (see AUTHORS file)
         | 
| 88 | 
            +
                    
         | 
| 89 | 
            +
                    All rights reserved.
         | 
| 90 | 
            +
                    
         | 
| 91 | 
            +
                    Redistribution and use in source and binary forms, with or without
         | 
| 92 | 
            +
                    modification, are permitted provided that the following conditions are met:
         | 
| 93 | 
            +
                    
         | 
| 94 | 
            +
                        * Redistributions of source code must retain the above copyright notice,
         | 
| 95 | 
            +
                          this list of conditions and the following disclaimer.
         | 
| 96 | 
            +
                        * Redistributions in binary form must reproduce the above copyright notice,
         | 
| 97 | 
            +
                          this list of conditions and the following disclaimer in the documentation
         | 
| 98 | 
            +
                          and/or other materials provided with the distribution.
         | 
| 99 | 
            +
                        * Neither the name of the copyright holder nor the names of its
         | 
| 100 | 
            +
                          contributors may be used to endorse or promote products derived from
         | 
| 101 | 
            +
                          this software without specific prior written permission.
         | 
| 102 | 
            +
                    
         | 
| 103 | 
            +
                    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
         | 
| 104 | 
            +
                    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
         | 
| 105 | 
            +
                    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
         | 
| 106 | 
            +
                    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
         | 
| 107 | 
            +
                    CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
         | 
| 108 | 
            +
                    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
         | 
| 109 | 
            +
                    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
         | 
| 110 | 
            +
                    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
         | 
| 111 | 
            +
                    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
         | 
| 112 | 
            +
                    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
         | 
| 113 | 
            +
                    SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         | 
| 114 | 
            +
                    
         | 
| 115 | 
            +
                    The above BSD License Applies to all code, even that also covered by Apache 2.0.# MIT License
         | 
| 116 | 
            +
                    
         | 
| 117 | 
            +
                    Copyright (c) 2019 Hadley Wickham; RStudio; and Evan Miller
         | 
| 118 | 
            +
                    
         | 
| 119 | 
            +
                    Permission is hereby granted, free of charge, to any person obtaining a copy
         | 
| 120 | 
            +
                    of this software and associated documentation files (the "Software"), to deal
         | 
| 121 | 
            +
                    in the Software without restriction, including without limitation the rights
         | 
| 122 | 
            +
                    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
         | 
| 123 | 
            +
                    copies of the Software, and to permit persons to whom the Software is
         | 
| 124 | 
            +
                    furnished to do so, subject to the following conditions:
         | 
| 125 | 
            +
                    
         | 
| 126 | 
            +
                    The above copyright notice and this permission notice shall be included in all
         | 
| 127 | 
            +
                    copies or substantial portions of the Software.
         | 
| 128 | 
            +
                    
         | 
| 129 | 
            +
                    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
         | 
| 130 | 
            +
                    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
         | 
| 131 | 
            +
                    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
         | 
| 132 | 
            +
                    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
         | 
| 133 | 
            +
                    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
         | 
| 134 | 
            +
                    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
         | 
| 135 | 
            +
                    SOFTWARE.
         | 
| 136 | 
            +
                    Based on http://opensource.org/licenses/MIT
         | 
| 137 | 
            +
                    
         | 
| 138 | 
            +
                    This is a template. Complete and ship as file LICENSE the following 2
         | 
| 139 | 
            +
                    lines (only)
         | 
| 140 | 
            +
                    
         | 
| 141 | 
            +
                    YEAR:
         | 
| 142 | 
            +
                    COPYRIGHT HOLDER:
         | 
| 143 | 
            +
                    
         | 
| 144 | 
            +
                    and specify as
         | 
| 145 | 
            +
                    
         | 
| 146 | 
            +
                    License: MIT + file LICENSE
         | 
| 147 | 
            +
                    
         | 
| 148 | 
            +
                    Copyright (c) <YEAR>, <COPYRIGHT HOLDER>
         | 
| 149 | 
            +
                    
         | 
| 150 | 
            +
                    Permission is hereby granted, free of charge, to any person obtaining
         | 
| 151 | 
            +
                    a copy of this software and associated documentation files (the
         | 
| 152 | 
            +
                    "Software"), to deal in the Software without restriction, including
         | 
| 153 | 
            +
                    without limitation the rights to use, copy, modify, merge, publish,
         | 
| 154 | 
            +
                    distribute, sublicense, and/or sell copies of the Software, and to
         | 
| 155 | 
            +
                    permit persons to whom the Software is furnished to do so, subject to
         | 
| 156 | 
            +
                    the following conditions:
         | 
| 157 | 
            +
                    
         | 
| 158 | 
            +
                    The above copyright notice and this permission notice shall be
         | 
| 159 | 
            +
                    included in all copies or substantial portions of the Software.
         | 
| 160 | 
            +
                    
         | 
| 161 | 
            +
                    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
         | 
| 162 | 
            +
                    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
         | 
| 163 | 
            +
                    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
         | 
| 164 | 
            +
                    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
         | 
| 165 | 
            +
                    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
         | 
| 166 | 
            +
                    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
         | 
| 167 | 
            +
                    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
         | 
| 168 | 
            +
                    The MIT License
         | 
| 169 | 
            +
                    
         | 
| 170 | 
            +
                    Copyright (c) 2008-     Attractive Chaos <[email protected]>
         | 
| 171 | 
            +
                    
         | 
| 172 | 
            +
                    Permission is hereby granted, free of charge, to any person obtaining
         | 
| 173 | 
            +
                    a copy of this software and associated documentation files (the
         | 
| 174 | 
            +
                    "Software"), to deal in the Software without restriction, including
         | 
| 175 | 
            +
                    without limitation the rights to use, copy, modify, merge, publish,
         | 
| 176 | 
            +
                    distribute, sublicense, and/or sell copies of the Software, and to
         | 
| 177 | 
            +
                    permit persons to whom the Software is furnished to do so, subject to
         | 
| 178 | 
            +
                    the following conditions:
         | 
| 179 | 
            +
                    
         | 
| 180 | 
            +
                    The above copyright notice and this permission notice shall be
         | 
| 181 | 
            +
                    included in all copies or substantial portions of the Software.
         | 
| 182 | 
            +
                    
         | 
| 183 | 
            +
                    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
         | 
| 184 | 
            +
                    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
         | 
| 185 | 
            +
                    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
         | 
| 186 | 
            +
                    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
         | 
| 187 | 
            +
                    BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
         | 
| 188 | 
            +
                    ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
         | 
| 189 | 
            +
                    CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
         | 
| 190 | 
            +
                    SOFTWARE.musl as a whole is licensed under the following standard MIT license:
         | 
| 191 | 
            +
                    
         | 
| 192 | 
            +
                    ----------------------------------------------------------------------
         | 
| 193 | 
            +
                    Copyright © 2005-2020 Rich Felker, et al.
         | 
| 194 | 
            +
                    
         | 
| 195 | 
            +
                    Permission is hereby granted, free of charge, to any person obtaining
         | 
| 196 | 
            +
                    a copy of this software and associated documentation files (the
         | 
| 197 | 
            +
                    "Software"), to deal in the Software without restriction, including
         | 
| 198 | 
            +
                    without limitation the rights to use, copy, modify, merge, publish,
         | 
| 199 | 
            +
                    distribute, sublicense, and/or sell copies of the Software, and to
         | 
| 200 | 
            +
                    permit persons to whom the Software is furnished to do so, subject to
         | 
| 201 | 
            +
                    the following conditions:
         | 
| 202 | 
            +
                    
         | 
| 203 | 
            +
                    The above copyright notice and this permission notice shall be
         | 
| 204 | 
            +
                    included in all copies or substantial portions of the Software.
         | 
| 205 | 
            +
                    
         | 
| 206 | 
            +
                    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
         | 
| 207 | 
            +
                    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
         | 
| 208 | 
            +
                    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
         | 
| 209 | 
            +
                    IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
         | 
| 210 | 
            +
                    CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
         | 
| 211 | 
            +
                    TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
         | 
| 212 | 
            +
                    SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
         | 
| 213 | 
            +
                    ----------------------------------------------------------------------
         | 
| 214 | 
            +
                    
         | 
| 215 | 
            +
                    Authors/contributors include:
         | 
| 216 | 
            +
                    
         | 
| 217 | 
            +
                    A. Wilcox
         | 
| 218 | 
            +
                    Ada Worcester
         | 
| 219 | 
            +
                    Alex Dowad
         | 
| 220 | 
            +
                    Alex Suykov
         | 
| 221 | 
            +
                    Alexander Monakov
         | 
| 222 | 
            +
                    Andre McCurdy
         | 
| 223 | 
            +
                    Andrew Kelley
         | 
| 224 | 
            +
                    Anthony G. Basile
         | 
| 225 | 
            +
                    Aric Belsito
         | 
| 226 | 
            +
                    Arvid Picciani
         | 
| 227 | 
            +
                    Bartosz Brachaczek
         | 
| 228 | 
            +
                    Benjamin Peterson
         | 
| 229 | 
            +
                    Bobby Bingham
         | 
| 230 | 
            +
                    Boris Brezillon
         | 
| 231 | 
            +
                    Brent Cook
         | 
| 232 | 
            +
                    Chris Spiegel
         | 
| 233 | 
            +
                    Clément Vasseur
         | 
| 234 | 
            +
                    Daniel Micay
         | 
| 235 | 
            +
                    Daniel Sabogal
         | 
| 236 | 
            +
                    Daurnimator
         | 
| 237 | 
            +
                    David Carlier
         | 
| 238 | 
            +
                    David Edelsohn
         | 
| 239 | 
            +
                    Denys Vlasenko
         | 
| 240 | 
            +
                    Dmitry Ivanov
         | 
| 241 | 
            +
                    Dmitry V. Levin
         | 
| 242 | 
            +
                    Drew DeVault
         | 
| 243 | 
            +
                    Emil Renner Berthing
         | 
| 244 | 
            +
                    Fangrui Song
         | 
| 245 | 
            +
                    Felix Fietkau
         | 
| 246 | 
            +
                    Felix Janda
         | 
| 247 | 
            +
                    Gianluca Anzolin
         | 
| 248 | 
            +
                    Hauke Mehrtens
         | 
| 249 | 
            +
                    He X
         | 
| 250 | 
            +
                    Hiltjo Posthuma
         | 
| 251 | 
            +
                    Isaac Dunham
         | 
| 252 | 
            +
                    Jaydeep Patil
         | 
| 253 | 
            +
                    Jens Gustedt
         | 
| 254 | 
            +
                    Jeremy Huntwork
         | 
| 255 | 
            +
                    Jo-Philipp Wich
         | 
| 256 | 
            +
                    Joakim Sindholt
         | 
| 257 | 
            +
                    John Spencer
         | 
| 258 | 
            +
                    Julien Ramseier
         | 
| 259 | 
            +
                    Justin Cormack
         | 
| 260 | 
            +
                    Kaarle Ritvanen
         | 
| 261 | 
            +
                    Khem Raj
         | 
| 262 | 
            +
                    Kylie McClain
         | 
| 263 | 
            +
                    Leah Neukirchen
         | 
| 264 | 
            +
                    Luca Barbato
         | 
| 265 | 
            +
                    Luka Perkov
         | 
| 266 | 
            +
                    M Farkas-Dyck (Strake)
         | 
| 267 | 
            +
                    Mahesh Bodapati
         | 
| 268 | 
            +
                    Markus Wichmann
         | 
| 269 | 
            +
                    Masanori Ogino
         | 
| 270 | 
            +
                    Michael Clark
         | 
| 271 | 
            +
                    Michael Forney
         | 
| 272 | 
            +
                    Mikhail Kremnyov
         | 
| 273 | 
            +
                    Natanael Copa
         | 
| 274 | 
            +
                    Nicholas J. Kain
         | 
| 275 | 
            +
                    orc
         | 
| 276 | 
            +
                    Pascal Cuoq
         | 
| 277 | 
            +
                    Patrick Oppenlander
         | 
| 278 | 
            +
                    Petr Hosek
         | 
| 279 | 
            +
                    Petr Skocik
         | 
| 280 | 
            +
                    Pierre Carrier
         | 
| 281 | 
            +
                    Reini Urban
         | 
| 282 | 
            +
                    Rich Felker
         | 
| 283 | 
            +
                    Richard Pennington
         | 
| 284 | 
            +
                    Ryan Fairfax
         | 
| 285 | 
            +
                    Samuel Holland
         | 
| 286 | 
            +
                    Segev Finer
         | 
| 287 | 
            +
                    Shiz
         | 
| 288 | 
            +
                    sin
         | 
| 289 | 
            +
                    Solar Designer
         | 
| 290 | 
            +
                    Stefan Kristiansson
         | 
| 291 | 
            +
                    Stefan O'Rear
         | 
| 292 | 
            +
                    Szabolcs Nagy
         | 
| 293 | 
            +
                    Timo Teräs
         | 
| 294 | 
            +
                    Trutz Behn
         | 
| 295 | 
            +
                    Valentin Ochs
         | 
| 296 | 
            +
                    Will Dietz
         | 
| 297 | 
            +
                    William Haddon
         | 
| 298 | 
            +
                    William Pitcock
         | 
| 299 | 
            +
                    
         | 
| 300 | 
            +
                    Portions of this software are derived from third-party works licensed
         | 
| 301 | 
            +
                    under terms compatible with the above MIT license:
         | 
| 302 | 
            +
                    
         | 
| 303 | 
            +
                    The TRE regular expression implementation (src/regex/reg* and
         | 
| 304 | 
            +
                    src/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed
         | 
| 305 | 
            +
                    under a 2-clause BSD license (license text in the source files). The
         | 
| 306 | 
            +
                    included version has been heavily modified by Rich Felker in 2012, in
         | 
| 307 | 
            +
                    the interests of size, simplicity, and namespace cleanliness.
         | 
| 308 | 
            +
                    
         | 
| 309 | 
            +
                    Much of the math library code (src/math/* and src/complex/*) is
         | 
| 310 | 
            +
                    Copyright © 1993,2004 Sun Microsystems or
         | 
| 311 | 
            +
                    Copyright © 2003-2011 David Schultz or
         | 
| 312 | 
            +
                    Copyright © 2003-2009 Steven G. Kargl or
         | 
| 313 | 
            +
                    Copyright © 2003-2009 Bruce D. Evans or
         | 
| 314 | 
            +
                    Copyright © 2008 Stephen L. Moshier or
         | 
| 315 | 
            +
                    Copyright © 2017-2018 Arm Limited
         | 
| 316 | 
            +
                    and labelled as such in comments in the individual source files. All
         | 
| 317 | 
            +
                    have been licensed under extremely permissive terms.
         | 
| 318 | 
            +
                    
         | 
| 319 | 
            +
                    The ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008
         | 
| 320 | 
            +
                    The Android Open Source Project and is licensed under a two-clause BSD
         | 
| 321 | 
            +
                    license. It was taken from Bionic libc, used on Android.
         | 
| 322 | 
            +
                    
         | 
| 323 | 
            +
                    The AArch64 memcpy and memset code (src/string/aarch64/*) are
         | 
| 324 | 
            +
                    Copyright © 1999-2019, Arm Limited.
         | 
| 325 | 
            +
                    
         | 
| 326 | 
            +
                    The implementation of DES for crypt (src/crypt/crypt_des.c) is
         | 
| 327 | 
            +
                    Copyright © 1994 David Burren. It is licensed under a BSD license.
         | 
| 328 | 
            +
                    
         | 
| 329 | 
            +
                    The implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was
         | 
| 330 | 
            +
                    originally written by Solar Designer and placed into the public
         | 
| 331 | 
            +
                    domain. The code also comes with a fallback permissive license for use
         | 
| 332 | 
            +
                    in jurisdictions that may not recognize the public domain.
         | 
| 333 | 
            +
                    
         | 
| 334 | 
            +
                    The smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011
         | 
| 335 | 
            +
                    Valentin Ochs and is licensed under an MIT-style license.
         | 
| 336 | 
            +
                    
         | 
| 337 | 
            +
                    The x86_64 port was written by Nicholas J. Kain and is licensed under
         | 
| 338 | 
            +
                    the standard MIT terms.
         | 
| 339 | 
            +
                    
         | 
| 340 | 
            +
                    The mips and microblaze ports were originally written by Richard
         | 
| 341 | 
            +
                    Pennington for use in the ellcc project. The original code was adapted
         | 
| 342 | 
            +
                    by Rich Felker for build system and code conventions during upstream
         | 
| 343 | 
            +
                    integration. It is licensed under the standard MIT terms.
         | 
| 344 | 
            +
                    
         | 
| 345 | 
            +
                    The mips64 port was contributed by Imagination Technologies and is
         | 
| 346 | 
            +
                    licensed under the standard MIT terms.
         | 
| 347 | 
            +
                    
         | 
| 348 | 
            +
                    The powerpc port was also originally written by Richard Pennington,
         | 
| 349 | 
            +
                    and later supplemented and integrated by John Spencer. It is licensed
         | 
| 350 | 
            +
                    under the standard MIT terms.
         | 
| 351 | 
            +
                    
         | 
| 352 | 
            +
                    All other files which have no copyright comments are original works
         | 
| 353 | 
            +
                    produced specifically for use as part of this library, written either
         | 
| 354 | 
            +
                    by Rich Felker, the main author of the library, or by one or more
         | 
| 355 | 
            +
                    contibutors listed above. Details on authorship of individual files
         | 
| 356 | 
            +
                    can be found in the git version control history of the project. The
         | 
| 357 | 
            +
                    omission of copyright and license comments in each file is in the
         | 
+interest of source tree size.
+
+In addition, permission is hereby granted for all public header files
+(include/* and arch/*/bits/*) and crt files intended to be linked into
+applications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit
+the copyright notice and permission notice otherwise required by the
+license, and to use these files without any requirement of
+attribution. These files include substantial contributions from:
+
+Bobby Bingham
+John Spencer
+Nicholas J. Kain
+Rich Felker
+Richard Pennington
+Stefan Kristiansson
+Szabolcs Nagy
+
+all of whom have explicitly granted such permission.
+
+This file previously contained text expressing a belief that most of
+the files covered by the above exception were sufficiently trivial not
+to be subject to copyright, resulting in confusion over whether it
+negated the permissions granted in the license. In the spirit of
+permissive licensing, and of not having licensing issues being an
+obstacle to adoption, that text has been removed.
+Copyright (c) 2005-2023, NumPy Developers.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+       notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+       copyright notice, this list of conditions and the following
+       disclaimer in the documentation and/or other materials provided
+       with the distribution.
+
+    * Neither the name of the NumPy Developers nor the names of any
+       contributors may be used to endorse or promote products derived
+       from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+
+Copyright (c) Donald Stufft and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    1. Redistributions of source code must retain the above copyright notice,
+       this list of conditions and the following disclaimer.
+
+    2. Redistributions in binary form must reproduce the above copyright
+       notice, this list of conditions and the following disclaimer in the
+       documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC.  Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation.  In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition).  Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2 and above   2.1.1       2001-now    PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+    the GPL.  All Python licenses, unlike the GPL, let you distribute
+    a modified version without making your changes open source.  The
+    GPL-compatible licenses make it possible to combine Python with
+    other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+    because its license has a choice of law clause.  According to
+    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+    is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+Copyright (c) 2014, Al Sweigart
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of the {organization} nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Copyright (c) 2017 Anthony Sottile
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+Copyright (c) 2015-2019 Jared Hobbs
| 936 | 
            +
                    
         | 
| 937 | 
            +
                    Permission is hereby granted, free of charge, to any person obtaining a copy of
         | 
| 938 | 
            +
                    this software and associated documentation files (the "Software"), to deal in
         | 
| 939 | 
            +
                    the Software without restriction, including without limitation the rights to
         | 
| 940 | 
            +
                    use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
         | 
| 941 | 
            +
                    of the Software, and to permit persons to whom the Software is furnished to do
         | 
| 942 | 
            +
                    so, subject to the following conditions:
         | 
| 943 | 
            +
                    
         | 
| 944 | 
            +
                    The above copyright notice and this permission notice shall be included in all
         | 
| 945 | 
            +
                    copies or substantial portions of the Software.
         | 
| 946 | 
            +
                    
         | 
| 947 | 
            +
                    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
         | 
| 948 | 
            +
                    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
         | 
| 949 | 
            +
                    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
         | 
| 950 | 
            +
                    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
         | 
| 951 | 
            +
                    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
         | 
| 952 | 
            +
                    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
         | 
| 953 | 
            +
                    SOFTWARE.Developed by ESN, an Electronic Arts Inc. studio.
         | 
| 954 | 
            +
                    Copyright (c) 2014, Electronic Arts Inc.
         | 
| 955 | 
            +
                    All rights reserved.
         | 
| 956 | 
            +
                    
         | 
| 957 | 
            +
                    Redistribution and use in source and binary forms, with or without
         | 
| 958 | 
            +
                    modification, are permitted provided that the following conditions are met:
         | 
| 959 | 
            +
                    * Redistributions of source code must retain the above copyright
         | 
| 960 | 
            +
                    notice, this list of conditions and the following disclaimer.
         | 
| 961 | 
            +
                    * Redistributions in binary form must reproduce the above copyright
         | 
| 962 | 
            +
                    notice, this list of conditions and the following disclaimer in the
         | 
| 963 | 
            +
                    documentation and/or other materials provided with the distribution.
         | 
| 964 | 
            +
                    * Neither the name of ESN, Electronic Arts Inc. nor the
         | 
| 965 | 
            +
                    names of its contributors may be used to endorse or promote products
         | 
| 966 | 
            +
                    derived from this software without specific prior written permission.
         | 
| 967 | 
            +
                    
         | 
| 968 | 
            +
                    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
         | 
| 969 | 
            +
                    ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
         | 
| 970 | 
            +
                    WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
         | 
| 971 | 
            +
                    DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS INC. BE LIABLE
         | 
| 972 | 
            +
                    FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
         | 
| 973 | 
            +
                    (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
         | 
| 974 | 
            +
                    LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
         | 
| 975 | 
            +
                    ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
         | 
| 976 | 
            +
                    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
         | 
| 977 | 
            +
                    SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         | 
| 978 | 
            +
                    
         | 
| 979 | 
            +
                    ----
         | 
| 980 | 
            +
                    
         | 
| 981 | 
            +
                    Portions of code from MODP_ASCII - Ascii transformations (upper/lower, etc)
         | 
| 982 | 
            +
                    https://github.com/client9/stringencoders
         | 
| 983 | 
            +
                    
         | 
| 984 | 
            +
                      Copyright 2005, 2006, 2007
         | 
| 985 | 
            +
                      Nick Galbreath -- nickg [at] modp [dot] com
         | 
| 986 | 
            +
                      All rights reserved.
         | 
| 987 | 
            +
                    
         | 
| 988 | 
            +
                      Redistribution and use in source and binary forms, with or without
         | 
| 989 | 
            +
                      modification, are permitted provided that the following conditions are
         | 
| 990 | 
            +
                      met:
         | 
| 991 | 
            +
                    
         | 
| 992 | 
            +
                        Redistributions of source code must retain the above copyright
         | 
| 993 | 
            +
                        notice, this list of conditions and the following disclaimer.
         | 
| 994 | 
            +
                    
         | 
| 995 | 
            +
                        Redistributions in binary form must reproduce the above copyright
         | 
| 996 | 
            +
                        notice, this list of conditions and the following disclaimer in the
         | 
| 997 | 
            +
                        documentation and/or other materials provided with the distribution.
         | 
| 998 | 
            +
                    
         | 
| 999 | 
            +
                        Neither the name of the modp.com nor the names of its
         | 
| 1000 | 
            +
                        contributors may be used to endorse or promote products derived from
         | 
| 1001 | 
            +
                        this software without specific prior written permission.
         | 
| 1002 | 
            +
                    
         | 
| 1003 | 
            +
                      THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
         | 
| 1004 | 
            +
                      "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
         | 
| 1005 | 
            +
                      LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
         | 
| 1006 | 
            +
                      A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
         | 
| 1007 | 
            +
                      OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
         | 
| 1008 | 
            +
                      SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
         | 
| 1009 | 
            +
                      LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
         | 
| 1010 | 
            +
                      DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
         | 
| 1011 | 
            +
                      THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
         | 
| 1012 | 
            +
                      (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
         | 
| 1013 | 
            +
                      OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         | 
| 1014 | 
            +
                    
         | 
| 1015 | 
            +
                      This is the standard "new" BSD license:
         | 
| 1016 | 
            +
                      http://www.opensource.org/licenses/bsd-license.php
         | 
| 1017 | 
            +
                    
         | 
| 1018 | 
            +
                    https://github.com/client9/stringencoders/blob/cfd5c1507325ae497ea9bacdacba12c0ffd79d30/COPYING
         | 
| 1019 | 
            +
                    
         | 
| 1020 | 
            +
                    ----
         | 
| 1021 | 
            +
                    
         | 
| 1022 | 
            +
                    Numeric decoder derived from from TCL library
         | 
| 1023 | 
            +
                    https://opensource.apple.com/source/tcl/tcl-14/tcl/license.terms
         | 
| 1024 | 
            +
                     * Copyright (c) 1988-1993 The Regents of the University of California.
         | 
| 1025 | 
            +
                     * Copyright (c) 1994 Sun Microsystems, Inc.
         | 
| 1026 | 
            +
                    
         | 
| 1027 | 
            +
                      This software is copyrighted by the Regents of the University of
         | 
| 1028 | 
            +
                      California, Sun Microsystems, Inc., Scriptics Corporation, ActiveState
         | 
| 1029 | 
            +
                      Corporation and other parties.  The following terms apply to all files
         | 
| 1030 | 
            +
                      associated with the software unless explicitly disclaimed in
         | 
| 1031 | 
            +
                      individual files.
         | 
| 1032 | 
            +
                    
         | 
| 1033 | 
            +
                      The authors hereby grant permission to use, copy, modify, distribute,
         | 
| 1034 | 
            +
                      and license this software and its documentation for any purpose, provided
         | 
| 1035 | 
            +
                      that existing copyright notices are retained in all copies and that this
         | 
| 1036 | 
            +
                      notice is included verbatim in any distributions. No written agreement,
         | 
| 1037 | 
            +
                      license, or royalty fee is required for any of the authorized uses.
         | 
| 1038 | 
            +
                      Modifications to this software may be copyrighted by their authors
         | 
| 1039 | 
            +
                      and need not follow the licensing terms described here, provided that
         | 
| 1040 | 
            +
                      the new terms are clearly indicated on the first page of each file where
         | 
| 1041 | 
            +
                      they apply.
         | 
| 1042 | 
            +
                    
         | 
| 1043 | 
            +
                      IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
         | 
| 1044 | 
            +
                      FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
         | 
| 1045 | 
            +
                      ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
         | 
| 1046 | 
            +
                      DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
         | 
| 1047 | 
            +
                      POSSIBILITY OF SUCH DAMAGE.
         | 
| 1048 | 
            +
                    
         | 
| 1049 | 
            +
                      THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
         | 
| 1050 | 
            +
                      INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
         | 
| 1051 | 
            +
                      FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT.  THIS SOFTWARE
         | 
| 1052 | 
            +
                      IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
         | 
| 1053 | 
            +
                      NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
         | 
| 1054 | 
            +
                      MODIFICATIONS.
         | 
| 1055 | 
            +
                    
         | 
| 1056 | 
            +
                      GOVERNMENT USE: If you are acquiring this software on behalf of the
         | 
| 1057 | 
            +
                      U.S. government, the Government shall have only "Restricted Rights"
         | 
| 1058 | 
            +
                      in the software and related documentation as defined in the Federal
         | 
| 1059 | 
            +
                      Acquisition Regulations (FARs) in Clause 52.227.19 (c) (2).  If you
         | 
| 1060 | 
            +
                      are acquiring the software on behalf of the Department of Defense, the
         | 
| 1061 | 
            +
                      software shall be classified as "Commercial Computer Software" and the
         | 
| 1062 | 
            +
                      Government shall have only "Restricted Rights" as defined in Clause
         | 
| 1063 | 
            +
                      252.227-7013 (c) (1) of DFARs.  Notwithstanding the foregoing, the
         | 
| 1064 | 
            +
                      authors grant the U.S. Government and others acting in its behalf
         | 
| 1065 | 
            +
                      permission to use and distribute the software in accordance with the
         | 
| 1066 | 
            +
                      terms specified in this license.Apache License
         | 
| 1067 | 
            +
                    Version 2.0, January 2004
         | 
| 1068 | 
            +
                    http://www.apache.org/licenses/
         | 
| 1069 | 
            +
                    
         | 
| 1070 | 
            +
                    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
         | 
| 1071 | 
            +
                    
         | 
| 1072 | 
            +
                    1. Definitions.
         | 
| 1073 | 
            +
                    
         | 
| 1074 | 
            +
                    "License" shall mean the terms and conditions for use, reproduction, and
         | 
| 1075 | 
            +
                    distribution as defined by Sections 1 through 9 of this document.
         | 
| 1076 | 
            +
                    
         | 
| 1077 | 
            +
                    "Licensor" shall mean the copyright owner or entity authorized by the copyright
         | 
| 1078 | 
            +
                    owner that is granting the License.
         | 
| 1079 | 
            +
                    
         | 
| 1080 | 
            +
                    "Legal Entity" shall mean the union of the acting entity and all other entities
         | 
| 1081 | 
            +
                    that control, are controlled by, or are under common control with that entity.
         | 
| 1082 | 
            +
                    For the purposes of this definition, "control" means (i) the power, direct or
         | 
| 1083 | 
            +
                    indirect, to cause the direction or management of such entity, whether by
         | 
| 1084 | 
            +
                    contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the
         | 
| 1085 | 
            +
                    outstanding shares, or (iii) beneficial ownership of such entity.
         | 
| 1086 | 
            +
                    
         | 
| 1087 | 
            +
                    "You" (or "Your") shall mean an individual or Legal Entity exercising
         | 
| 1088 | 
            +
                    permissions granted by this License.
         | 
| 1089 | 
            +
                    
         | 
| 1090 | 
            +
                    "Source" form shall mean the preferred form for making modifications, including
         | 
| 1091 | 
            +
                    but not limited to software source code, documentation source, and configuration
         | 
| 1092 | 
            +
                    files.
         | 
| 1093 | 
            +
                    
         | 
| 1094 | 
            +
                    "Object" form shall mean any form resulting from mechanical transformation or
         | 
| 1095 | 
            +
                    translation of a Source form, including but not limited to compiled object code,
         | 
| 1096 | 
            +
                    generated documentation, and conversions to other media types.
         | 
| 1097 | 
            +
                    
         | 
| 1098 | 
            +
                    "Work" shall mean the work of authorship, whether in Source or Object form, made
         | 
| 1099 | 
            +
                    available under the License, as indicated by a copyright notice that is included
         | 
| 1100 | 
            +
                    in or attached to the work (an example is provided in the Appendix below).
         | 
| 1101 | 
            +
                    
         | 
| 1102 | 
            +
                    "Derivative Works" shall mean any work, whether in Source or Object form, that
         | 
| 1103 | 
            +
                    is based on (or derived from) the Work and for which the editorial revisions,
         | 
| 1104 | 
            +
                    annotations, elaborations, or other modifications represent, as a whole, an
         | 
| 1105 | 
            +
                    original work of authorship. For the purposes of this License, Derivative Works
         | 
| 1106 | 
            +
                    shall not include works that remain separable from, or merely link (or bind by
         | 
| 1107 | 
            +
                    name) to the interfaces of, the Work and Derivative Works thereof.
         | 
| 1108 | 
            +
                    
         | 
| 1109 | 
            +
                    "Contribution" shall mean any work of authorship, including the original version
         | 
| 1110 | 
            +
                    of the Work and any modifications or additions to that Work or Derivative Works
         | 
| 1111 | 
            +
                    thereof, that is intentionally submitted to Licensor for inclusion in the Work
         | 
| 1112 | 
            +
                    by the copyright owner or by an individual or Legal Entity authorized to submit
         | 
| 1113 | 
            +
                    on behalf of the copyright owner. For the purposes of this definition,
         | 
| 1114 | 
            +
                    "submitted" means any form of electronic, verbal, or written communication sent
         | 
| 1115 | 
            +
                    to the Licensor or its representatives, including but not limited to
         | 
| 1116 | 
            +
                    communication on electronic mailing lists, source code control systems, and
         | 
| 1117 | 
            +
                    issue tracking systems that are managed by, or on behalf of, the Licensor for
         | 
| 1118 | 
            +
                    the purpose of discussing and improving the Work, but excluding communication
         | 
| 1119 | 
            +
                    that is conspicuously marked or otherwise designated in writing by the copyright
         | 
| 1120 | 
            +
                    owner as "Not a Contribution."
         | 
| 1121 | 
            +
                    
         | 
| 1122 | 
            +
                    "Contributor" shall mean Licensor and any individual or Legal Entity on behalf
         | 
| 1123 | 
            +
                    of whom a Contribution has been received by Licensor and subsequently
         | 
| 1124 | 
            +
                    incorporated within the Work.
         | 
| 1125 | 
            +
                    
         | 
| 1126 | 
            +
                    2. Grant of Copyright License.
         | 
| 1127 | 
            +
                    
         | 
| 1128 | 
            +
                    Subject to the terms and conditions of this License, each Contributor hereby
         | 
| 1129 | 
            +
                    grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
         | 
| 1130 | 
            +
                    irrevocable copyright license to reproduce, prepare Derivative Works of,
         | 
| 1131 | 
            +
                    publicly display, publicly perform, sublicense, and distribute the Work and such
         | 
| 1132 | 
            +
                    Derivative Works in Source or Object form.
         | 
| 1133 | 
            +
                    
         | 
| 1134 | 
            +
                    3. Grant of Patent License.
         | 
| 1135 | 
            +
                    
         | 
| 1136 | 
            +
                    Subject to the terms and conditions of this License, each Contributor hereby
         | 
| 1137 | 
            +
                    grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
         | 
| 1138 | 
            +
                    irrevocable (except as stated in this section) patent license to make, have
         | 
| 1139 | 
            +
                    made, use, offer to sell, sell, import, and otherwise transfer the Work, where
         | 
| 1140 | 
            +
                    such license applies only to those patent claims licensable by such Contributor
         | 
| 1141 | 
            +
                    that are necessarily infringed by their Contribution(s) alone or by combination
         | 
| 1142 | 
            +
                    of their Contribution(s) with the Work to which such Contribution(s) was
         | 
| 1143 | 
            +
                    submitted. If You institute patent litigation against any entity (including a
         | 
| 1144 | 
            +
                    cross-claim or counterclaim in a lawsuit) alleging that the Work or a
         | 
| 1145 | 
            +
                    Contribution incorporated within the Work constitutes direct or contributory
         | 
| 1146 | 
            +
                    patent infringement, then any patent licenses granted to You under this License
         | 
| 1147 | 
            +
                    for that Work shall terminate as of the date such litigation is filed.
         | 
| 1148 | 
            +
                    
         | 
| 1149 | 
            +
                    4. Redistribution.
         | 
| 1150 | 
            +
                    
         | 
| 1151 | 
            +
                    You may reproduce and distribute copies of the Work or Derivative Works thereof
         | 
| 1152 | 
            +
                    in any medium, with or without modifications, and in Source or Object form,
         | 
| 1153 | 
            +
                    provided that You meet the following conditions:
         | 
| 1154 | 
            +
                    
         | 
| 1155 | 
            +
                    You must give any other recipients of the Work or Derivative Works a copy of
         | 
| 1156 | 
            +
                    this License; and
         | 
| 1157 | 
            +
                    You must cause any modified files to carry prominent notices stating that You
         | 
| 1158 | 
            +
                    changed the files; and
         | 
| 1159 | 
            +
                    You must retain, in the Source form of any Derivative Works that You distribute,
         | 
| 1160 | 
            +
                    all copyright, patent, trademark, and attribution notices from the Source form
         | 
| 1161 | 
            +
                    of the Work, excluding those notices that do not pertain to any part of the
         | 
| 1162 | 
            +
                    Derivative Works; and
         | 
| 1163 | 
            +
                    If the Work includes a "NOTICE" text file as part of its distribution, then any
         | 
| 1164 | 
            +
                    Derivative Works that You distribute must include a readable copy of the
         | 
| 1165 | 
            +
                    attribution notices contained within such NOTICE file, excluding those notices
         | 
| 1166 | 
            +
                    that do not pertain to any part of the Derivative Works, in at least one of the
         | 
| 1167 | 
            +
                    following places: within a NOTICE text file distributed as part of the
         | 
| 1168 | 
            +
                    Derivative Works; within the Source form or documentation, if provided along
         | 
| 1169 | 
            +
                    with the Derivative Works; or, within a display generated by the Derivative
         | 
| 1170 | 
            +
                    Works, if and wherever such third-party notices normally appear. The contents of
         | 
| 1171 | 
            +
                    the NOTICE file are for informational purposes only and do not modify the
         | 
| 1172 | 
            +
                    License. You may add Your own attribution notices within Derivative Works that
         | 
| 1173 | 
            +
                    You distribute, alongside or as an addendum to the NOTICE text from the Work,
         | 
| 1174 | 
            +
                    provided that such additional attribution notices cannot be construed as
         | 
| 1175 | 
            +
                    modifying the License.
         | 
| 1176 | 
            +
                    You may add Your own copyright statement to Your modifications and may provide
         | 
| 1177 | 
            +
                    additional or different license terms and conditions for use, reproduction, or
         | 
| 1178 | 
            +
                    distribution of Your modifications, or for any such Derivative Works as a whole,
         | 
| 1179 | 
            +
                    provided Your use, reproduction, and distribution of the Work otherwise complies
         | 
| 1180 | 
            +
                    with the conditions stated in this License.
         | 
| 1181 | 
            +
                    
         | 
| 1182 | 
            +
                    5. Submission of Contributions.
         | 
| 1183 | 
            +
                    
         | 
| 1184 | 
            +
                    Unless You explicitly state otherwise, any Contribution intentionally submitted
         | 
| 1185 | 
            +
                    for inclusion in the Work by You to the Licensor shall be under the terms and
         | 
| 1186 | 
            +
                    conditions of this License, without any additional terms or conditions.
         | 
| 1187 | 
            +
                    Notwithstanding the above, nothing herein shall supersede or modify the terms of
         | 
| 1188 | 
            +
                    any separate license agreement you may have executed with Licensor regarding
         | 
| 1189 | 
            +
                    such Contributions.
         | 
| 1190 | 
            +
                    
         | 
| 1191 | 
            +
                    6. Trademarks.
         | 
| 1192 | 
            +
                    
         | 
| 1193 | 
            +
                    This License does not grant permission to use the trade names, trademarks,
         | 
| 1194 | 
            +
                    service marks, or product names of the Licensor, except as required for
         | 
| 1195 | 
            +
                    reasonable and customary use in describing the origin of the Work and
         | 
| 1196 | 
            +
                    reproducing the content of the NOTICE file.
         | 
| 1197 | 
            +
                    
         | 
| 1198 | 
            +
                    7. Disclaimer of Warranty.
         | 
| 1199 | 
            +
                    
         | 
| 1200 | 
            +
                    Unless required by applicable law or agreed to in writing, Licensor provides the
         | 
| 1201 | 
            +
                    Work (and each Contributor provides its Contributions) on an "AS IS" BASIS,
         | 
| 1202 | 
            +
                    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
         | 
| 1203 | 
            +
                    including, without limitation, any warranties or conditions of TITLE,
         | 
| 1204 | 
            +
                    NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
         | 
| 1205 | 
            +
                    solely responsible for determining the appropriateness of using or
         | 
| 1206 | 
            +
                    redistributing the Work and assume any risks associated with Your exercise of
         | 
| 1207 | 
            +
                    permissions under this License.
         | 
| 1208 | 
            +
                    
         | 
| 1209 | 
            +
                    8. Limitation of Liability.
         | 
| 1210 | 
            +
                    
         | 
| 1211 | 
            +
                    In no event and under no legal theory, whether in tort (including negligence),
         | 
| 1212 | 
            +
                    contract, or otherwise, unless required by applicable law (such as deliberate
         | 
| 1213 | 
            +
                    and grossly negligent acts) or agreed to in writing, shall any Contributor be
         | 
| 1214 | 
            +
                    liable to You for damages, including any direct, indirect, special, incidental,
         | 
| 1215 | 
            +
                    or consequential damages of any character arising as a result of this License or
         | 
| 1216 | 
            +
                    out of the use or inability to use the Work (including but not limited to
         | 
| 1217 | 
            +
                    damages for loss of goodwill, work stoppage, computer failure or malfunction, or
         | 
| 1218 | 
            +
                    any and all other commercial damages or losses), even if such Contributor has
         | 
| 1219 | 
            +
                    been advised of the possibility of such damages.
         | 
| 1220 | 
            +
                    
         | 
| 1221 | 
            +
                    9. Accepting Warranty or Additional Liability.
         | 
| 1222 | 
            +
                    
         | 
| 1223 | 
            +
                    While redistributing the Work or Derivative Works thereof, You may choose to
         | 
| 1224 | 
            +
                    offer, and charge a fee for, acceptance of support, warranty, indemnity, or
         | 
| 1225 | 
            +
                    other liability obligations and/or rights consistent with this License. However,
         | 
| 1226 | 
            +
                    in accepting such obligations, You may act only on Your own behalf and on Your
         | 
| 1227 | 
            +
                    sole responsibility, not on behalf of any other Contributor, and only if You
         | 
| 1228 | 
            +
                    agree to indemnify, defend, and hold each Contributor harmless for any liability
         | 
| 1229 | 
            +
                    incurred by, or claims asserted against, such Contributor by reason of your
         | 
| 1230 | 
            +
                    accepting any such warranty or additional liability.
         | 
| 1231 | 
            +
                    
         | 
| 1232 | 
            +
                    END OF TERMS AND CONDITIONS
         | 
| 1233 | 
            +
                    
         | 
| 1234 | 
            +
                    APPENDIX: How to apply the Apache License to your work
         | 
| 1235 | 
            +
                    
         | 
| 1236 | 
            +
                    To apply the Apache License to your work, attach the following boilerplate
         | 
| 1237 | 
            +
                    notice, with the fields enclosed by brackets "[]" replaced with your own
         | 
| 1238 | 
            +
                    identifying information. (Don't include the brackets!) The text should be
         | 
| 1239 | 
            +
                    enclosed in the appropriate comment syntax for the file format. We also
         | 
| 1240 | 
            +
                    recommend that a file or class name and description of purpose be included on
         | 
| 1241 | 
            +
                    the same "printed page" as the copyright notice for easier identification within
         | 
| 1242 | 
            +
                    third-party archives.
         | 
| 1243 | 
            +
                    
         | 
| 1244 | 
            +
                       Copyright [yyyy] [name of copyright owner]
         | 
| 1245 | 
            +
                    
         | 
| 1246 | 
            +
                       Licensed under the Apache License, Version 2.0 (the "License");
         | 
| 1247 | 
            +
                       you may not use this file except in compliance with the License.
         | 
| 1248 | 
            +
                       You may obtain a copy of the License at
         | 
| 1249 | 
            +
                    
         | 
| 1250 | 
            +
                         http://www.apache.org/licenses/LICENSE-2.0
         | 
| 1251 | 
            +
                    
         | 
| 1252 | 
            +
                       Unless required by applicable law or agreed to in writing, software
         | 
| 1253 | 
            +
                       distributed under the License is distributed on an "AS IS" BASIS,
         | 
| 1254 | 
            +
                       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
         | 
| 1255 | 
            +
                       See the License for the specific language governing permissions and
         | 
| 1256 | 
            +
                       limitations under the License.
         | 
| 1257 | 
            +
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Console
Classifier: Intended Audience :: Science/Research
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Cython
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Scientific/Engineering
Project-URL: Homepage, https://pandas.pydata.org
Project-URL: Documentation, https://pandas.pydata.org/docs/
Project-URL: Repository, https://github.com/pandas-dev/pandas
Requires-Python: >=3.9
Requires-Dist: numpy>=1.22.4; python_version < "3.11"
Requires-Dist: numpy>=1.23.2; python_version == "3.11"
Requires-Dist: numpy>=1.26.0; python_version >= "3.12"
Requires-Dist: python-dateutil>=2.8.2
Requires-Dist: pytz>=2020.1
Requires-Dist: tzdata>=2022.7
Requires-Dist: hypothesis>=6.46.1; extra == "test"
Requires-Dist: pytest>=7.3.2; extra == "test"
Requires-Dist: pytest-xdist>=2.2.0; extra == "test"
Requires-Dist: pyarrow>=10.0.1; extra == "pyarrow"
Requires-Dist: bottleneck>=1.3.6; extra == "performance"
Requires-Dist: numba>=0.56.4; extra == "performance"
Requires-Dist: numexpr>=2.8.4; extra == "performance"
Requires-Dist: scipy>=1.10.0; extra == "computation"
Requires-Dist: xarray>=2022.12.0; extra == "computation"
Requires-Dist: fsspec>=2022.11.0; extra == "fss"
Requires-Dist: s3fs>=2022.11.0; extra == "aws"
Requires-Dist: gcsfs>=2022.11.0; extra == "gcp"
Requires-Dist: pandas-gbq>=0.19.0; extra == "gcp"
Requires-Dist: odfpy>=1.4.1; extra == "excel"
Requires-Dist: openpyxl>=3.1.0; extra == "excel"
Requires-Dist: python-calamine>=0.1.7; extra == "excel"
Requires-Dist: pyxlsb>=1.0.10; extra == "excel"
Requires-Dist: xlrd>=2.0.1; extra == "excel"
Requires-Dist: xlsxwriter>=3.0.5; extra == "excel"
Requires-Dist: pyarrow>=10.0.1; extra == "parquet"
Requires-Dist: pyarrow>=10.0.1; extra == "feather"
Requires-Dist: tables>=3.8.0; extra == "hdf5"
Requires-Dist: pyreadstat>=1.2.0; extra == "spss"
Requires-Dist: SQLAlchemy>=2.0.0; extra == "postgresql"
Requires-Dist: psycopg2>=2.9.6; extra == "postgresql"
Requires-Dist: adbc-driver-postgresql>=0.8.0; extra == "postgresql"
Requires-Dist: SQLAlchemy>=2.0.0; extra == "mysql"
Requires-Dist: pymysql>=1.0.2; extra == "mysql"
Requires-Dist: SQLAlchemy>=2.0.0; extra == "sql-other"
Requires-Dist: adbc-driver-postgresql>=0.8.0; extra == "sql-other"
Requires-Dist: adbc-driver-sqlite>=0.8.0; extra == "sql-other"
Requires-Dist: beautifulsoup4>=4.11.2; extra == "html"
Requires-Dist: html5lib>=1.1; extra == "html"
Requires-Dist: lxml>=4.9.2; extra == "html"
Requires-Dist: lxml>=4.9.2; extra == "xml"
Requires-Dist: matplotlib>=3.6.3; extra == "plot"
Requires-Dist: jinja2>=3.1.2; extra == "output-formatting"
Requires-Dist: tabulate>=0.9.0; extra == "output-formatting"
Requires-Dist: PyQt5>=5.15.9; extra == "clipboard"
Requires-Dist: qtpy>=2.3.0; extra == "clipboard"
Requires-Dist: zstandard>=0.19.0; extra == "compression"
Requires-Dist: dataframe-api-compat>=0.1.7; extra == "consortium-standard"
Requires-Dist: adbc-driver-postgresql>=0.8.0; extra == "all"
Requires-Dist: adbc-driver-sqlite>=0.8.0; extra == "all"
Requires-Dist: beautifulsoup4>=4.11.2; extra == "all"
Requires-Dist: bottleneck>=1.3.6; extra == "all"
Requires-Dist: dataframe-api-compat>=0.1.7; extra == "all"
Requires-Dist: fastparquet>=2022.12.0; extra == "all"
Requires-Dist: fsspec>=2022.11.0; extra == "all"
Requires-Dist: gcsfs>=2022.11.0; extra == "all"
Requires-Dist: html5lib>=1.1; extra == "all"
Requires-Dist: hypothesis>=6.46.1; extra == "all"
Requires-Dist: jinja2>=3.1.2; extra == "all"
Requires-Dist: lxml>=4.9.2; extra == "all"
Requires-Dist: matplotlib>=3.6.3; extra == "all"
Requires-Dist: numba>=0.56.4; extra == "all"
Requires-Dist: numexpr>=2.8.4; extra == "all"
Requires-Dist: odfpy>=1.4.1; extra == "all"
Requires-Dist: openpyxl>=3.1.0; extra == "all"
Requires-Dist: pandas-gbq>=0.19.0; extra == "all"
Requires-Dist: psycopg2>=2.9.6; extra == "all"
Requires-Dist: pyarrow>=10.0.1; extra == "all"
Requires-Dist: pymysql>=1.0.2; extra == "all"
Requires-Dist: PyQt5>=5.15.9; extra == "all"
Requires-Dist: pyreadstat>=1.2.0; extra == "all"
Requires-Dist: pytest>=7.3.2; extra == "all"
Requires-Dist: pytest-xdist>=2.2.0; extra == "all"
Requires-Dist: python-calamine>=0.1.7; extra == "all"
Requires-Dist: pyxlsb>=1.0.10; extra == "all"
Requires-Dist: qtpy>=2.3.0; extra == "all"
Requires-Dist: scipy>=1.10.0; extra == "all"
Requires-Dist: s3fs>=2022.11.0; extra == "all"
Requires-Dist: SQLAlchemy>=2.0.0; extra == "all"
Requires-Dist: tables>=3.8.0; extra == "all"
Requires-Dist: tabulate>=0.9.0; extra == "all"
Requires-Dist: xarray>=2022.12.0; extra == "all"
Requires-Dist: xlrd>=2.0.1; extra == "all"
Requires-Dist: xlsxwriter>=3.0.5; extra == "all"
Requires-Dist: zstandard>=0.19.0; extra == "all"
Provides-Extra: test
Provides-Extra: pyarrow
Provides-Extra: performance
Provides-Extra: computation
Provides-Extra: fss
Provides-Extra: aws
Provides-Extra: gcp
Provides-Extra: excel
Provides-Extra: parquet
Provides-Extra: feather
Provides-Extra: hdf5
Provides-Extra: spss
Provides-Extra: postgresql
Provides-Extra: mysql
Provides-Extra: sql-other
Provides-Extra: html
Provides-Extra: xml
Provides-Extra: plot
Provides-Extra: output-formatting
Provides-Extra: clipboard
Provides-Extra: compression
Provides-Extra: consortium-standard
Provides-Extra: all
Description-Content-Type: text/markdown

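As an aside, the Requires-Dist and Provides-Extra fields above are machine-readable. A minimal sketch, assuming pandas 2.2.3 is installed in the active environment, of reading the same fields back at runtime with the standard library's importlib.metadata:

```python
# Minimal sketch: inspect the installed pandas METADATA at runtime.
from importlib.metadata import metadata, requires

meta = metadata("pandas")                 # email.Message-like view of this METADATA file
print(meta["Requires-Python"])            # ">=3.9"
print(meta.get_all("Provides-Extra"))     # extras such as "excel", "performance", "all"

# Each entry mirrors one Requires-Dist line, including its environment marker.
for req in requires("pandas") or []:
    if 'extra == "performance"' in req:
        print(req)                        # the bottleneck/numba/numexpr pins behind the "performance" extra
```
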
<div align="center">
  <img src="https://pandas.pydata.org/static/img/pandas.svg"><br>
</div>

-----------------

# pandas: powerful Python data analysis toolkit

| | |
| --- | --- |
| Testing | [](https://github.com/pandas-dev/pandas/actions/workflows/unit-tests.yml) [](https://codecov.io/gh/pandas-dev/pandas) |
| Package | [](https://pypi.org/project/pandas/) [](https://pypi.org/project/pandas/) [](https://anaconda.org/conda-forge/pandas) [](https://anaconda.org/conda-forge/pandas) |
| Meta | [](https://numfocus.org) [](https://doi.org/10.5281/zenodo.3509134) [](https://github.com/pandas-dev/pandas/blob/main/LICENSE) [](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack) |

## What is it?

**pandas** is a Python package that provides fast, flexible, and expressive data
structures designed to make working with "relational" or "labeled" data both
easy and intuitive. It aims to be the fundamental high-level building block for
doing practical, **real world** data analysis in Python. Additionally, it has
the broader goal of becoming **the most powerful and flexible open source data
analysis / manipulation tool available in any language**. It is already well on
its way towards this goal.

## Table of Contents

- [Main Features](#main-features)
- [Where to get it](#where-to-get-it)
- [Dependencies](#dependencies)
- [Installation from sources](#installation-from-sources)
- [License](#license)
- [Documentation](#documentation)
- [Background](#background)
- [Getting Help](#getting-help)
- [Discussion and Development](#discussion-and-development)
- [Contributing to pandas](#contributing-to-pandas)

## Main Features
Here are just a few of the things that pandas does well:

  - Easy handling of [**missing data**][missing-data] (represented as
    `NaN`, `NA`, or `NaT`) in floating point as well as non-floating point data
  - Size mutability: columns can be [**inserted and
    deleted**][insertion-deletion] from DataFrame and higher dimensional
    objects
  - Automatic and explicit [**data alignment**][alignment]: objects can
    be explicitly aligned to a set of labels, or the user can simply
    ignore the labels and let `Series`, `DataFrame`, etc. automatically
    align the data for you in computations
  - Powerful, flexible [**group by**][groupby] functionality to perform
    split-apply-combine operations on data sets, for both aggregating
    and transforming data
  - Make it [**easy to convert**][conversion] ragged,
    differently-indexed data in other Python and NumPy data structures
    into DataFrame objects
  - Intelligent label-based [**slicing**][slicing], [**fancy
    indexing**][fancy-indexing], and [**subsetting**][subsetting] of
    large data sets
  - Intuitive [**merging**][merging] and [**joining**][joining] data
    sets
  - Flexible [**reshaping**][reshape] and [**pivoting**][pivot-table] of
    data sets
  - [**Hierarchical**][mi] labeling of axes (possible to have multiple
    labels per tick)
  - Robust IO tools for loading data from [**flat files**][flat-files]
    (CSV and delimited), [**Excel files**][excel], [**databases**][db],
    and saving/loading data from the ultrafast [**HDF5 format**][hdfstore]
  - [**Time series**][timeseries]-specific functionality: date range
    generation and frequency conversion, moving window statistics,
    date shifting and lagging

   [missing-data]: https://pandas.pydata.org/pandas-docs/stable/user_guide/missing_data.html
   [insertion-deletion]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html#column-selection-addition-deletion
   [alignment]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html?highlight=alignment#intro-to-data-structures
   [groupby]: https://pandas.pydata.org/pandas-docs/stable/user_guide/groupby.html#group-by-split-apply-combine
   [conversion]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html#dataframe
   [slicing]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#slicing-ranges
   [fancy-indexing]: https://pandas.pydata.org/pandas-docs/stable/user_guide/advanced.html#advanced
   [subsetting]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#boolean-indexing
   [merging]: https://pandas.pydata.org/pandas-docs/stable/user_guide/merging.html#database-style-dataframe-or-named-series-joining-merging
   [joining]: https://pandas.pydata.org/pandas-docs/stable/user_guide/merging.html#joining-on-index
   [reshape]: https://pandas.pydata.org/pandas-docs/stable/user_guide/reshaping.html
   [pivot-table]: https://pandas.pydata.org/pandas-docs/stable/user_guide/reshaping.html
   [mi]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#hierarchical-indexing-multiindex
   [flat-files]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#csv-text-files
   [excel]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#excel-files
   [db]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#sql-queries
   [hdfstore]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#hdf5-pytables
   [timeseries]: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#time-series-date-functionality

            ## Where to get it
         | 
| 1478 | 
            +
            The source code is currently hosted on GitHub at:
         | 
| 1479 | 
            +
            https://github.com/pandas-dev/pandas
         | 
| 1480 | 
            +
             | 
| 1481 | 
            +
            Binary installers for the latest released version are available at the [Python
         | 
| 1482 | 
            +
            Package Index (PyPI)](https://pypi.org/project/pandas) and on [Conda](https://docs.conda.io/en/latest/).
         | 
| 1483 | 
            +
             | 
| 1484 | 
            +
            ```sh
         | 
| 1485 | 
            +
            # conda
         | 
| 1486 | 
            +
            conda install -c conda-forge pandas
         | 
| 1487 | 
            +
            ```
         | 
| 1488 | 
            +
             | 
| 1489 | 
            +
            ```sh
         | 
| 1490 | 
            +
            # or PyPI
         | 
| 1491 | 
            +
            pip install pandas
         | 
| 1492 | 
            +
            ```
         | 
| 1493 | 
            +
             | 
| 1494 | 
            +
            The list of changes to pandas between each release can be found
         | 
| 1495 | 
            +
            [here](https://pandas.pydata.org/pandas-docs/stable/whatsnew/index.html). For full
         | 
| 1496 | 
            +
            details, see the commit logs at https://github.com/pandas-dev/pandas.
         | 
| 1497 | 
            +
             | 
| 1498 | 
            +
            ## Dependencies
         | 
| 1499 | 
            +
            - [NumPy - Adds support for large, multi-dimensional arrays, matrices and high-level mathematical functions to operate on these arrays](https://www.numpy.org)
         | 
| 1500 | 
            +
            - [python-dateutil - Provides powerful extensions to the standard datetime module](https://dateutil.readthedocs.io/en/stable/index.html)
         | 
| 1501 | 
            +
            - [pytz - Brings the Olson tz database into Python which allows accurate and cross platform timezone calculations](https://github.com/stub42/pytz)
         | 
| 1502 | 
            +
             | 
| 1503 | 
            +
            See the [full installation instructions](https://pandas.pydata.org/pandas-docs/stable/install.html#dependencies) for minimum supported versions of required, recommended and optional dependencies.
         | 
| 1504 | 
            +
             | 
| 1505 | 
            +
            ## Installation from sources
         | 
| 1506 | 
            +
            To install pandas from source you need [Cython](https://cython.org/) in addition to the normal
         | 
| 1507 | 
            +
            dependencies above. Cython can be installed from PyPI:
         | 
| 1508 | 
            +
             | 
| 1509 | 
            +
            ```sh
         | 
| 1510 | 
            +
            pip install cython
         | 
| 1511 | 
            +
            ```
         | 
| 1512 | 
            +
             | 
| 1513 | 
            +
            In the `pandas` directory (same one where you found this file after
         | 
| 1514 | 
            +
            cloning the git repo), execute:
         | 
| 1515 | 
            +
             | 
| 1516 | 
            +
            ```sh
         | 
| 1517 | 
            +
            pip install .
         | 
| 1518 | 
            +
            ```
         | 
| 1519 | 
            +
             | 
| 1520 | 
            +
            or for installing in [development mode](https://pip.pypa.io/en/latest/cli/pip_install/#install-editable):
         | 
| 1521 | 
            +
             | 
| 1522 | 
            +
             | 
| 1523 | 
            +
            ```sh
         | 
| 1524 | 
            +
            python -m pip install -ve . --no-build-isolation --config-settings=editable-verbose=true
         | 
| 1525 | 
            +
            ```
         | 
| 1526 | 
            +
             | 
| 1527 | 
            +
            See the full instructions for [installing from source](https://pandas.pydata.org/docs/dev/development/contributing_environment.html).
         | 
| 1528 | 
            +
             | 
| 1529 | 
            +
            ## License
         | 
| 1530 | 
            +
            [BSD 3](LICENSE)
         | 
| 1531 | 
            +
             | 
| 1532 | 
            +
            ## Documentation
         | 
| 1533 | 
            +
            The official documentation is hosted on [PyData.org](https://pandas.pydata.org/pandas-docs/stable/).
         | 
| 1534 | 
            +
             | 
| 1535 | 
            +
            ## Background
         | 
| 1536 | 
            +
            Work on ``pandas`` started at [AQR](https://www.aqr.com/) (a quantitative hedge fund) in 2008 and
         | 
| 1537 | 
            +
            has been under active development since then.
         | 
| 1538 | 
            +
             | 
| 1539 | 
            +
            ## Getting Help
         | 
| 1540 | 
            +
             | 
| 1541 | 
            +
            For usage questions, the best place to go to is [StackOverflow](https://stackoverflow.com/questions/tagged/pandas).
         | 
| 1542 | 
            +
            Further, general questions and discussions can also take place on the [pydata mailing list](https://groups.google.com/forum/?fromgroups#!forum/pydata).
         | 
| 1543 | 
            +
             | 
| 1544 | 
            +
            ## Discussion and Development
         | 
| 1545 | 
            +
            Most development discussions take place on GitHub in this repo, via the [GitHub issue tracker](https://github.com/pandas-dev/pandas/issues).
         | 
| 1546 | 
            +
             | 
| 1547 | 
            +
            Further, the [pandas-dev mailing list](https://mail.python.org/mailman/listinfo/pandas-dev) can also be used for specialized discussions or design issues, and a [Slack channel](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack) is available for quick development related questions.
         | 
| 1548 | 
            +
             | 
| 1549 | 
            +
            There are also frequent [community meetings](https://pandas.pydata.org/docs/dev/development/community.html#community-meeting) for project maintainers open to the community as well as monthly [new contributor meetings](https://pandas.pydata.org/docs/dev/development/community.html#new-contributor-meeting) to help support new contributors.
         | 
| 1550 | 
            +
             | 
| 1551 | 
            +
            Additional information on the communication channels can be found on the [contributor community](https://pandas.pydata.org/docs/development/community.html) page.
         | 
| 1552 | 
            +
             | 
| 1553 | 
            +
            ## Contributing to pandas
         | 
| 1554 | 
            +
             | 
| 1555 | 
            +
            [](https://www.codetriage.com/pandas-dev/pandas)
         | 
| 1556 | 
            +
             | 
| 1557 | 
            +
            All contributions, bug reports, bug fixes, documentation improvements, enhancements, and ideas are welcome.
         | 
| 1558 | 
            +
             | 
| 1559 | 
            +
            A detailed overview on how to contribute can be found in the **[contributing guide](https://pandas.pydata.org/docs/dev/development/contributing.html)**.
         | 
| 1560 | 
            +
             | 
| 1561 | 
            +
            If you are simply looking to start working with the pandas codebase, navigate to the [GitHub "issues" tab](https://github.com/pandas-dev/pandas/issues) and start looking through interesting issues. There are a number of issues listed under [Docs](https://github.com/pandas-dev/pandas/issues?labels=Docs&sort=updated&state=open) and [good first issue](https://github.com/pandas-dev/pandas/issues?labels=good+first+issue&sort=updated&state=open) where you could start out.
         | 
| 1562 | 
            +
             | 
| 1563 | 
            +
            You can also triage issues which may include reproducing bug reports, or asking for vital information such as version numbers or reproduction instructions. If you would like to start triaging issues, one easy way to get started is to [subscribe to pandas on CodeTriage](https://www.codetriage.com/pandas-dev/pandas).
         | 
| 1564 | 
            +
             | 
| 1565 | 
            +
            Or maybe through using pandas you have an idea of your own or are looking for something in the documentation and thinking ‘this can be improved’...you can do something about it!
         | 
| 1566 | 
            +
             | 
| 1567 | 
            +
            Feel free to ask questions on the [mailing list](https://groups.google.com/forum/?fromgroups#!forum/pydata) or on [Slack](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack).
         | 
| 1568 | 
            +
             | 
| 1569 | 
            +
            As contributors and maintainers to this project, you are expected to abide by pandas' code of conduct. More information can be found at: [Contributor Code of Conduct](https://github.com/pandas-dev/.github/blob/master/CODE_OF_CONDUCT.md)
         | 
| 1570 | 
            +
             | 
| 1571 | 
            +
            <hr>
         | 
| 1572 | 
            +
             | 
| 1573 | 
            +
            [Go to Top](#table-of-contents)
         | 
    	
evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/RECORD    ADDED

(Diff too large to render; see the raw diff.)
    	
evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/REQUESTED    ADDED

(File without changes.)
    	
evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/WHEEL    ADDED
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: meson
+Root-Is-Purelib: false
+Tag: cp310-cp310-manylinux_2_17_x86_64
+Tag: cp310-cp310-manylinux2014_x86_64
+
    	
evalkit_tf437/lib/python3.10/site-packages/pandas-2.2.3.dist-info/entry_points.txt    ADDED
@@ -0,0 +1,3 @@
+[pandas_plotting_backends]
+matplotlib = pandas:plotting._matplotlib
+
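The entry point registers `pandas:plotting._matplotlib` under the `pandas_plotting_backends` group, which is how `DataFrame.plot` resolves a backend by name. A small editorial usage sketch, assuming matplotlib is installed:

```python
# Illustrative only: selecting the plotting backend registered above.
import pandas as pd

df = pd.DataFrame({"x": [1, 2, 3], "y": [2, 4, 8]})

# "matplotlib" is the default backend and the one registered in entry_points.txt.
pd.options.plotting.backend = "matplotlib"
ax = df.plot(x="x", y="y")                      # resolves to pandas:plotting._matplotlib

# Equivalent one-off selection without changing the global option.
ax = df.plot(x="x", y="y", backend="matplotlib")
```

Third-party libraries expose their own backends by registering a different name in the same entry-point group.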
    	
evalkit_tf437/lib/python3.10/site-packages/pyarrow/libarrow_substrait.so.1800    ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a9588bbb6ed632ec93e4268425f77cd7c26a148d6a83bfab2c9e37ee3de0a73
+size 5338320
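These three lines are a Git LFS pointer, not the 5.3 MB shared library itself; the binary is fetched separately using the `oid` and `size` recorded here. A small editorial helper that parses such a pointer (the path in the usage comment is illustrative):

```python
# Illustrative only: read the key/value fields of a Git LFS pointer file.
from pathlib import Path

def read_lfs_pointer(path: str) -> dict:
    """Return the version/oid/size fields of a Git LFS pointer file."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        if line.strip():
            key, _, value = line.partition(" ")
            fields[key] = value
    return fields

# Example (hypothetical local checkout path):
# info = read_lfs_pointer("pyarrow/libarrow_substrait.so.1800")
# print(info["oid"], int(info["size"]))
```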
    	
evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/INSTALLER    ADDED
@@ -0,0 +1 @@
+pip
    	
evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/LICENSE    ADDED
@@ -0,0 +1,175 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
    	
evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/RECORD    ADDED
@@ -0,0 +1,43 @@
+requests-2.32.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+requests-2.32.3.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
+requests-2.32.3.dist-info/METADATA,sha256=ZY7oRUweLnb7jCEnEW9hFWs7IpQbNVnAA4ncpwA4WBo,4610
+requests-2.32.3.dist-info/RECORD,,
+requests-2.32.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+requests-2.32.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+requests-2.32.3.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
+requests/__init__.py,sha256=4xaAERmPDIBPsa2PsjpU9r06yooK-2mZKHTZAhWRWts,5072
+requests/__pycache__/__init__.cpython-310.pyc,,
+requests/__pycache__/__version__.cpython-310.pyc,,
+requests/__pycache__/_internal_utils.cpython-310.pyc,,
+requests/__pycache__/adapters.cpython-310.pyc,,
+requests/__pycache__/api.cpython-310.pyc,,
+requests/__pycache__/auth.cpython-310.pyc,,
+requests/__pycache__/certs.cpython-310.pyc,,
+requests/__pycache__/compat.cpython-310.pyc,,
+requests/__pycache__/cookies.cpython-310.pyc,,
+requests/__pycache__/exceptions.cpython-310.pyc,,
+requests/__pycache__/help.cpython-310.pyc,,
+requests/__pycache__/hooks.cpython-310.pyc,,
+requests/__pycache__/models.cpython-310.pyc,,
+requests/__pycache__/packages.cpython-310.pyc,,
+requests/__pycache__/sessions.cpython-310.pyc,,
+requests/__pycache__/status_codes.cpython-310.pyc,,
+requests/__pycache__/structures.cpython-310.pyc,,
+requests/__pycache__/utils.cpython-310.pyc,,
+requests/__version__.py,sha256=FVfglgZmNQnmYPXpOohDU58F5EUb_-VnSTaAesS187g,435
+requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
+requests/adapters.py,sha256=KIcecscqam6reOCXRl4DwP4jX8Jcl8sd57ft17KR2cQ,27451
+requests/api.py,sha256=_Zb9Oa7tzVIizTKwFrPjDEY9ejtm_OnSRERnADxGsQs,6449
+requests/auth.py,sha256=kF75tqnLctZ9Mf_hm9TZIj4cQWnN5uxRz8oWsx5wmR0,10186
+requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429
+requests/compat.py,sha256=C5w_DPLSurXPgcdWU78fora0APmbYkX2G89QvH5xzPA,1817
+requests/cookies.py,sha256=bNi-iqEj4NPZ00-ob-rHvzkvObzN3lEpgw3g6paS3Xw,18590
+requests/exceptions.py,sha256=jJPS1UWATs86ShVUaLorTiJb1SaGuoNEWgICJep-VkY,4260
+requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875
+requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
+requests/models.py,sha256=k42roXzC8u_OagAPQi9U4MkfO7i4r2FdaqvMqstPehc,35418
+requests/packages.py,sha256=_g0gZ681UyAlKHRjH6kanbaoxx2eAb6qzcXiODyTIoc,904
+requests/sessions.py,sha256=ykTI8UWGSltOfH07HKollH7kTBGw4WhiBVaQGmckTw4,30495
+requests/status_codes.py,sha256=iJUAeA25baTdw-6PfD0eF4qhpINDJRJI-yaMqxs4LEI,4322
+requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
+requests/utils.py,sha256=HiQC6Nq_Da3ktaMiFzQkh-dCk3iQHHKEsYS5kDc-8Cw,33619
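RECORD follows the wheel/PEP 376 installed-files format: one CSV row per file with its path, a `sha256=`-prefixed urlsafe-base64 digest (with `=` padding stripped), and its size in bytes; rows such as `RECORD` itself and the `.pyc` caches carry no hash. An editorial sketch that re-checks entries against the installed files (the paths in the usage comment are illustrative):

```python
# Illustrative only: recompute each hashed RECORD entry and report mismatches.
import base64
import csv
import hashlib
from pathlib import Path

def verify_record(record_path: str, site_packages: str) -> list[str]:
    """Return paths whose current hash or size no longer matches RECORD."""
    mismatched = []
    with open(record_path, newline="") as fh:
        for path, digest, size in csv.reader(fh):
            if not digest:  # RECORD itself and *.pyc entries have no hash
                continue
            data = (Path(site_packages) / path).read_bytes()
            actual = base64.urlsafe_b64encode(
                hashlib.sha256(data).digest()
            ).rstrip(b"=").decode()
            if f"sha256={actual}" != digest or len(data) != int(size):
                mismatched.append(path)
    return mismatched

# Example (hypothetical paths):
# print(verify_record(".../requests-2.32.3.dist-info/RECORD", ".../site-packages"))
```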
    	
evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/REQUESTED    ADDED

(File without changes.)
    	
evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/WHEEL    ADDED
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
    	
evalkit_tf437/lib/python3.10/site-packages/requests-2.32.3.dist-info/top_level.txt    ADDED
@@ -0,0 +1 @@
+requests
    	
evalkit_tf437/lib/python3.10/site-packages/sklearn/__init__.py    ADDED
@@ -0,0 +1,162 @@
+"""Configure global settings and get information about the working environment."""
+
+# Authors: The scikit-learn developers
+# SPDX-License-Identifier: BSD-3-Clause
+
+# Machine learning module for Python
+# ==================================
+#
+# sklearn is a Python module integrating classical machine
+# learning algorithms in the tightly-knit world of scientific Python
+# packages (numpy, scipy, matplotlib).
+#
+# It aims to provide simple and efficient solutions to learning problems
+# that are accessible to everybody and reusable in various contexts:
+# machine-learning as a versatile tool for science and engineering.
+#
+# See https://scikit-learn.org for complete documentation.
+
+import importlib as _importlib
+import logging
+import os
+import random
+
+from ._config import config_context, get_config, set_config
+
+logger = logging.getLogger(__name__)
+
+
+# PEP0440 compatible formatted version, see:
+# https://www.python.org/dev/peps/pep-0440/
+#
+# Generic release markers:
+#   X.Y.0   # For first release after an increment in Y
+#   X.Y.Z   # For bugfix releases
+#
+# Admissible pre-release markers:
+#   X.Y.ZaN   # Alpha release
+#   X.Y.ZbN   # Beta release
+#   X.Y.ZrcN  # Release Candidate
+#   X.Y.Z     # Final release
+#
+# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
+# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
+#
+__version__ = "1.6.1"
+
+
+# On OSX, we can get a runtime error due to multiple OpenMP libraries loaded
+# simultaneously. This can happen for instance when calling BLAS inside a
+# prange. Setting the following environment variable allows multiple OpenMP
+# libraries to be loaded. It should not degrade performances since we manually
+# take care of potential over-subscription performance issues, in sections of
+# the code where nested OpenMP loops can happen, by dynamically reconfiguring
+# the inner OpenMP runtime to temporarily disable it while under the scope of
+# the outer OpenMP parallel section.
+os.environ.setdefault("KMP_DUPLICATE_LIB_OK", "True")
+
+# Workaround issue discovered in intel-openmp 2019.5:
+# https://github.com/ContinuumIO/anaconda-issues/issues/11294
+os.environ.setdefault("KMP_INIT_AT_FORK", "FALSE")
+
+# `_distributor_init` allows distributors to run custom init code.
+# For instance, for the Windows wheel, this is used to pre-load the
+# vcomp shared library runtime for OpenMP embedded in the sklearn/.libs
+# sub-folder.
+# It is necessary to do this prior to importing show_versions as the
+# latter is linked to the OpenMP runtime to make it possible to introspect
+# it and importing it first would fail if the OpenMP dll cannot be found.
+from . import (  # noqa: F401 E402
+    __check_build,
+    _distributor_init,
+)
+from .base import clone  # noqa: E402
+from .utils._show_versions import show_versions  # noqa: E402
+
+_submodules = [
+    "calibration",
+    "cluster",
+    "covariance",
+    "cross_decomposition",
+    "datasets",
+    "decomposition",
+    "dummy",
+    "ensemble",
+    "exceptions",
+    "experimental",
+    "externals",
+    "feature_extraction",
+    "feature_selection",
+    "frozen",
+    "gaussian_process",
+    "inspection",
+    "isotonic",
+    "kernel_approximation",
+    "kernel_ridge",
+    "linear_model",
+    "manifold",
+    "metrics",
+    "mixture",
+    "model_selection",
+    "multiclass",
+    "multioutput",
+    "naive_bayes",
+    "neighbors",
+    "neural_network",
+    "pipeline",
+    "preprocessing",
+    "random_projection",
+    "semi_supervised",
+    "svm",
+    "tree",
+    "discriminant_analysis",
+    "impute",
+    "compose",
+]
+
+__all__ = _submodules + [
+    # Non-modules:
+    "clone",
+    "get_config",
+    "set_config",
+    "config_context",
+    "show_versions",
+]
+
+
+def __dir__():
+    return __all__
+
+
+def __getattr__(name):
+    if name in _submodules:
+        return _importlib.import_module(f"sklearn.{name}")
+    else:
+        try:
+            return globals()[name]
+        except KeyError:
+            raise AttributeError(f"Module 'sklearn' has no attribute '{name}'")
+
+
+_BUILT_WITH_MESON = False
+try:
+    import sklearn._built_with_meson  # noqa: F401
+
+    _BUILT_WITH_MESON = True
+except ModuleNotFoundError:
+    pass
+
+
+def setup_module(module):
+    """Fixture for the tests to assure globally controllable seeding of RNGs"""
+
+    import numpy as np
+
+    # Check if a random seed exists in the environment, if not create one.
+    _random_seed = os.environ.get("SKLEARN_SEED", None)
+    if _random_seed is None:
+        _random_seed = np.random.uniform() * np.iinfo(np.int32).max
+    _random_seed = int(_random_seed)
+    print("I: Seeding RNGs with %r" % _random_seed)
+    np.random.seed(_random_seed)
+    random.seed(_random_seed)
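`__init__.py` keeps `import sklearn` cheap by resolving submodules lazily through the module-level `__getattr__`/`__dir__` hooks of PEP 562: a submodule is imported only when it is first accessed as an attribute. An editorial sketch of the same pattern for a hypothetical package `mypkg` with submodules `fast` and `slow`:

```python
# mypkg/__init__.py -- illustrative only, not part of scikit-learn.
import importlib as _importlib

_submodules = ["fast", "slow"]
__all__ = _submodules + ["__version__"]
__version__ = "0.1.0"


def __dir__():
    # Make dir(mypkg) and tab completion list the lazy submodules too.
    return __all__


def __getattr__(name):
    # Called only when normal attribute lookup fails (PEP 562).
    if name in _submodules:
        return _importlib.import_module(f"mypkg.{name}")
    raise AttributeError(f"module 'mypkg' has no attribute '{name}'")
```

With this in place, `import mypkg` loads nothing extra; the first access to `mypkg.fast` triggers the actual submodule import.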
    	
evalkit_tf437/lib/python3.10/site-packages/sklearn/_built_with_meson.py    ADDED

(File without changes.)
    	
        evalkit_tf437/lib/python3.10/site-packages/sklearn/_config.py
    ADDED
    
    | @@ -0,0 +1,376 @@ | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | |
|  | 
|  | |
"""Global configuration state and functions for management"""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

import os
import threading
from contextlib import contextmanager as contextmanager

_global_config = {
    "assume_finite": bool(os.environ.get("SKLEARN_ASSUME_FINITE", False)),
    "working_memory": int(os.environ.get("SKLEARN_WORKING_MEMORY", 1024)),
    "print_changed_only": True,
    "display": "diagram",
    "pairwise_dist_chunk_size": int(
        os.environ.get("SKLEARN_PAIRWISE_DIST_CHUNK_SIZE", 256)
    ),
    "enable_cython_pairwise_dist": True,
    "array_api_dispatch": False,
    "transform_output": "default",
    "enable_metadata_routing": False,
    "skip_parameter_validation": False,
}
_threadlocal = threading.local()


def _get_threadlocal_config():
    """Get a threadlocal **mutable** configuration. If the configuration
    does not exist, copy the default global configuration."""
    if not hasattr(_threadlocal, "global_config"):
        _threadlocal.global_config = _global_config.copy()
    return _threadlocal.global_config


def get_config():
    """Retrieve current values for configuration set by :func:`set_config`.

    Returns
    -------
    config : dict
        Keys are parameter names that can be passed to :func:`set_config`.

    See Also
    --------
    config_context : Context manager for global scikit-learn configuration.
    set_config : Set global scikit-learn configuration.

    Examples
    --------
    >>> import sklearn
    >>> config = sklearn.get_config()
    >>> config.keys()
    dict_keys([...])
    """
    # Return a copy of the threadlocal configuration so that users will
    # not be able to modify the configuration with the returned dict.
    return _get_threadlocal_config().copy()


def set_config(
    assume_finite=None,
    working_memory=None,
    print_changed_only=None,
    display=None,
    pairwise_dist_chunk_size=None,
    enable_cython_pairwise_dist=None,
    array_api_dispatch=None,
    transform_output=None,
    enable_metadata_routing=None,
    skip_parameter_validation=None,
):
    """Set global scikit-learn configuration.

    .. versionadded:: 0.19

    Parameters
    ----------
    assume_finite : bool, default=None
        If True, validation for finiteness will be skipped,
        saving time, but leading to potential crashes. If
        False, validation for finiteness will be performed,
        avoiding error. Global default: False.

        .. versionadded:: 0.19

    working_memory : int, default=None
        If set, scikit-learn will attempt to limit the size of temporary arrays
        to this number of MiB (per job when parallelised), often saving both
        computation time and memory on expensive operations that can be
        performed in chunks. Global default: 1024.

        .. versionadded:: 0.20

    print_changed_only : bool, default=None
        If True, only the parameters that were set to non-default
        values will be printed when printing an estimator. For example,
        ``print(SVC())`` while True will only print 'SVC()' while the default
        behaviour would be to print 'SVC(C=1.0, cache_size=200, ...)' with
        all the non-changed parameters.

        .. versionadded:: 0.21

    display : {'text', 'diagram'}, default=None
        If 'diagram', estimators will be displayed as a diagram in a Jupyter
        lab or notebook context. If 'text', estimators will be displayed as
        text. Default is 'diagram'.

        .. versionadded:: 0.23

    pairwise_dist_chunk_size : int, default=None
        The number of row vectors per chunk for the accelerated pairwise-
        distances reduction backend. Default is 256 (suitable for most of
        modern laptops' caches and architectures).

        Intended for easier benchmarking and testing of scikit-learn internals.
        End users are not expected to benefit from customizing this configuration
        setting.

        .. versionadded:: 1.1

    enable_cython_pairwise_dist : bool, default=None
        Use the accelerated pairwise-distances reduction backend when
        possible. Global default: True.

        Intended for easier benchmarking and testing of scikit-learn internals.
        End users are not expected to benefit from customizing this configuration
        setting.

        .. versionadded:: 1.1

    array_api_dispatch : bool, default=None
        Use Array API dispatching when inputs follow the Array API standard.
        Default is False.

        See the :ref:`User Guide <array_api>` for more details.

        .. versionadded:: 1.2

    transform_output : str, default=None
        Configure output of `transform` and `fit_transform`.

        See :ref:`sphx_glr_auto_examples_miscellaneous_plot_set_output.py`
        for an example on how to use the API.

        - `"default"`: Default output format of a transformer
        - `"pandas"`: DataFrame output
        - `"polars"`: Polars output
        - `None`: Transform configuration is unchanged

        .. versionadded:: 1.2
        .. versionadded:: 1.4
            `"polars"` option was added.

    enable_metadata_routing : bool, default=None
        Enable metadata routing. By default this feature is disabled.

        Refer to :ref:`metadata routing user guide <metadata_routing>` for more
        details.

        - `True`: Metadata routing is enabled
        - `False`: Metadata routing is disabled, use the old syntax.
        - `None`: Configuration is unchanged

        .. versionadded:: 1.3

    skip_parameter_validation : bool, default=None
        If `True`, disable the validation of the hyper-parameters' types and values in
        the fit method of estimators and for arguments passed to public helper
        functions. It can save time in some situations but can lead to low level
        crashes and exceptions with confusing error messages.

        Note that for data parameters, such as `X` and `y`, only type validation is
        skipped but validation with `check_array` will continue to run.

        .. versionadded:: 1.3

    See Also
    --------
    config_context : Context manager for global scikit-learn configuration.
    get_config : Retrieve current values of the global configuration.

    Examples
    --------
    >>> from sklearn import set_config
    >>> set_config(display='diagram')  # doctest: +SKIP
    """
    local_config = _get_threadlocal_config()

    if assume_finite is not None:
        local_config["assume_finite"] = assume_finite
    if working_memory is not None:
        local_config["working_memory"] = working_memory
    if print_changed_only is not None:
        local_config["print_changed_only"] = print_changed_only
    if display is not None:
        local_config["display"] = display
    if pairwise_dist_chunk_size is not None:
        local_config["pairwise_dist_chunk_size"] = pairwise_dist_chunk_size
    if enable_cython_pairwise_dist is not None:
        local_config["enable_cython_pairwise_dist"] = enable_cython_pairwise_dist
    if array_api_dispatch is not None:
        from .utils._array_api import _check_array_api_dispatch

        _check_array_api_dispatch(array_api_dispatch)
        local_config["array_api_dispatch"] = array_api_dispatch
    if transform_output is not None:
        local_config["transform_output"] = transform_output
    if enable_metadata_routing is not None:
        local_config["enable_metadata_routing"] = enable_metadata_routing
    if skip_parameter_validation is not None:
        local_config["skip_parameter_validation"] = skip_parameter_validation


@contextmanager
def config_context(
    *,
    assume_finite=None,
    working_memory=None,
    print_changed_only=None,
    display=None,
    pairwise_dist_chunk_size=None,
    enable_cython_pairwise_dist=None,
    array_api_dispatch=None,
    transform_output=None,
    enable_metadata_routing=None,
    skip_parameter_validation=None,
):
    """Context manager for global scikit-learn configuration.

    Parameters
    ----------
    assume_finite : bool, default=None
        If True, validation for finiteness will be skipped,
        saving time, but leading to potential crashes. If
        False, validation for finiteness will be performed,
        avoiding error. If None, the existing value won't change.
        The default value is False.

    working_memory : int, default=None
        If set, scikit-learn will attempt to limit the size of temporary arrays
        to this number of MiB (per job when parallelised), often saving both
        computation time and memory on expensive operations that can be
        performed in chunks. If None, the existing value won't change.
        The default value is 1024.

    print_changed_only : bool, default=None
        If True, only the parameters that were set to non-default
        values will be printed when printing an estimator. For example,
        ``print(SVC())`` while True will only print 'SVC()', but would print
        'SVC(C=1.0, cache_size=200, ...)' with all the non-changed parameters
        when False. If None, the existing value won't change.
        The default value is True.

        .. versionchanged:: 0.23
           Default changed from False to True.

    display : {'text', 'diagram'}, default=None
        If 'diagram', estimators will be displayed as a diagram in a Jupyter
        lab or notebook context. If 'text', estimators will be displayed as
        text. If None, the existing value won't change.
        The default value is 'diagram'.

        .. versionadded:: 0.23

    pairwise_dist_chunk_size : int, default=None
        The number of row vectors per chunk for the accelerated pairwise-
        distances reduction backend. Default is 256 (suitable for most of
        modern laptops' caches and architectures).

        Intended for easier benchmarking and testing of scikit-learn internals.
        End users are not expected to benefit from customizing this configuration
        setting.

        .. versionadded:: 1.1

    enable_cython_pairwise_dist : bool, default=None
        Use the accelerated pairwise-distances reduction backend when
        possible. Global default: True.

        Intended for easier benchmarking and testing of scikit-learn internals.
        End users are not expected to benefit from customizing this configuration
        setting.

        .. versionadded:: 1.1

    array_api_dispatch : bool, default=None
        Use Array API dispatching when inputs follow the Array API standard.
        Default is False.

        See the :ref:`User Guide <array_api>` for more details.

        .. versionadded:: 1.2

    transform_output : str, default=None
        Configure output of `transform` and `fit_transform`.

        See :ref:`sphx_glr_auto_examples_miscellaneous_plot_set_output.py`
        for an example on how to use the API.

        - `"default"`: Default output format of a transformer
        - `"pandas"`: DataFrame output
        - `"polars"`: Polars output
        - `None`: Transform configuration is unchanged

        .. versionadded:: 1.2
        .. versionadded:: 1.4
            `"polars"` option was added.

    enable_metadata_routing : bool, default=None
        Enable metadata routing. By default this feature is disabled.

        Refer to :ref:`metadata routing user guide <metadata_routing>` for more
        details.

        - `True`: Metadata routing is enabled
        - `False`: Metadata routing is disabled, use the old syntax.
        - `None`: Configuration is unchanged

        .. versionadded:: 1.3

    skip_parameter_validation : bool, default=None
        If `True`, disable the validation of the hyper-parameters' types and values in
        the fit method of estimators and for arguments passed to public helper
        functions. It can save time in some situations but can lead to low level
        crashes and exceptions with confusing error messages.

        Note that for data parameters, such as `X` and `y`, only type validation is
        skipped but validation with `check_array` will continue to run.

        .. versionadded:: 1.3

    Yields
    ------
    None.

    See Also
    --------
    set_config : Set global scikit-learn configuration.
    get_config : Retrieve current values of the global configuration.

    Notes
    -----
    All settings, not just those presently modified, will be returned to
    their previous values when the context manager is exited.

    Examples
    --------
    >>> import sklearn
    >>> from sklearn.utils.validation import assert_all_finite
    >>> with sklearn.config_context(assume_finite=True):
    ...     assert_all_finite([float('nan')])
    >>> with sklearn.config_context(assume_finite=True):
    ...     with sklearn.config_context(assume_finite=False):
    ...         assert_all_finite([float('nan')])
    Traceback (most recent call last):
    ...
    ValueError: Input contains NaN...
    """
    old_config = get_config()
    set_config(
        assume_finite=assume_finite,
        working_memory=working_memory,
        print_changed_only=print_changed_only,
        display=display,
        pairwise_dist_chunk_size=pairwise_dist_chunk_size,
        enable_cython_pairwise_dist=enable_cython_pairwise_dist,
        array_api_dispatch=array_api_dispatch,
        transform_output=transform_output,
        enable_metadata_routing=enable_metadata_routing,
        skip_parameter_validation=skip_parameter_validation,
    )

    try:
        yield
    finally:
        set_config(**old_config)
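
The configuration machinery above is easiest to see in a short usage sketch. This is illustrative only (the `SVC` estimator and toy data below are not part of `_config.py`) and assumes the package imports as `sklearn`:

    # Illustrative sketch of the thread-local configuration API defined above.
    import sklearn
    from sklearn.svm import SVC

    print(sklearn.get_config()["assume_finite"])  # False unless SKLEARN_ASSUME_FINITE is set

    # config_context changes settings only inside the `with` block.
    with sklearn.config_context(assume_finite=True):
        assert sklearn.get_config()["assume_finite"] is True
        SVC().fit([[0.0], [1.0]], [0, 1])  # finiteness checks are skipped here

    # Every setting is restored on exit, even if an exception was raised.
    assert sklearn.get_config()["assume_finite"] is False

    # set_config persists for the current thread until changed again.
    sklearn.set_config(display="text")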
    	
        evalkit_tf437/lib/python3.10/site-packages/sklearn/_distributor_init.py
    ADDED
    
@@ -0,0 +1,13 @@
"""Distributor init file

Distributors: you can add custom code here to support particular distributions
of scikit-learn.

For example, this is a good place to put any checks for hardware requirements.

The scikit-learn standard source distribution will not put code in this file,
so you can safely replace this file with your own version.
"""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause
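
Since the file above intentionally carries no logic, here is a purely hypothetical sketch of the kind of hardware check a distributor might drop in; none of this ships with scikit-learn:

    # Hypothetical distributor-added check (not part of scikit-learn itself):
    # refuse to import a build compiled for x86-64 on an unsupported machine.
    import platform

    _machine = platform.machine().lower()
    if _machine in ("i386", "i486", "i586", "i686"):
        raise ImportError(
            "This scikit-learn build was compiled for x86-64 CPUs; "
            f"detected machine type {_machine!r}."
        )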
    	
        evalkit_tf437/lib/python3.10/site-packages/sklearn/_isotonic.pyx
    ADDED
    
@@ -0,0 +1,115 @@
# Author: Nelle Varoquaux, Andrew Tulloch, Antony Lee

# Uses the pool adjacent violators algorithm (PAVA), with the
# enhancement of searching for the longest decreasing subsequence to
# pool at each step.

import numpy as np
from cython cimport floating


def _inplace_contiguous_isotonic_regression(floating[::1] y, floating[::1] w):
    cdef:
        Py_ssize_t n = y.shape[0], i, k
        floating prev_y, sum_wy, sum_w
        Py_ssize_t[::1] target = np.arange(n, dtype=np.intp)

    # target describes a list of blocks.  At any time, if [i..j] (inclusive) is
    # an active block, then target[i] := j and target[j] := i.

    # For "active" indices (block starts):
    # w[i] := sum{w_orig[j], j=[i..target[i]]}
    # y[i] := sum{y_orig[j]*w_orig[j], j=[i..target[i]]} / w[i]

    with nogil:
        i = 0
        while i < n:
            k = target[i] + 1
            if k == n:
                break
            if y[i] < y[k]:
                i = k
                continue
            sum_wy = w[i] * y[i]
            sum_w = w[i]
            while True:
                # We are within a decreasing subsequence.
                prev_y = y[k]
                sum_wy += w[k] * y[k]
                sum_w += w[k]
                k = target[k] + 1
                if k == n or prev_y < y[k]:
                    # Non-singleton decreasing subsequence is finished,
                    # update first entry.
                    y[i] = sum_wy / sum_w
                    w[i] = sum_w
                    target[i] = k - 1
                    target[k - 1] = i
                    if i > 0:
                        # Backtrack if we can.  This makes the algorithm
                        # single-pass and ensures O(n) complexity.
                        i = target[i - 1]
                    # Otherwise, restart from the same point.
                    break
        # Reconstruct the solution.
        i = 0
        while i < n:
            k = target[i] + 1
            y[i + 1 : k] = y[i]
            i = k


def _make_unique(const floating[::1] X,
                 const floating[::1] y,
                 const floating[::1] sample_weights):
    """Average targets for duplicate X, drop duplicates.

    Aggregates duplicate X values into a single X value where
    the target y is a (sample_weighted) average of the individual
    targets.

    Assumes that X is ordered, so that all duplicates follow each other.
    """
    unique_values = len(np.unique(X))

    if floating is float:
        dtype = np.float32
    else:
        dtype = np.float64

    cdef floating[::1] y_out = np.empty(unique_values, dtype=dtype)
    cdef floating[::1] x_out = np.empty_like(y_out)
    cdef floating[::1] weights_out = np.empty_like(y_out)

    cdef floating current_x = X[0]
    cdef floating current_y = 0
    cdef floating current_weight = 0
    cdef int i = 0
    cdef int j
    cdef floating x
    cdef int n_samples = len(X)
    cdef floating eps = np.finfo(dtype).resolution

    for j in range(n_samples):
        x = X[j]
        if x - current_x >= eps:
            # next unique value
            x_out[i] = current_x
            weights_out[i] = current_weight
            y_out[i] = current_y / current_weight
            i += 1
            current_x = x
            current_weight = sample_weights[j]
            current_y = y[j] * sample_weights[j]
        else:
            current_weight += sample_weights[j]
            current_y += y[j] * sample_weights[j]

    x_out[i] = current_x
    weights_out[i] = current_weight
    y_out[i] = current_y / current_weight
    return(
        np.asarray(x_out[:i+1]),
        np.asarray(y_out[:i+1]),
        np.asarray(weights_out[:i+1]),
    )
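
For orientation, the pooling idea that the `nogil` loop above implements can be written as a short pure-Python reference. The sketch below is a simplified, block-merging illustration of PAVA under the assumption of equal or user-supplied weights; it is not the in-place, single-pass Cython routine itself:

    import numpy as np


    def pava_reference(y, w=None):
        """Simplified pool-adjacent-violators reference (illustration only)."""
        y = np.asarray(y, dtype=float)
        w = np.ones_like(y) if w is None else np.asarray(w, dtype=float)
        # Each block: [start index, end index, weighted mean, total weight].
        blocks = [[i, i, yi, wi] for i, (yi, wi) in enumerate(zip(y, w))]
        i = 0
        while i < len(blocks) - 1:
            if blocks[i][2] > blocks[i + 1][2]:
                # Monotonicity violated: pool the two blocks into one block
                # carrying their weighted average.
                s, e = blocks[i][0], blocks[i + 1][1]
                tw = blocks[i][3] + blocks[i + 1][3]
                mean = (blocks[i][2] * blocks[i][3]
                        + blocks[i + 1][2] * blocks[i + 1][3]) / tw
                blocks[i:i + 2] = [[s, e, mean, tw]]
                i = max(i - 1, 0)  # re-check against the previous block
            else:
                i += 1
        out = np.empty_like(y)
        for s, e, mean, _ in blocks:
            out[s:e + 1] = mean
        return out


    print(pava_reference([1.0, 3.0, 2.0, 4.0]))  # -> [1.  2.5 2.5 4. ]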
    	
        evalkit_tf437/lib/python3.10/site-packages/sklearn/_min_dependencies.py
    ADDED
    
@@ -0,0 +1,75 @@
"""All minimum dependencies for scikit-learn."""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

import argparse
from collections import defaultdict

# scipy and cython should be in sync with pyproject.toml
NUMPY_MIN_VERSION = "1.19.5"
SCIPY_MIN_VERSION = "1.6.0"
JOBLIB_MIN_VERSION = "1.2.0"
THREADPOOLCTL_MIN_VERSION = "3.1.0"
PYTEST_MIN_VERSION = "7.1.2"
CYTHON_MIN_VERSION = "3.0.10"


# 'build' and 'install' are included to have structured metadata for CI.
# They will NOT be included in setup's extras_require.
# The values are (version_spec, comma separated tags)
dependent_packages = {
    "numpy": (NUMPY_MIN_VERSION, "build, install"),
    "scipy": (SCIPY_MIN_VERSION, "build, install"),
    "joblib": (JOBLIB_MIN_VERSION, "install"),
    "threadpoolctl": (THREADPOOLCTL_MIN_VERSION, "install"),
    "cython": (CYTHON_MIN_VERSION, "build"),
    "meson-python": ("0.16.0", "build"),
    "matplotlib": ("3.3.4", "benchmark, docs, examples, tests"),
    "scikit-image": ("0.17.2", "docs, examples, tests"),
    "pandas": ("1.1.5", "benchmark, docs, examples, tests"),
    "seaborn": ("0.9.0", "docs, examples"),
    "memory_profiler": ("0.57.0", "benchmark, docs"),
    "pytest": (PYTEST_MIN_VERSION, "tests"),
    "pytest-cov": ("2.9.0", "tests"),
    "ruff": ("0.5.1", "tests"),
    "black": ("24.3.0", "tests"),
    "mypy": ("1.9", "tests"),
    "pyamg": ("4.0.0", "tests"),
    "polars": ("0.20.30", "docs, tests"),
    "pyarrow": ("12.0.0", "tests"),
    "sphinx": ("7.3.7", "docs"),
    "sphinx-copybutton": ("0.5.2", "docs"),
    "sphinx-gallery": ("0.17.1", "docs"),
    "numpydoc": ("1.2.0", "docs, tests"),
    "Pillow": ("7.1.2", "docs"),
    "pooch": ("1.6.0", "docs, examples, tests"),
    "sphinx-prompt": ("1.4.0", "docs"),
    "sphinxext-opengraph": ("0.9.1", "docs"),
    "plotly": ("5.14.0", "docs, examples"),
    "sphinxcontrib-sass": ("0.3.4", "docs"),
    "sphinx-remove-toctrees": ("1.0.0.post1", "docs"),
    "sphinx-design": ("0.6.0", "docs"),
    "pydata-sphinx-theme": ("0.15.3", "docs"),
    "towncrier": ("24.8.0", "docs"),
    # XXX: Pin conda-lock to the latest released version (needs manual update
    # from time to time)
    "conda-lock": ("2.5.6", "maintenance"),
}


# create inverse mapping for setuptools
tag_to_packages: dict = defaultdict(list)
for package, (min_version, extras) in dependent_packages.items():
    for extra in extras.split(", "):
        tag_to_packages[extra].append("{}>={}".format(package, min_version))


# Used by CI to get the min dependencies
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Get min dependencies for a package")

    parser.add_argument("package", choices=dependent_packages)
    args = parser.parse_args()
    min_version = dependent_packages[args.package][0]
    print(min_version)
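
A quick way to see how the table above is consumed: the module can be queried from Python, or run as a script as the trailing `__main__` block suggests. The `python -m` invocation below is a hypothetical example, not necessarily how CI calls it:

    # Illustrative only; assumes the package is importable as `sklearn`.
    from sklearn._min_dependencies import dependent_packages, tag_to_packages

    print(dependent_packages["numpy"][0])  # minimum numpy version, e.g. "1.19.5"
    print(sorted(tag_to_packages)[:3])     # tags such as 'benchmark', 'build', 'docs'

    # Hypothetical command-line equivalent (prints only the version string):
    #   python -m sklearn._min_dependencies numpy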
    	
        evalkit_tf437/lib/python3.10/site-packages/sklearn/base.py
    ADDED
    
@@ -0,0 +1,1393 @@
"""Base classes for all estimators and various utility functions."""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

import copy
import functools
import inspect
import platform
import re
import warnings
from collections import defaultdict

import numpy as np

from . import __version__
from ._config import config_context, get_config
from .exceptions import InconsistentVersionWarning
from .utils._estimator_html_repr import _HTMLDocumentationLinkMixin, estimator_html_repr
from .utils._metadata_requests import _MetadataRequester, _routing_enabled
from .utils._param_validation import validate_parameter_constraints
from .utils._set_output import _SetOutputMixin
from .utils._tags import (
    ClassifierTags,
    RegressorTags,
    Tags,
    TargetTags,
    TransformerTags,
    get_tags,
)
from .utils.fixes import _IS_32BIT
from .utils.validation import (
    _check_feature_names,
    _check_feature_names_in,
    _check_n_features,
    _generate_get_feature_names_out,
    _is_fitted,
    check_array,
    check_is_fitted,
    validate_data,
)

def clone(estimator, *, safe=True):
    """Construct a new unfitted estimator with the same parameters.

    Clone does a deep copy of the model in an estimator
    without actually copying attached data. It returns a new estimator
    with the same parameters that has not been fitted on any data.

    .. versionchanged:: 1.3
        Delegates to `estimator.__sklearn_clone__` if the method exists.

    Parameters
    ----------
    estimator : {list, tuple, set} of estimator instance or a single \
            estimator instance
        The estimator or group of estimators to be cloned.
    safe : bool, default=True
        If safe is False, clone will fall back to a deep copy on objects
        that are not estimators. Ignored if `estimator.__sklearn_clone__`
        exists.

    Returns
    -------
    estimator : object
        The deep copy of the input, an estimator if input is an estimator.

    Notes
    -----
    If the estimator's `random_state` parameter is an integer (or if the
    estimator doesn't have a `random_state` parameter), an *exact clone* is
    returned: the clone and the original estimator will give the exact same
    results. Otherwise, a *statistical clone* is returned: the clone might
    return different results from the original estimator. More details can be
    found in :ref:`randomness`.

    Examples
    --------
    >>> from sklearn.base import clone
    >>> from sklearn.linear_model import LogisticRegression
    >>> X = [[-1, 0], [0, 1], [0, -1], [1, 0]]
    >>> y = [0, 0, 1, 1]
    >>> classifier = LogisticRegression().fit(X, y)
    >>> cloned_classifier = clone(classifier)
    >>> hasattr(classifier, "classes_")
    True
    >>> hasattr(cloned_classifier, "classes_")
    False
    >>> classifier is cloned_classifier
    False
    """
    if hasattr(estimator, "__sklearn_clone__") and not inspect.isclass(estimator):
        return estimator.__sklearn_clone__()
    return _clone_parametrized(estimator, safe=safe)

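# Illustrative sketch (not part of the original module): the versionchanged
# note above says `clone` delegates to `estimator.__sklearn_clone__` when that
# method exists. `_demo_sklearn_clone_hook` and `FrozenParam` are hypothetical
# names used only to demonstrate the hook.
def _demo_sklearn_clone_hook():
    from sklearn.base import BaseEstimator, clone

    class FrozenParam(BaseEstimator):
        """Toy estimator whose clone always resets `param` to 0."""

        def __init__(self, param=1):
            self.param = param

        def __sklearn_clone__(self):
            # `clone` calls this hook instead of copying constructor parameters.
            return FrozenParam(param=0)

    original = FrozenParam(param=5)
    cloned = clone(original)
    assert cloned.param == 0  # the custom hook took precedence
    return cloned
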
def _clone_parametrized(estimator, *, safe=True):
    """Default implementation of clone. See :func:`sklearn.base.clone` for details."""

    estimator_type = type(estimator)
    if estimator_type is dict:
        return {k: clone(v, safe=safe) for k, v in estimator.items()}
    elif estimator_type in (list, tuple, set, frozenset):
        return estimator_type([clone(e, safe=safe) for e in estimator])
    elif not hasattr(estimator, "get_params") or isinstance(estimator, type):
        if not safe:
            return copy.deepcopy(estimator)
        else:
            if isinstance(estimator, type):
                raise TypeError(
                    "Cannot clone object. "
                    + "You should provide an instance of "
                    + "scikit-learn estimator instead of a class."
                )
            else:
                raise TypeError(
                    "Cannot clone object '%s' (type %s): "
                    "it does not seem to be a scikit-learn "
                    "estimator as it does not implement a "
                    "'get_params' method." % (repr(estimator), type(estimator))
                )

    klass = estimator.__class__
    new_object_params = estimator.get_params(deep=False)
    for name, param in new_object_params.items():
        new_object_params[name] = clone(param, safe=False)

    new_object = klass(**new_object_params)
    try:
        new_object._metadata_request = copy.deepcopy(estimator._metadata_request)
    except AttributeError:
        pass

    params_set = new_object.get_params(deep=False)

    # quick sanity check of the parameters of the clone
    for name in new_object_params:
        param1 = new_object_params[name]
        param2 = params_set[name]
        if param1 is not param2:
            raise RuntimeError(
                "Cannot clone object %s, as the constructor "
                "either does not set or modifies parameter %s" % (estimator, name)
            )

    # _sklearn_output_config is used by `set_output` to configure the output
    # container of an estimator.
    if hasattr(estimator, "_sklearn_output_config"):
        new_object._sklearn_output_config = copy.deepcopy(
            estimator._sklearn_output_config
        )
    return new_object

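# Illustrative sketch (not part of the original module): the sanity check in
# `_clone_parametrized` above raises RuntimeError when a constructor rewrites
# its arguments, because the parameter passed to the clone is then a different
# object from the one reported by `get_params`. `_BadInit` is a hypothetical
# offender used only to trigger that check.
def _demo_clone_sanity_check():
    from sklearn.base import BaseEstimator, clone

    class _BadInit(BaseEstimator):
        def __init__(self, values=(1, 2)):
            # Replacing the argument breaks the `param1 is param2` identity check.
            self.values = list(values)

    try:
        clone(_BadInit(values=[1, 2]))
    except RuntimeError as exc:
        # "Cannot clone object ... does not set or modifies parameter values"
        return exc
    raise AssertionError("expected clone to raise RuntimeError")
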
class BaseEstimator(_HTMLDocumentationLinkMixin, _MetadataRequester):
    """Base class for all estimators in scikit-learn.

    Inheriting from this class provides default implementations of:

    - setting and getting parameters used by `GridSearchCV` and friends;
    - textual and HTML representation displayed in terminals and IDEs;
    - estimator serialization;
    - parameters validation;
    - data validation;
    - feature names validation.

    Read more in the :ref:`User Guide <rolling_your_own_estimator>`.


    Notes
    -----
    All estimators should specify all the parameters that can be set
    at the class level in their ``__init__`` as explicit keyword
    arguments (no ``*args`` or ``**kwargs``).

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import BaseEstimator
    >>> class MyEstimator(BaseEstimator):
    ...     def __init__(self, *, param=1):
    ...         self.param = param
    ...     def fit(self, X, y=None):
    ...         self.is_fitted_ = True
    ...         return self
    ...     def predict(self, X):
    ...         return np.full(shape=X.shape[0], fill_value=self.param)
    >>> estimator = MyEstimator(param=2)
    >>> estimator.get_params()
    {'param': 2}
    >>> X = np.array([[1, 2], [2, 3], [3, 4]])
    >>> y = np.array([1, 0, 1])
    >>> estimator.fit(X, y).predict(X)
    array([2, 2, 2])
    >>> estimator.set_params(param=3).fit(X, y).predict(X)
    array([3, 3, 3])
    """

    @classmethod
    def _get_param_names(cls):
        """Get parameter names for the estimator"""
        # fetch the constructor or the original constructor before
        # deprecation wrapping if any
        init = getattr(cls.__init__, "deprecated_original", cls.__init__)
        if init is object.__init__:
            # No explicit constructor to introspect
            return []

        # introspect the constructor arguments to find the model parameters
        # to represent
        init_signature = inspect.signature(init)
        # Consider the constructor parameters excluding 'self'
        parameters = [
            p
            for p in init_signature.parameters.values()
            if p.name != "self" and p.kind != p.VAR_KEYWORD
        ]
        for p in parameters:
            if p.kind == p.VAR_POSITIONAL:
                raise RuntimeError(
                    "scikit-learn estimators should always "
                    "specify their parameters in the signature"
                    " of their __init__ (no varargs)."
                    " %s with constructor %s doesn't "
                    " follow this convention." % (cls, init_signature)
                )
        # Extract and sort argument names excluding 'self'
        return sorted([p.name for p in parameters])

    def get_params(self, deep=True):
        """
        Get parameters for this estimator.

        Parameters
        ----------
        deep : bool, default=True
            If True, will return the parameters for this estimator and
            contained subobjects that are estimators.

        Returns
        -------
        params : dict
            Parameter names mapped to their values.
        """
        out = dict()
        for key in self._get_param_names():
            value = getattr(self, key)
            if deep and hasattr(value, "get_params") and not isinstance(value, type):
                deep_items = value.get_params().items()
                out.update((key + "__" + k, val) for k, val in deep_items)
            out[key] = value
        return out

    def set_params(self, **params):
        """Set the parameters of this estimator.

        The method works on simple estimators as well as on nested objects
        (such as :class:`~sklearn.pipeline.Pipeline`). The latter have
        parameters of the form ``<component>__<parameter>`` so that it's
        possible to update each component of a nested object.

        Parameters
        ----------
        **params : dict
            Estimator parameters.

        Returns
        -------
        self : estimator instance
            Estimator instance.
        """
        if not params:
            # Simple optimization to gain speed (inspect is slow)
            return self
        valid_params = self.get_params(deep=True)

        nested_params = defaultdict(dict)  # grouped by prefix
        for key, value in params.items():
            key, delim, sub_key = key.partition("__")
            if key not in valid_params:
                local_valid_params = self._get_param_names()
                raise ValueError(
                    f"Invalid parameter {key!r} for estimator {self}. "
                    f"Valid parameters are: {local_valid_params!r}."
                )

            if delim:
                nested_params[key][sub_key] = value
            else:
                setattr(self, key, value)
                valid_params[key] = value

        for key, sub_params in nested_params.items():
            valid_params[key].set_params(**sub_params)

        return self

    def __sklearn_clone__(self):
        return _clone_parametrized(self)

    def __repr__(self, N_CHAR_MAX=700):
        # N_CHAR_MAX is the (approximate) maximum number of non-blank
        # characters to render. We pass it as an optional parameter to ease
        # the tests.

        from .utils._pprint import _EstimatorPrettyPrinter

        N_MAX_ELEMENTS_TO_SHOW = 30  # number of elements to show in sequences

        # use ellipsis for sequences with a lot of elements
        pp = _EstimatorPrettyPrinter(
            compact=True,
            indent=1,
            indent_at_name=True,
            n_max_elements_to_show=N_MAX_ELEMENTS_TO_SHOW,
        )

        repr_ = pp.pformat(self)

        # Use bruteforce ellipsis when there are a lot of non-blank characters
        n_nonblank = len("".join(repr_.split()))
        if n_nonblank > N_CHAR_MAX:
            lim = N_CHAR_MAX // 2  # approx. number of chars to keep on both ends
            regex = r"^(\s*\S){%d}" % lim
            # The regex '^(\s*\S){%d}' % n
            # matches from the start of the string until the nth non-blank
            # character:
            # - ^ matches the start of string
            # - (pattern){n} matches n repetitions of pattern
            # - \s*\S matches a non-blank char following zero or more blanks
            left_lim = re.match(regex, repr_).end()
            right_lim = re.match(regex, repr_[::-1]).end()

            if "\n" in repr_[left_lim:-right_lim]:
                # The left side and right side aren't on the same line.
                # To avoid weird cuts, e.g.:
                # categoric...ore',
                # we need to start the right side with an appropriate newline
                # character so that it renders properly as:
                # categoric...
                # handle_unknown='ignore',
                # so we add [^\n]*\n which matches until the next \n
                regex += r"[^\n]*\n"
                right_lim = re.match(regex, repr_[::-1]).end()

            ellipsis = "..."
            if left_lim + len(ellipsis) < len(repr_) - right_lim:
                # Only add ellipsis if it results in a shorter repr
                repr_ = repr_[:left_lim] + "..." + repr_[-right_lim:]

        return repr_

    def __getstate__(self):
        if getattr(self, "__slots__", None):
            raise TypeError(
                "You cannot use `__slots__` in objects inheriting from "
                "`sklearn.base.BaseEstimator`."
            )

        try:
            state = super().__getstate__()
            if state is None:
                # For Python 3.11+, empty instance (no `__slots__`,
                # and `__dict__`) will return a state equal to `None`.
                state = self.__dict__.copy()
        except AttributeError:
            # Python < 3.11
            state = self.__dict__.copy()

        if type(self).__module__.startswith("sklearn."):
            return dict(state.items(), _sklearn_version=__version__)
        else:
            return state

    def __setstate__(self, state):
        if type(self).__module__.startswith("sklearn."):
            pickle_version = state.pop("_sklearn_version", "pre-0.18")
            if pickle_version != __version__:
                warnings.warn(
                    InconsistentVersionWarning(
                        estimator_name=self.__class__.__name__,
                        current_sklearn_version=__version__,
                        original_sklearn_version=pickle_version,
                    ),
                )
        try:
            super().__setstate__(state)
        except AttributeError:
            self.__dict__.update(state)

    # TODO(1.7): Remove this method
    def _more_tags(self):
        """This code should never be reached since our `get_tags` will fall back on
        `__sklearn_tags__` implemented below. We keep it for backward compatibility.
        It is tested in `test_base_estimator_more_tags` in
        `sklearn/utils/testing/test_tags.py`."""
        from sklearn.utils._tags import _to_old_tags, default_tags

        warnings.warn(
            "The `_more_tags` method is deprecated in 1.6 and will be removed in "
            "1.7. Please implement the `__sklearn_tags__` method.",
            category=DeprecationWarning,
        )
        return _to_old_tags(default_tags(self))

    # TODO(1.7): Remove this method
    def _get_tags(self):
        from sklearn.utils._tags import _to_old_tags, get_tags

        warnings.warn(
            "The `_get_tags` method is deprecated in 1.6 and will be removed in "
            "1.7. Please implement the `__sklearn_tags__` method.",
            category=DeprecationWarning,
        )

        return _to_old_tags(get_tags(self))

    def __sklearn_tags__(self):
        return Tags(
            estimator_type=None,
            target_tags=TargetTags(required=False),
            transformer_tags=None,
            regressor_tags=None,
            classifier_tags=None,
        )

    def _validate_params(self):
        """Validate types and values of constructor parameters

        The expected type and values must be defined in the `_parameter_constraints`
        class attribute, which is a dictionary `param_name: list of constraints`. See
        the docstring of `validate_parameter_constraints` for a description of the
        accepted constraints.
        """
        validate_parameter_constraints(
            self._parameter_constraints,
            self.get_params(deep=False),
            caller_name=self.__class__.__name__,
        )

    @property
    def _repr_html_(self):
        """HTML representation of estimator.

        This is redundant with the logic of `_repr_mimebundle_`. The latter
        should be favored in the long term, `_repr_html_` is only
        implemented for consumers who do not interpret `_repr_mimebundle_`.
        """
        if get_config()["display"] != "diagram":
            raise AttributeError(
                "_repr_html_ is only defined when the "
                "'display' configuration option is set to "
                "'diagram'"
            )
        return self._repr_html_inner

    def _repr_html_inner(self):
        """This function is returned by the @property `_repr_html_` to make
        `hasattr(estimator, "_repr_html_")` return `True` or `False` depending
        on `get_config()["display"]`.
        """
        return estimator_html_repr(self)

    def _repr_mimebundle_(self, **kwargs):
        """Mime bundle used by jupyter kernels to display estimator"""
        output = {"text/plain": repr(self)}
        if get_config()["display"] == "diagram":
            output["text/html"] = estimator_html_repr(self)
        return output

    # TODO(1.7): Remove this method
    def _validate_data(self, *args, **kwargs):
        warnings.warn(
            "`BaseEstimator._validate_data` is deprecated in 1.6 and will be removed "
            "in 1.7. Use `sklearn.utils.validation.validate_data` instead. This "
            "function becomes public and is part of the scikit-learn developer API.",
            FutureWarning,
        )
        return validate_data(self, *args, **kwargs)

    # TODO(1.7): Remove this method
    def _check_n_features(self, *args, **kwargs):
        warnings.warn(
            "`BaseEstimator._check_n_features` is deprecated in 1.6 and will be "
            "removed in 1.7. Use `sklearn.utils.validation._check_n_features` instead.",
            FutureWarning,
        )
        _check_n_features(self, *args, **kwargs)

    # TODO(1.7): Remove this method
    def _check_feature_names(self, *args, **kwargs):
        warnings.warn(
            "`BaseEstimator._check_feature_names` is deprecated in 1.6 and will be "
            "removed in 1.7. Use `sklearn.utils.validation._check_feature_names` "
            "instead.",
            FutureWarning,
        )
        _check_feature_names(self, *args, **kwargs)

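# Illustrative sketch (not part of the original module): `get_params(deep=True)`
# and `set_params` implement the nested ``<component>__<parameter>`` addressing
# documented above. `_Inner` and `_Wrapper` are hypothetical estimators used
# only to show how a sub-estimator's parameter is read and updated.
def _demo_nested_params():
    from sklearn.base import BaseEstimator

    class _Inner(BaseEstimator):
        def __init__(self, alpha=1.0):
            self.alpha = alpha

    class _Wrapper(BaseEstimator):
        def __init__(self, inner=None):
            self.inner = inner

    wrapper = _Wrapper(inner=_Inner(alpha=0.5))
    # Deep params expose the sub-estimator's parameters under the "inner__" prefix.
    assert wrapper.get_params(deep=True)["inner__alpha"] == 0.5
    # The same prefix routes updates to the sub-estimator.
    wrapper.set_params(inner__alpha=2.0)
    assert wrapper.inner.alpha == 2.0
    return wrapper
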
class ClassifierMixin:
    """Mixin class for all classifiers in scikit-learn.

    This mixin defines the following functionality:

    - set estimator type to `"classifier"` through the `estimator_type` tag;
    - `score` method that defaults to :func:`~sklearn.metrics.accuracy_score`;
    - enforce that `fit` requires `y` to be passed through the `requires_y` tag,
      which is done by setting the classifier type tag.

    Read more in the :ref:`User Guide <rolling_your_own_estimator>`.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import BaseEstimator, ClassifierMixin
    >>> # Mixin classes should always be on the left-hand side for a correct MRO
    >>> class MyEstimator(ClassifierMixin, BaseEstimator):
    ...     def __init__(self, *, param=1):
    ...         self.param = param
    ...     def fit(self, X, y=None):
    ...         self.is_fitted_ = True
    ...         return self
    ...     def predict(self, X):
    ...         return np.full(shape=X.shape[0], fill_value=self.param)
    >>> estimator = MyEstimator(param=1)
    >>> X = np.array([[1, 2], [2, 3], [3, 4]])
    >>> y = np.array([1, 0, 1])
    >>> estimator.fit(X, y).predict(X)
    array([1, 1, 1])
    >>> estimator.score(X, y)
    0.66...
    """

    # TODO(1.8): Remove this attribute
    _estimator_type = "classifier"

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.estimator_type = "classifier"
        tags.classifier_tags = ClassifierTags()
        tags.target_tags.required = True
        return tags

    def score(self, X, y, sample_weight=None):
        """
        Return the mean accuracy on the given test data and labels.

        In multi-label classification, this is the subset accuracy
        which is a harsh metric since you require for each sample that
        each label set be correctly predicted.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test samples.

        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
            True labels for `X`.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights.

        Returns
        -------
        score : float
            Mean accuracy of ``self.predict(X)`` w.r.t. `y`.
        """
        from .metrics import accuracy_score

        return accuracy_score(y, self.predict(X), sample_weight=sample_weight)

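# Illustrative sketch (not part of the original module): `ClassifierMixin.score`
# above is simply `accuracy_score(y, self.predict(X))`. `_AlwaysOne` is a
# hypothetical constant classifier chosen so the accuracy is easy to verify
# by hand.
def _demo_classifier_score():
    import numpy as np

    from sklearn.base import BaseEstimator, ClassifierMixin
    from sklearn.metrics import accuracy_score

    class _AlwaysOne(ClassifierMixin, BaseEstimator):
        def fit(self, X, y=None):
            self.is_fitted_ = True
            return self

        def predict(self, X):
            return np.ones(len(X), dtype=int)

    X = np.array([[0.0], [1.0], [2.0], [3.0]])
    y = np.array([1, 0, 1, 1])  # three of the four samples are labelled 1
    clf = _AlwaysOne().fit(X, y)
    assert clf.score(X, y) == accuracy_score(y, clf.predict(X)) == 0.75
    return clf.score(X, y)
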
class RegressorMixin:
    """Mixin class for all regression estimators in scikit-learn.

    This mixin defines the following functionality:

    - set estimator type to `"regressor"` through the `estimator_type` tag;
    - `score` method that defaults to :func:`~sklearn.metrics.r2_score`;
    - enforce that `fit` requires `y` to be passed through the `requires_y` tag,
      which is done by setting the regressor type tag.

    Read more in the :ref:`User Guide <rolling_your_own_estimator>`.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import BaseEstimator, RegressorMixin
    >>> # Mixin classes should always be on the left-hand side for a correct MRO
    >>> class MyEstimator(RegressorMixin, BaseEstimator):
    ...     def __init__(self, *, param=1):
    ...         self.param = param
    ...     def fit(self, X, y=None):
    ...         self.is_fitted_ = True
    ...         return self
    ...     def predict(self, X):
    ...         return np.full(shape=X.shape[0], fill_value=self.param)
    >>> estimator = MyEstimator(param=0)
    >>> X = np.array([[1, 2], [2, 3], [3, 4]])
    >>> y = np.array([-1, 0, 1])
    >>> estimator.fit(X, y).predict(X)
    array([0, 0, 0])
    >>> estimator.score(X, y)
    0.0
    """

    # TODO(1.8): Remove this attribute
    _estimator_type = "regressor"

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.estimator_type = "regressor"
        tags.regressor_tags = RegressorTags()
        tags.target_tags.required = True
        return tags

    def score(self, X, y, sample_weight=None):
        """Return the coefficient of determination of the prediction.

        The coefficient of determination :math:`R^2` is defined as
        :math:`(1 - \\frac{u}{v})`, where :math:`u` is the residual
        sum of squares ``((y_true - y_pred) ** 2).sum()`` and :math:`v`
        is the total sum of squares ``((y_true - y_true.mean()) ** 2).sum()``.
        The best possible score is 1.0 and it can be negative (because the
        model can be arbitrarily worse). A constant model that always predicts
        the expected value of `y`, disregarding the input features, would get
        an :math:`R^2` score of 0.0.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test samples. For some estimators this may be a precomputed
            kernel matrix or a list of generic objects instead with shape
            ``(n_samples, n_samples_fitted)``, where ``n_samples_fitted``
            is the number of samples used in the fitting for the estimator.

        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
            True values for `X`.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights.

        Returns
        -------
        score : float
            :math:`R^2` of ``self.predict(X)`` w.r.t. `y`.

        Notes
        -----
        The :math:`R^2` score used when calling ``score`` on a regressor uses
        ``multioutput='uniform_average'`` from version 0.23 to keep consistent
        with the default value of :func:`~sklearn.metrics.r2_score`.
        This influences the ``score`` method of all the multioutput
        regressors (except for
        :class:`~sklearn.multioutput.MultiOutputRegressor`).
        """

        from .metrics import r2_score

        y_pred = self.predict(X)
        return r2_score(y, y_pred, sample_weight=sample_weight)

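# Illustrative sketch (not part of the original module): `RegressorMixin.score`
# above returns R^2 = 1 - u / v, with `u` the residual and `v` the total sum of
# squares from the docstring. `_ConstantTwo` is a hypothetical regressor chosen
# so both sums are easy to verify by hand.
def _demo_regressor_score():
    import numpy as np

    from sklearn.base import BaseEstimator, RegressorMixin

    class _ConstantTwo(RegressorMixin, BaseEstimator):
        def fit(self, X, y=None):
            self.is_fitted_ = True
            return self

        def predict(self, X):
            return np.full(len(X), 2.0)

    X = np.zeros((4, 1))
    y = np.array([1.0, 2.0, 3.0, 4.0])     # mean is 2.5
    reg = _ConstantTwo().fit(X, y)
    u = ((y - reg.predict(X)) ** 2).sum()  # residual sum of squares: 1 + 0 + 1 + 4 = 6
    v = ((y - y.mean()) ** 2).sum()        # total sum of squares: 5
    assert np.isclose(reg.score(X, y), 1 - u / v)  # 1 - 6/5 = -0.2, worse than the mean
    return reg.score(X, y)
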
class ClusterMixin:
    """Mixin class for all cluster estimators in scikit-learn.

    - set estimator type to `"clusterer"` through the `estimator_type` tag;
    - `fit_predict` method returning the cluster labels associated with each sample.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import BaseEstimator, ClusterMixin
    >>> class MyClusterer(ClusterMixin, BaseEstimator):
    ...     def fit(self, X, y=None):
    ...         self.labels_ = np.ones(shape=(len(X),), dtype=np.int64)
    ...         return self
    >>> X = [[1, 2], [2, 3], [3, 4]]
    >>> MyClusterer().fit_predict(X)
    array([1, 1, 1])
    """

    # TODO(1.8): Remove this attribute
    _estimator_type = "clusterer"

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.estimator_type = "clusterer"
        if tags.transformer_tags is not None:
            tags.transformer_tags.preserves_dtype = []
        return tags

    def fit_predict(self, X, y=None, **kwargs):
        """
        Perform clustering on `X` and return cluster labels.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Input data.

        y : Ignored
            Not used, present for API consistency by convention.

        **kwargs : dict
            Arguments to be passed to ``fit``.

            .. versionadded:: 1.4

        Returns
        -------
        labels : ndarray of shape (n_samples,), dtype=np.int64
            Cluster labels.
        """
        # non-optimized default implementation; override when a better
        # method is possible for a given clustering algorithm
        self.fit(X, **kwargs)
        return self.labels_

| 723 | 
            +
class BiclusterMixin:
    """Mixin class for all bicluster estimators in scikit-learn.

    This mixin defines the following functionality:

    - `biclusters_` property that returns the row and column indicators;
    - `get_indices` method that returns the row and column indices of a bicluster;
    - `get_shape` method that returns the shape of a bicluster;
    - `get_submatrix` method that returns the submatrix corresponding to a bicluster.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import BaseEstimator, BiclusterMixin
    >>> class DummyBiClustering(BiclusterMixin, BaseEstimator):
    ...     def fit(self, X, y=None):
    ...         self.rows_ = np.ones(shape=(1, X.shape[0]), dtype=bool)
    ...         self.columns_ = np.ones(shape=(1, X.shape[1]), dtype=bool)
    ...         return self
    >>> X = np.array([[1, 1], [2, 1], [1, 0],
    ...               [4, 7], [3, 5], [3, 6]])
    >>> bicluster = DummyBiClustering().fit(X)
    >>> hasattr(bicluster, "biclusters_")
    True
    >>> bicluster.get_indices(0)
    (array([0, 1, 2, 3, 4, 5]), array([0, 1]))
    """

    @property
    def biclusters_(self):
        """Convenient way to get row and column indicators together.

        Returns the ``rows_`` and ``columns_`` members.
        """
        return self.rows_, self.columns_

    def get_indices(self, i):
        """Row and column indices of the `i`'th bicluster.

        Only works if ``rows_`` and ``columns_`` attributes exist.

        Parameters
        ----------
        i : int
            The index of the cluster.

        Returns
        -------
        row_ind : ndarray, dtype=np.intp
            Indices of rows in the dataset that belong to the bicluster.
        col_ind : ndarray, dtype=np.intp
            Indices of columns in the dataset that belong to the bicluster.
        """
        rows = self.rows_[i]
        columns = self.columns_[i]
        return np.nonzero(rows)[0], np.nonzero(columns)[0]

    def get_shape(self, i):
        """Shape of the `i`'th bicluster.

        Parameters
        ----------
        i : int
            The index of the cluster.

        Returns
        -------
        n_rows : int
            Number of rows in the bicluster.

        n_cols : int
            Number of columns in the bicluster.
        """
        indices = self.get_indices(i)
        return tuple(len(i) for i in indices)

    def get_submatrix(self, i, data):
        """Return the submatrix corresponding to bicluster `i`.

        Parameters
        ----------
        i : int
            The index of the cluster.
        data : array-like of shape (n_samples, n_features)
            The data.

        Returns
        -------
        submatrix : ndarray of shape (n_rows, n_cols)
            The submatrix corresponding to bicluster `i`.

        Notes
        -----
        Works with sparse matrices. Only works if ``rows_`` and
        ``columns_`` attributes exist.
        """

        data = check_array(data, accept_sparse="csr")
        row_ind, col_ind = self.get_indices(i)
        return data[row_ind[:, np.newaxis], col_ind]


class TransformerMixin(_SetOutputMixin):
    """Mixin class for all transformers in scikit-learn.

    This mixin defines the following functionality:

    - a `fit_transform` method that delegates to `fit` and `transform`;
    - a `set_output` method to output `X` as a specific container type.

    If :term:`get_feature_names_out` is defined, then :class:`BaseEstimator` will
    automatically wrap `transform` and `fit_transform` to follow the `set_output`
    API. See the :ref:`developer_api_set_output` for details.

    :class:`OneToOneFeatureMixin` and
    :class:`ClassNamePrefixFeaturesOutMixin` are helpful mixins for
    defining :term:`get_feature_names_out`.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import BaseEstimator, TransformerMixin
    >>> class MyTransformer(TransformerMixin, BaseEstimator):
    ...     def __init__(self, *, param=1):
    ...         self.param = param
    ...     def fit(self, X, y=None):
    ...         return self
    ...     def transform(self, X):
    ...         return np.full(shape=len(X), fill_value=self.param)
    >>> transformer = MyTransformer()
    >>> X = [[1, 2], [2, 3], [3, 4]]
    >>> transformer.fit_transform(X)
    array([1, 1, 1])
    """

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.transformer_tags = TransformerTags()
        return tags

    def fit_transform(self, X, y=None, **fit_params):
        """
        Fit to data, then transform it.

        Fits transformer to `X` and `y` with optional parameters `fit_params`
        and returns a transformed version of `X`.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Input samples.

        y :  array-like of shape (n_samples,) or (n_samples, n_outputs), \
                default=None
            Target values (None for unsupervised transformations).

        **fit_params : dict
            Additional fit parameters.

        Returns
        -------
        X_new : ndarray array of shape (n_samples, n_features_new)
            Transformed array.
        """
        # non-optimized default implementation; override when a better
        # method is possible for a given clustering algorithm

        # we do not route parameters here, since consumers don't route. But
        # since it's possible for a `transform` method to also consume
        # metadata, we check if that's the case, and we raise a warning telling
        # users that they should implement a custom `fit_transform` method
        # to forward metadata to `transform` as well.
        #
        # For that, we calculate routing and check if anything would be routed
        # to `transform` if we were to route them.
        if _routing_enabled():
            transform_params = self.get_metadata_routing().consumes(
                method="transform", params=fit_params.keys()
            )
            if transform_params:
                warnings.warn(
                    (
                        f"This object ({self.__class__.__name__}) has a `transform`"
                        " method which consumes metadata, but `fit_transform` does not"
                        " forward metadata to `transform`. Please implement a custom"
                        " `fit_transform` method to forward metadata to `transform` as"
                        " well. Alternatively, you can explicitly do"
                        " `set_transform_request`and set all values to `False` to"
                        " disable metadata routed to `transform`, if that's an option."
                    ),
                    UserWarning,
                )

        if y is None:
            # fit method of arity 1 (unsupervised transformation)
            return self.fit(X, **fit_params).transform(X)
        else:
            # fit method of arity 2 (supervised transformation)
            return self.fit(X, y, **fit_params).transform(X)


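The routing guard in `fit_transform` above only warns; it never forwards metadata itself. A minimal sketch (not part of base.py) of the workaround the warning recommends, assuming a transformer whose `transform` consumes a hypothetical `sample_mask` parameter:

import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin

class MaskedTransformer(TransformerMixin, BaseEstimator):
    """Toy transformer whose transform() consumes metadata (`sample_mask`)."""

    def fit(self, X, y=None):
        return self

    def transform(self, X, sample_mask=None):
        X = np.asarray(X)
        return X if sample_mask is None else X[sample_mask]

    def fit_transform(self, X, y=None, *, sample_mask=None, **fit_params):
        # custom override: forward the transform-only metadata explicitly,
        # which the default TransformerMixin.fit_transform would drop
        return self.fit(X, y, **fit_params).transform(X, sample_mask=sample_mask)
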
class OneToOneFeatureMixin:
    """Provides `get_feature_names_out` for simple transformers.

    This mixin assumes there's a 1-to-1 correspondence between input features
    and output features, such as :class:`~sklearn.preprocessing.StandardScaler`.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import OneToOneFeatureMixin, BaseEstimator
    >>> class MyEstimator(OneToOneFeatureMixin, BaseEstimator):
    ...     def fit(self, X, y=None):
    ...         self.n_features_in_ = X.shape[1]
    ...         return self
    >>> X = np.array([[1, 2], [3, 4]])
    >>> MyEstimator().fit(X).get_feature_names_out()
    array(['x0', 'x1'], dtype=object)
    """

    def get_feature_names_out(self, input_features=None):
        """Get output feature names for transformation.

        Parameters
        ----------
        input_features : array-like of str or None, default=None
            Input features.

            - If `input_features` is `None`, then `feature_names_in_` is
              used as feature names in. If `feature_names_in_` is not defined,
              then the following input feature names are generated:
              `["x0", "x1", ..., "x(n_features_in_ - 1)"]`.
            - If `input_features` is an array-like, then `input_features` must
              match `feature_names_in_` if `feature_names_in_` is defined.

        Returns
        -------
        feature_names_out : ndarray of str objects
            Same as input features.
        """
        # Note that passing attributes="n_features_in_" forces check_is_fitted
        # to check if the attribute is present. Otherwise it will pass on
        # stateless estimators (requires_fit=False)
        check_is_fitted(self, attributes="n_features_in_")
        return _check_feature_names_in(self, input_features)


class ClassNamePrefixFeaturesOutMixin:
    """Mixin class for transformers that generate their own names by prefixing.

    This mixin is useful when the transformer needs to generate its own feature
    names out, such as :class:`~sklearn.decomposition.PCA`. For example, if
    :class:`~sklearn.decomposition.PCA` outputs 3 features, then the generated feature
    names out are: `["pca0", "pca1", "pca2"]`.

    This mixin assumes that a `_n_features_out` attribute is defined when the
    transformer is fitted. `_n_features_out` is the number of output features
    that the transformer will return in `transform` of `fit_transform`.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import ClassNamePrefixFeaturesOutMixin, BaseEstimator
    >>> class MyEstimator(ClassNamePrefixFeaturesOutMixin, BaseEstimator):
    ...     def fit(self, X, y=None):
    ...         self._n_features_out = X.shape[1]
    ...         return self
    >>> X = np.array([[1, 2], [3, 4]])
    >>> MyEstimator().fit(X).get_feature_names_out()
    array(['myestimator0', 'myestimator1'], dtype=object)
    """

    def get_feature_names_out(self, input_features=None):
        """Get output feature names for transformation.

        The feature names out will prefixed by the lowercased class name. For
        example, if the transformer outputs 3 features, then the feature names
        out are: `["class_name0", "class_name1", "class_name2"]`.

        Parameters
        ----------
        input_features : array-like of str or None, default=None
            Only used to validate feature names with the names seen in `fit`.

        Returns
        -------
        feature_names_out : ndarray of str objects
            Transformed feature names.
        """
        check_is_fitted(self, "_n_features_out")
        return _generate_get_feature_names_out(
            self, self._n_features_out, input_features=input_features
        )


class DensityMixin:
    """Mixin class for all density estimators in scikit-learn.

    This mixin defines the following functionality:

    - sets estimator type to `"density_estimator"` through the `estimator_type` tag;
    - `score` method that default that do no-op.

    Examples
    --------
    >>> from sklearn.base import DensityMixin
    >>> class MyEstimator(DensityMixin):
    ...     def fit(self, X, y=None):
    ...         self.is_fitted_ = True
    ...         return self
    >>> estimator = MyEstimator()
    >>> hasattr(estimator, "score")
    True
    """

    # TODO(1.8): Remove this attribute
    _estimator_type = "DensityEstimator"

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.estimator_type = "density_estimator"
        return tags

    def score(self, X, y=None):
        """Return the score of the model on the data `X`.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test samples.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        score : float
        """
        pass


class OutlierMixin:
    """Mixin class for all outlier detection estimators in scikit-learn.

    This mixin defines the following functionality:

    - set estimator type to `"outlier_detector"` through the `estimator_type` tag;
    - `fit_predict` method that default to `fit` and `predict`.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.base import BaseEstimator, OutlierMixin
    >>> class MyEstimator(OutlierMixin):
    ...     def fit(self, X, y=None):
    ...         self.is_fitted_ = True
    ...         return self
    ...     def predict(self, X):
    ...         return np.ones(shape=len(X))
    >>> estimator = MyEstimator()
    >>> X = np.array([[1, 2], [2, 3], [3, 4]])
    >>> estimator.fit_predict(X)
    array([1., 1., 1.])
    """

    # TODO(1.8): Remove this attribute
    _estimator_type = "outlier_detector"

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.estimator_type = "outlier_detector"
        return tags

    def fit_predict(self, X, y=None, **kwargs):
        """Perform fit on X and returns labels for X.

        Returns -1 for outliers and 1 for inliers.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples.

        y : Ignored
            Not used, present for API consistency by convention.

        **kwargs : dict
            Arguments to be passed to ``fit``.

            .. versionadded:: 1.4

        Returns
        -------
        y : ndarray of shape (n_samples,)
            1 for inliers, -1 for outliers.
        """
        # we do not route parameters here, since consumers don't route. But
        # since it's possible for a `predict` method to also consume
        # metadata, we check if that's the case, and we raise a warning telling
        # users that they should implement a custom `fit_predict` method
        # to forward metadata to `predict` as well.
        #
        # For that, we calculate routing and check if anything would be routed
        # to `predict` if we were to route them.
        if _routing_enabled():
            transform_params = self.get_metadata_routing().consumes(
                method="predict", params=kwargs.keys()
            )
            if transform_params:
                warnings.warn(
                    (
                        f"This object ({self.__class__.__name__}) has a `predict` "
                        "method which consumes metadata, but `fit_predict` does not "
                        "forward metadata to `predict`. Please implement a custom "
                        "`fit_predict` method to forward metadata to `predict` as well."
                        "Alternatively, you can explicitly do `set_predict_request`"
                        "and set all values to `False` to disable metadata routed to "
                        "`predict`, if that's an option."
                    ),
                    UserWarning,
                )

        # override for transductive outlier detectors like LocalOulierFactor
        return self.fit(X, **kwargs).predict(X)


class MetaEstimatorMixin:
    """Mixin class for all meta estimators in scikit-learn.

    This mixin is empty, and only exists to indicate that the estimator is a
    meta-estimator.

    .. versionchanged:: 1.6
        The `_required_parameters` is now removed and is unnecessary since tests are
        refactored and don't use this anymore.

    Examples
    --------
    >>> from sklearn.base import MetaEstimatorMixin
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.linear_model import LogisticRegression
    >>> class MyEstimator(MetaEstimatorMixin):
    ...     def __init__(self, *, estimator=None):
    ...         self.estimator = estimator
    ...     def fit(self, X, y=None):
    ...         if self.estimator is None:
    ...             self.estimator_ = LogisticRegression()
    ...         else:
    ...             self.estimator_ = self.estimator
    ...         return self
    >>> X, y = load_iris(return_X_y=True)
    >>> estimator = MyEstimator().fit(X, y)
    >>> estimator.estimator_
    LogisticRegression()
    """


class MultiOutputMixin:
    """Mixin to mark estimators that support multioutput."""

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.target_tags.multi_output = True
        return tags


class _UnstableArchMixin:
    """Mark estimators that are non-determinstic on 32bit or PowerPC"""

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.non_deterministic = _IS_32BIT or platform.machine().startswith(
            ("ppc", "powerpc")
        )
        return tags


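For context, a minimal sketch (not from the file above) of how these tag mixins compose through cooperative `super()` calls; the commented output assumes the default tags produced by `BaseEstimator`:

from sklearn.base import BaseEstimator, MultiOutputMixin, RegressorMixin

class MultiTargetModel(MultiOutputMixin, RegressorMixin, BaseEstimator):
    def fit(self, X, y):
        return self

tags = MultiTargetModel().__sklearn_tags__()
# each mixin in the MRO tweaks one field of the shared Tags object
print(tags.estimator_type)            # expected: "regressor" (from RegressorMixin)
print(tags.target_tags.multi_output)  # expected: True (from MultiOutputMixin)
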
def is_classifier(estimator):
    """Return True if the given estimator is (probably) a classifier.

    Parameters
    ----------
    estimator : object
        Estimator object to test.

    Returns
    -------
    out : bool
        True if estimator is a classifier and False otherwise.

    Examples
    --------
    >>> from sklearn.base import is_classifier
    >>> from sklearn.cluster import KMeans
    >>> from sklearn.svm import SVC, SVR
    >>> classifier = SVC()
    >>> regressor = SVR()
    >>> kmeans = KMeans()
    >>> is_classifier(classifier)
    True
    >>> is_classifier(regressor)
    False
    >>> is_classifier(kmeans)
    False
    """
    # TODO(1.8): Remove this check
    if isinstance(estimator, type):
        warnings.warn(
            f"passing a class to {print(inspect.stack()[0][3])} is deprecated and "
            "will be removed in 1.8. Use an instance of the class instead.",
            FutureWarning,
        )
        return getattr(estimator, "_estimator_type", None) == "classifier"

    return get_tags(estimator).estimator_type == "classifier"


def is_regressor(estimator):
    """Return True if the given estimator is (probably) a regressor.

    Parameters
    ----------
    estimator : estimator instance
        Estimator object to test.

    Returns
    -------
    out : bool
        True if estimator is a regressor and False otherwise.

    Examples
    --------
    >>> from sklearn.base import is_regressor
    >>> from sklearn.cluster import KMeans
    >>> from sklearn.svm import SVC, SVR
    >>> classifier = SVC()
    >>> regressor = SVR()
    >>> kmeans = KMeans()
    >>> is_regressor(classifier)
    False
    >>> is_regressor(regressor)
    True
    >>> is_regressor(kmeans)
    False
    """
    # TODO(1.8): Remove this check
    if isinstance(estimator, type):
        warnings.warn(
            f"passing a class to {print(inspect.stack()[0][3])} is deprecated and "
            "will be removed in 1.8. Use an instance of the class instead.",
            FutureWarning,
        )
        return getattr(estimator, "_estimator_type", None) == "regressor"

    return get_tags(estimator).estimator_type == "regressor"


def is_clusterer(estimator):
    """Return True if the given estimator is (probably) a clusterer.

    .. versionadded:: 1.6

    Parameters
    ----------
    estimator : object
        Estimator object to test.

    Returns
    -------
    out : bool
        True if estimator is a clusterer and False otherwise.

    Examples
    --------
    >>> from sklearn.base import is_clusterer
    >>> from sklearn.cluster import KMeans
    >>> from sklearn.svm import SVC, SVR
    >>> classifier = SVC()
    >>> regressor = SVR()
    >>> kmeans = KMeans()
    >>> is_clusterer(classifier)
    False
    >>> is_clusterer(regressor)
    False
    >>> is_clusterer(kmeans)
    True
    """
    # TODO(1.8): Remove this check
    if isinstance(estimator, type):
        warnings.warn(
            f"passing a class to {print(inspect.stack()[0][3])} is deprecated and "
            "will be removed in 1.8. Use an instance of the class instead.",
            FutureWarning,
        )
        return getattr(estimator, "_estimator_type", None) == "clusterer"

    return get_tags(estimator).estimator_type == "clusterer"


def is_outlier_detector(estimator):
    """Return True if the given estimator is (probably) an outlier detector.

    Parameters
    ----------
    estimator : estimator instance
        Estimator object to test.

    Returns
    -------
    out : bool
        True if estimator is an outlier detector and False otherwise.
    """
    # TODO(1.8): Remove this check
    if isinstance(estimator, type):
        warnings.warn(
            f"passing a class to {print(inspect.stack()[0][3])} is deprecated and "
            "will be removed in 1.8. Use an instance of the class instead.",
            FutureWarning,
        )
        return getattr(estimator, "_estimator_type", None) == "outlier_detector"

    return get_tags(estimator).estimator_type == "outlier_detector"


def _fit_context(*, prefer_skip_nested_validation):
    """Decorator to run the fit methods of estimators within context managers.

    Parameters
    ----------
    prefer_skip_nested_validation : bool
        If True, the validation of parameters of inner estimators or functions
        called during fit will be skipped.

        This is useful to avoid validating many times the parameters passed by the
        user from the public facing API. It's also useful to avoid validating
        parameters that we pass internally to inner functions that are guaranteed to
        be valid by the test suite.

        It should be set to True for most estimators, except for those that receive
        non-validated objects as parameters, such as meta-estimators that are given
        estimator objects.

    Returns
    -------
    decorated_fit : method
        The decorated fit method.
    """

    def decorator(fit_method):
        @functools.wraps(fit_method)
        def wrapper(estimator, *args, **kwargs):
            global_skip_validation = get_config()["skip_parameter_validation"]

            # we don't want to validate again for each call to partial_fit
            partial_fit_and_fitted = (
                fit_method.__name__ == "partial_fit" and _is_fitted(estimator)
            )

            if not global_skip_validation and not partial_fit_and_fitted:
                estimator._validate_params()

            with config_context(
                skip_parameter_validation=(
                    prefer_skip_nested_validation or global_skip_validation
                )
            ):
                return fit_method(estimator, *args, **kwargs)

        return wrapper

    return decorator
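`_fit_context` is private API, but the intended usage is visible from the wrapper above: it is applied as a decorator on an estimator's `fit` (or `partial_fit`) so that `_validate_params()` runs once and nested validation is skipped for the duration of the call. A rough sketch, assuming a custom estimator that declares `_parameter_constraints` (the class and parameter names are illustrative only):

from numbers import Real

from sklearn.base import BaseEstimator, _fit_context

class ScaledEstimator(BaseEstimator):
    # constraints checked by estimator._validate_params() inside the wrapper
    _parameter_constraints: dict = {"alpha": [Real]}

    def __init__(self, alpha=1.0):
        self.alpha = alpha

    @_fit_context(prefer_skip_nested_validation=True)
    def fit(self, X, y=None):
        # by the time we get here, `alpha` has been validated and nested
        # parameter validation is disabled via config_context(...)
        self.is_fitted_ = True
        return self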
    	
        evalkit_tf437/lib/python3.10/site-packages/sklearn/calibration.py
    ADDED
    
@@ -0,0 +1,1423 @@
"""Methods for calibrating predicted probabilities."""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

import warnings
from inspect import signature
from math import log
from numbers import Integral, Real

import numpy as np
from scipy.optimize import minimize
from scipy.special import expit

from sklearn.utils import Bunch

from ._loss import HalfBinomialLoss
from .base import (
    BaseEstimator,
    ClassifierMixin,
    MetaEstimatorMixin,
    RegressorMixin,
    _fit_context,
    clone,
)
from .frozen import FrozenEstimator
from .isotonic import IsotonicRegression
from .model_selection import LeaveOneOut, check_cv, cross_val_predict
from .preprocessing import LabelEncoder, label_binarize
from .svm import LinearSVC
from .utils import _safe_indexing, column_or_1d, get_tags, indexable
from .utils._param_validation import (
    HasMethods,
    Hidden,
    Interval,
    StrOptions,
    validate_params,
)
from .utils._plotting import _BinaryClassifierCurveDisplayMixin, _validate_style_kwargs
from .utils._response import _get_response_values, _process_predict_proba
from .utils.metadata_routing import (
    MetadataRouter,
    MethodMapping,
    _routing_enabled,
    process_routing,
)
from .utils.multiclass import check_classification_targets
from .utils.parallel import Parallel, delayed
from .utils.validation import (
    _check_method_params,
    _check_pos_label_consistency,
    _check_response_method,
    _check_sample_weight,
    _num_samples,
    check_consistent_length,
    check_is_fitted,
)

class CalibratedClassifierCV(ClassifierMixin, MetaEstimatorMixin, BaseEstimator):
    """Probability calibration with isotonic regression or logistic regression.

    This class uses cross-validation to both estimate the parameters of a
    classifier and subsequently calibrate a classifier. With default
    `ensemble=True`, for each cv split it
    fits a copy of the base estimator to the training subset, and calibrates it
    using the testing subset. For prediction, predicted probabilities are
    averaged across these individual calibrated classifiers. When
    `ensemble=False`, cross-validation is used to obtain unbiased predictions,
    via :func:`~sklearn.model_selection.cross_val_predict`, which are then
    used for calibration. For prediction, the base estimator, trained using all
    the data, is used. This is the prediction method implemented when
    `probability=True` for :class:`~sklearn.svm.SVC` and :class:`~sklearn.svm.NuSVC`
    estimators (see :ref:`User Guide <scores_probabilities>` for details).

    Already fitted classifiers can be calibrated by wrapping the model in a
    :class:`~sklearn.frozen.FrozenEstimator`. In this case all provided
    data is used for calibration. The user has to take care manually that data
    for model fitting and calibration are disjoint.

    The calibration is based on the :term:`decision_function` method of the
    `estimator` if it exists, else on :term:`predict_proba`.

    Read more in the :ref:`User Guide <calibration>`.
    To learn more about the CalibratedClassifierCV class, see the
    following calibration examples:
    :ref:`sphx_glr_auto_examples_calibration_plot_calibration.py`,
    :ref:`sphx_glr_auto_examples_calibration_plot_calibration_curve.py`, and
    :ref:`sphx_glr_auto_examples_calibration_plot_calibration_multiclass.py`.

    Parameters
    ----------
    estimator : estimator instance, default=None
        The classifier whose output needs to be calibrated to provide more
        accurate `predict_proba` outputs. The default classifier is
        a :class:`~sklearn.svm.LinearSVC`.

        .. versionadded:: 1.2

    method : {'sigmoid', 'isotonic'}, default='sigmoid'
        The method to use for calibration. Can be 'sigmoid' which
        corresponds to Platt's method (i.e. a logistic regression model) or
        'isotonic' which is a non-parametric approach. It is not advised to
        use isotonic calibration with too few calibration samples
        ``(<<1000)`` since it tends to overfit.

    cv : int, cross-validation generator, or iterable, default=None
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:

        - None, to use the default 5-fold cross-validation,
        - integer, to specify the number of folds.
        - :term:`CV splitter`,
        - An iterable yielding (train, test) splits as arrays of indices.

        For integer/None inputs, if ``y`` is binary or multiclass,
        :class:`~sklearn.model_selection.StratifiedKFold` is used. If ``y`` is
        neither binary nor multiclass, :class:`~sklearn.model_selection.KFold`
        is used.

        Refer to the :ref:`User Guide <cross_validation>` for the various
        cross-validation strategies that can be used here.

        .. versionchanged:: 0.22
            ``cv`` default value if None changed from 3-fold to 5-fold.

        .. versionchanged:: 1.6
            `"prefit"` is deprecated. Use :class:`~sklearn.frozen.FrozenEstimator`
            instead.

    n_jobs : int, default=None
        Number of jobs to run in parallel.
        ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
        ``-1`` means using all processors.

        Base estimator clones are fitted in parallel across cross-validation
        iterations. Therefore parallelism happens only when `cv != "prefit"`.

        See :term:`Glossary <n_jobs>` for more details.

        .. versionadded:: 0.24

    ensemble : bool, or "auto", default="auto"
        Determines how the calibrator is fitted.

        "auto" will use `False` if the `estimator` is a
        :class:`~sklearn.frozen.FrozenEstimator`, and `True` otherwise.

        If `True`, the `estimator` is fitted using training data, and
        calibrated using testing data, for each `cv` fold. The final estimator
        is an ensemble of `n_cv` fitted classifier and calibrator pairs, where
        `n_cv` is the number of cross-validation folds. The output is the
        average predicted probabilities of all pairs.

        If `False`, `cv` is used to compute unbiased predictions, via
        :func:`~sklearn.model_selection.cross_val_predict`, which are then
        used for calibration. At prediction time, the classifier used is the
        `estimator` trained on all the data.
        Note that this method is also internally implemented in
        :mod:`sklearn.svm` estimators with the `probability=True` parameter.

        .. versionadded:: 0.24

        .. versionchanged:: 1.6
            `"auto"` option is added and is the default.

    Attributes
    ----------
    classes_ : ndarray of shape (n_classes,)
        The class labels.

    n_features_in_ : int
        Number of features seen during :term:`fit`. Only defined if the
        underlying estimator exposes such an attribute when fit.

        .. versionadded:: 0.24

    feature_names_in_ : ndarray of shape (`n_features_in_`,)
        Names of features seen during :term:`fit`. Only defined if the
        underlying estimator exposes such an attribute when fit.

        .. versionadded:: 1.0

    calibrated_classifiers_ : list (len() equal to cv or 1 if `ensemble=False`)
        The list of classifier and calibrator pairs.

        - When `ensemble=True`, `n_cv` fitted `estimator` and calibrator pairs.
          `n_cv` is the number of cross-validation folds.
        - When `ensemble=False`, the `estimator`, fitted on all the data, and fitted
          calibrator.

        .. versionchanged:: 0.24
            Single calibrated classifier case when `ensemble=False`.

    See Also
    --------
    calibration_curve : Compute true and predicted probabilities
        for a calibration curve.

    References
    ----------
    .. [1] Obtaining calibrated probability estimates from decision trees
           and naive Bayesian classifiers, B. Zadrozny & C. Elkan, ICML 2001

    .. [2] Transforming Classifier Scores into Accurate Multiclass
           Probability Estimates, B. Zadrozny & C. Elkan, (KDD 2002)

    .. [3] Probabilistic Outputs for Support Vector Machines and Comparisons to
           Regularized Likelihood Methods, J. Platt, (1999)

    .. [4] Predicting Good Probabilities with Supervised Learning,
           A. Niculescu-Mizil & R. Caruana, ICML 2005

    Examples
    --------
    >>> from sklearn.datasets import make_classification
    >>> from sklearn.naive_bayes import GaussianNB
    >>> from sklearn.calibration import CalibratedClassifierCV
    >>> X, y = make_classification(n_samples=100, n_features=2,
    ...                            n_redundant=0, random_state=42)
    >>> base_clf = GaussianNB()
    >>> calibrated_clf = CalibratedClassifierCV(base_clf, cv=3)
    >>> calibrated_clf.fit(X, y)
    CalibratedClassifierCV(...)
    >>> len(calibrated_clf.calibrated_classifiers_)
    3
    >>> calibrated_clf.predict_proba(X)[:5, :]
    array([[0.110..., 0.889...],
           [0.072..., 0.927...],
           [0.928..., 0.071...],
           [0.928..., 0.071...],
           [0.071..., 0.928...]])
    >>> from sklearn.model_selection import train_test_split
    >>> X, y = make_classification(n_samples=100, n_features=2,
    ...                            n_redundant=0, random_state=42)
    >>> X_train, X_calib, y_train, y_calib = train_test_split(
    ...        X, y, random_state=42
    ... )
    >>> base_clf = GaussianNB()
    >>> base_clf.fit(X_train, y_train)
    GaussianNB()
    >>> from sklearn.frozen import FrozenEstimator
    >>> calibrated_clf = CalibratedClassifierCV(FrozenEstimator(base_clf))
    >>> calibrated_clf.fit(X_calib, y_calib)
    CalibratedClassifierCV(...)
    >>> len(calibrated_clf.calibrated_classifiers_)
    1
    >>> calibrated_clf.predict_proba([[-0.5, 0.5]])
    array([[0.936..., 0.063...]])
    """

    _parameter_constraints: dict = {
        "estimator": [
            HasMethods(["fit", "predict_proba"]),
            HasMethods(["fit", "decision_function"]),
            None,
        ],
        "method": [StrOptions({"isotonic", "sigmoid"})],
        "cv": ["cv_object", Hidden(StrOptions({"prefit"}))],
        "n_jobs": [Integral, None],
        "ensemble": ["boolean", StrOptions({"auto"})],
    }

    def __init__(
        self,
        estimator=None,
        *,
        method="sigmoid",
        cv=None,
        n_jobs=None,
        ensemble="auto",
    ):
        self.estimator = estimator
        self.method = method
        self.cv = cv
        self.n_jobs = n_jobs
        self.ensemble = ensemble

    def _get_estimator(self):
        """Resolve which estimator to return (default is LinearSVC)"""
        if self.estimator is None:
            # we want all classifiers that don't expose a random_state
            # to be deterministic (and we don't want to expose this one).
            estimator = LinearSVC(random_state=0)
            if _routing_enabled():
                estimator.set_fit_request(sample_weight=True)
        else:
            estimator = self.estimator

        return estimator

    @_fit_context(
        # CalibratedClassifierCV.estimator is not validated yet
        prefer_skip_nested_validation=False
    )
    def fit(self, X, y, sample_weight=None, **fit_params):
        """Fit the calibrated model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,)
            Target values.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted.

        **fit_params : dict
            Parameters to pass to the `fit` method of the underlying
            classifier.

        Returns
        -------
        self : object
            Returns an instance of self.
        """
        check_classification_targets(y)
        X, y = indexable(X, y)
        if sample_weight is not None:
            sample_weight = _check_sample_weight(sample_weight, X)

        estimator = self._get_estimator()

        _ensemble = self.ensemble
        if _ensemble == "auto":
            _ensemble = not isinstance(estimator, FrozenEstimator)

        self.calibrated_classifiers_ = []
        if self.cv == "prefit":
            # TODO(1.8): Remove this code branch and cv='prefit'
            warnings.warn(
                "The `cv='prefit'` option is deprecated in 1.6 and will be removed in"
                " 1.8. You can use CalibratedClassifierCV(FrozenEstimator(estimator))"
                " instead."
            )
            # `classes_` should be consistent with that of estimator
            check_is_fitted(self.estimator, attributes=["classes_"])
            self.classes_ = self.estimator.classes_

            predictions, _ = _get_response_values(
                estimator,
                X,
                response_method=["decision_function", "predict_proba"],
            )
            if predictions.ndim == 1:
                # Reshape binary output from `(n_samples,)` to `(n_samples, 1)`
                predictions = predictions.reshape(-1, 1)

            calibrated_classifier = _fit_calibrator(
                estimator,
                predictions,
                y,
                self.classes_,
                self.method,
                sample_weight,
            )
            self.calibrated_classifiers_.append(calibrated_classifier)
        else:
            # Set `classes_` using all `y`
            label_encoder_ = LabelEncoder().fit(y)
            self.classes_ = label_encoder_.classes_

            if _routing_enabled():
                routed_params = process_routing(
                    self,
                    "fit",
                    sample_weight=sample_weight,
                    **fit_params,
                )
            else:
                # sample_weight checks
                fit_parameters = signature(estimator.fit).parameters
                supports_sw = "sample_weight" in fit_parameters
                if sample_weight is not None and not supports_sw:
                    estimator_name = type(estimator).__name__
                    warnings.warn(
                        f"Since {estimator_name} does not appear to accept"
                        " sample_weight, sample weights will only be used for the"
                        " calibration itself. This can be caused by a limitation of"
                        " the current scikit-learn API. See the following issue for"
                        " more details:"
                        " https://github.com/scikit-learn/scikit-learn/issues/21134."
                        " Be warned that the result of the calibration is likely to be"
                        " incorrect."
                    )
                routed_params = Bunch()
                routed_params.splitter = Bunch(split={})  # no routing for splitter
                routed_params.estimator = Bunch(fit=fit_params)
                if sample_weight is not None and supports_sw:
                    routed_params.estimator.fit["sample_weight"] = sample_weight

            # Check that each cross-validation fold can have at least one
            # example per class
            if isinstance(self.cv, int):
                n_folds = self.cv
            elif hasattr(self.cv, "n_splits"):
                n_folds = self.cv.n_splits
            else:
                n_folds = None
            if n_folds and np.any(np.unique(y, return_counts=True)[1] < n_folds):
                raise ValueError(
                    f"Requesting {n_folds}-fold "
                    "cross-validation but provided less than "
                    f"{n_folds} examples for at least one class."
                )
            if isinstance(self.cv, LeaveOneOut):
                raise ValueError(
                    "LeaveOneOut cross-validation does not allow "
                    "all classes to be present in test splits. "
                    "Please use a cross-validation generator that allows "
                    "all classes to appear in every test and train split."
                )
            cv = check_cv(self.cv, y, classifier=True)

            if _ensemble:
                parallel = Parallel(n_jobs=self.n_jobs)
                self.calibrated_classifiers_ = parallel(
                    delayed(_fit_classifier_calibrator_pair)(
                        clone(estimator),
                        X,
                        y,
                        train=train,
                        test=test,
                        method=self.method,
                        classes=self.classes_,
                        sample_weight=sample_weight,
                        fit_params=routed_params.estimator.fit,
                    )
                    for train, test in cv.split(X, y, **routed_params.splitter.split)
                )
            else:
                this_estimator = clone(estimator)
                method_name = _check_response_method(
                    this_estimator,
                    ["decision_function", "predict_proba"],
                ).__name__
                predictions = cross_val_predict(
                    estimator=this_estimator,
                    X=X,
                    y=y,
                    cv=cv,
                    method=method_name,
                    n_jobs=self.n_jobs,
                    params=routed_params.estimator.fit,
                )
                if len(self.classes_) == 2:
                    # Ensure shape (n_samples, 1) in the binary case
                    if method_name == "predict_proba":
                        # Select the probability column of the positive class
                        predictions = _process_predict_proba(
                            y_pred=predictions,
                            target_type="binary",
                            classes=self.classes_,
                            pos_label=self.classes_[1],
                        )
                    predictions = predictions.reshape(-1, 1)

                this_estimator.fit(X, y, **routed_params.estimator.fit)
                # Note: Here we don't pass on fit_params because the supported
                # calibrators don't support fit_params anyway
                calibrated_classifier = _fit_calibrator(
                    this_estimator,
                    predictions,
                    y,
                    self.classes_,
                    self.method,
                    sample_weight,
                )
                self.calibrated_classifiers_.append(calibrated_classifier)

        first_clf = self.calibrated_classifiers_[0].estimator
        if hasattr(first_clf, "n_features_in_"):
            self.n_features_in_ = first_clf.n_features_in_
        if hasattr(first_clf, "feature_names_in_"):
            self.feature_names_in_ = first_clf.feature_names_in_
        return self

    def predict_proba(self, X):
        """Calibrated probabilities of classification.

        This function returns calibrated probabilities of classification
        according to each class on an array of test vectors X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The samples, as accepted by `estimator.predict_proba`.

        Returns
        -------
        C : ndarray of shape (n_samples, n_classes)
            The predicted probas.
        """
        check_is_fitted(self)
        # Compute the arithmetic mean of the predictions of the calibrated
        # classifiers
        mean_proba = np.zeros((_num_samples(X), len(self.classes_)))
        for calibrated_classifier in self.calibrated_classifiers_:
            proba = calibrated_classifier.predict_proba(X)
            mean_proba += proba

        mean_proba /= len(self.calibrated_classifiers_)

        return mean_proba

    def predict(self, X):
        """Predict the target of new samples.

        The predicted class is the class that has the highest probability,
        and can thus be different from the prediction of the uncalibrated classifier.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The samples, as accepted by `estimator.predict`.

        Returns
        -------
        C : ndarray of shape (n_samples,)
            The predicted class.
        """
        check_is_fitted(self)
        return self.classes_[np.argmax(self.predict_proba(X), axis=1)]

    def get_metadata_routing(self):
        """Get metadata routing of this object.

        Please check :ref:`User Guide <metadata_routing>` on how the routing
        mechanism works.

        Returns
        -------
        routing : MetadataRouter
            A :class:`~sklearn.utils.metadata_routing.MetadataRouter` encapsulating
            routing information.
        """
        router = (
            MetadataRouter(owner=self.__class__.__name__)
            .add_self_request(self)
            .add(
                estimator=self._get_estimator(),
                method_mapping=MethodMapping().add(caller="fit", callee="fit"),
            )
            .add(
                splitter=self.cv,
                method_mapping=MethodMapping().add(caller="fit", callee="split"),
            )
        )
        return router

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.input_tags.sparse = get_tags(self._get_estimator()).input_tags.sparse
        return tags

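Editorial aside (not part of the added file): `predict_proba` above averages the outputs of the per-fold calibrators, so the quickest way to see the effect of the `ensemble` parameter is to compare how many (classifier, calibrator) pairs end up fitted. A minimal usage sketch, assuming a scikit-learn version that provides the `ensemble` parameter described in the docstring:

# --- Editorial example, not part of the diff ---
from sklearn.calibration import CalibratedClassifierCV
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression

X, y = make_classification(n_samples=300, random_state=0)

# ensemble=True: one (classifier, calibrator) pair per CV fold; predictions are averaged.
ensembled = CalibratedClassifierCV(LogisticRegression(max_iter=1000), cv=5, ensemble=True)
ensembled.fit(X, y)
print(len(ensembled.calibrated_classifiers_))  # 5

# ensemble=False: cross_val_predict supplies out-of-fold scores for a single calibrator,
# and the final classifier is refit on all the data.
single = CalibratedClassifierCV(LogisticRegression(max_iter=1000), cv=5, ensemble=False)
single.fit(X, y)
print(len(single.calibrated_classifiers_))  # 1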

def _fit_classifier_calibrator_pair(
    estimator,
    X,
    y,
    train,
    test,
    method,
    classes,
    sample_weight=None,
    fit_params=None,
):
    """Fit a classifier/calibration pair on a given train/test split.

    Fit the classifier on the train set, compute its predictions on the test
    set and use the predictions as input to fit the calibrator along with the
    test labels.

    Parameters
    ----------
    estimator : estimator instance
        Cloned base estimator.

    X : array-like, shape (n_samples, n_features)
        Sample data.

    y : array-like, shape (n_samples,)
        Targets.

    train : ndarray, shape (n_train_indices,)
        Indices of the training subset.

    test : ndarray, shape (n_test_indices,)
        Indices of the testing subset.

    method : {'sigmoid', 'isotonic'}
        Method to use for calibration.

    classes : ndarray, shape (n_classes,)
        The target classes.

    sample_weight : array-like, default=None
        Sample weights for `X`.

    fit_params : dict, default=None
        Parameters to pass to the `fit` method of the underlying
        classifier.

    Returns
    -------
    calibrated_classifier : _CalibratedClassifier instance
    """
    fit_params_train = _check_method_params(X, params=fit_params, indices=train)
    X_train, y_train = _safe_indexing(X, train), _safe_indexing(y, train)
    X_test, y_test = _safe_indexing(X, test), _safe_indexing(y, test)

    estimator.fit(X_train, y_train, **fit_params_train)

    predictions, _ = _get_response_values(
        estimator,
        X_test,
        response_method=["decision_function", "predict_proba"],
    )
    if predictions.ndim == 1:
        # Reshape binary output from `(n_samples,)` to `(n_samples, 1)`
        predictions = predictions.reshape(-1, 1)

    sw_test = None if sample_weight is None else _safe_indexing(sample_weight, test)
    calibrated_classifier = _fit_calibrator(
        estimator, predictions, y_test, classes, method, sample_weight=sw_test
    )
    return calibrated_classifier

| 632 | 
            +
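# Illustrative sketch (hypothetical, not part of the upstream scikit-learn
# source): `_fit_classifier_calibrator_pair` is the per-split worker used by
# `CalibratedClassifierCV.fit` earlier in this file; roughly one cloned
# estimator/calibrator pair is fitted per CV split, along the lines of:
#
#   >>> # for train, test in cv.split(X, y):
#   >>> #     _fit_classifier_calibrator_pair(
#   >>> #         clone(estimator), X, y, train, test, method, classes
#   >>> #     )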
def _fit_calibrator(clf, predictions, y, classes, method, sample_weight=None):
    """Fit calibrator(s) and return a `_CalibratedClassifier`
    instance.

    `n_classes` (i.e. `len(clf.classes_)`) calibrators are fitted.
    However, if `n_classes` equals 2, one calibrator is fitted.

    Parameters
    ----------
    clf : estimator instance
        Fitted classifier.

    predictions : array-like, shape (n_samples, n_classes) or (n_samples, 1) \
                    when binary.
        Raw predictions returned by the un-calibrated base classifier.

    y : array-like, shape (n_samples,)
        The targets.

    classes : ndarray, shape (n_classes,)
        All the prediction classes.

    method : {'sigmoid', 'isotonic'}
        The method to use for calibration.

    sample_weight : ndarray, shape (n_samples,), default=None
        Sample weights. If None, then samples are equally weighted.

    Returns
    -------
    pipeline : _CalibratedClassifier instance
    """
    Y = label_binarize(y, classes=classes)
    label_encoder = LabelEncoder().fit(classes)
    pos_class_indices = label_encoder.transform(clf.classes_)
    calibrators = []
    for class_idx, this_pred in zip(pos_class_indices, predictions.T):
        if method == "isotonic":
            calibrator = IsotonicRegression(out_of_bounds="clip")
        else:  # "sigmoid"
            calibrator = _SigmoidCalibration()
        calibrator.fit(this_pred, Y[:, class_idx], sample_weight)
        calibrators.append(calibrator)

    pipeline = _CalibratedClassifier(clf, calibrators, method=method, classes=classes)
    return pipeline


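# Illustrative note (not part of the upstream scikit-learn source): in the
# binary case `_fit_calibrator` fits a single calibrator because the binary
# `predictions` passed in have a single column (see the reshape in
# `_fit_classifier_calibrator_pair` above), so `predictions.T` contributes one
# row to the zip; `label_binarize` likewise returns a single 0/1 column for two
# classes, e.g.:
#
#   >>> from sklearn.preprocessing import label_binarize
#   >>> label_binarize([0, 1, 1], classes=[0, 1]).ravel()
#   array([0, 1, 1])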
class _CalibratedClassifier:
    """Pipeline-like chaining a fitted classifier and its fitted calibrators.

    Parameters
    ----------
    estimator : estimator instance
        Fitted classifier.

    calibrators : list of fitted estimator instances
        List of fitted calibrators (either 'IsotonicRegression' or
        '_SigmoidCalibration'). The number of calibrators equals the number of
        classes. However, if there are 2 classes, the list contains only one
        fitted calibrator.

    classes : array-like of shape (n_classes,)
        All the prediction classes.

    method : {'sigmoid', 'isotonic'}, default='sigmoid'
        The method to use for calibration. Can be 'sigmoid' which
        corresponds to Platt's method or 'isotonic' which is a
        non-parametric approach based on isotonic regression.
    """

    def __init__(self, estimator, calibrators, *, classes, method="sigmoid"):
        self.estimator = estimator
        self.calibrators = calibrators
        self.classes = classes
        self.method = method

    def predict_proba(self, X):
        """Calculate calibrated probabilities.

        Calculates classification calibrated probabilities
        for each class, in a one-vs-all manner, for `X`.

        Parameters
        ----------
        X : ndarray of shape (n_samples, n_features)
            The sample data.

        Returns
        -------
        proba : array, shape (n_samples, n_classes)
            The predicted probabilities. Can be exact zeros.
        """
        predictions, _ = _get_response_values(
            self.estimator,
            X,
            response_method=["decision_function", "predict_proba"],
        )
        if predictions.ndim == 1:
            # Reshape binary output from `(n_samples,)` to `(n_samples, 1)`
            predictions = predictions.reshape(-1, 1)

        n_classes = len(self.classes)

        label_encoder = LabelEncoder().fit(self.classes)
        pos_class_indices = label_encoder.transform(self.estimator.classes_)

        proba = np.zeros((_num_samples(X), n_classes))
        for class_idx, this_pred, calibrator in zip(
            pos_class_indices, predictions.T, self.calibrators
        ):
            if n_classes == 2:
                # When binary, `predictions` consists only of predictions for
                # clf.classes_[1] but `pos_class_indices` = 0
                class_idx += 1
            proba[:, class_idx] = calibrator.predict(this_pred)

        # Normalize the probabilities
        if n_classes == 2:
            proba[:, 0] = 1.0 - proba[:, 1]
        else:
            denominator = np.sum(proba, axis=1)[:, np.newaxis]
            # In the edge case where for each class calibrator returns a null
            # probability for a given sample, use the uniform distribution
            # instead.
            uniform_proba = np.full_like(proba, 1 / n_classes)
            proba = np.divide(
                proba, denominator, out=uniform_proba, where=denominator != 0
            )

        # Deal with cases where the predicted probability minimally exceeds 1.0
        proba[(1.0 < proba) & (proba <= 1.0 + 1e-5)] = 1.0

        return proba


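# Illustrative worked example (not part of the upstream scikit-learn source)
# of the multiclass normalization in `_CalibratedClassifier.predict_proba`
# above: the one-vs-rest calibrated values of a sample are rescaled to sum to
# one, e.g. [0.1, 0.1, 0.3] -> [0.2, 0.2, 0.6], while a row of exact zeros
# falls back to the uniform distribution [1/3, 1/3, 1/3]. In the binary case
# only the positive-class column is calibrated and the other column is set to
# its complement.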
# The max_abs_prediction_threshold was approximated using
# logit(np.finfo(np.float64).eps) which is about -36
def _sigmoid_calibration(
    predictions, y, sample_weight=None, max_abs_prediction_threshold=30
):
    """Probability Calibration with sigmoid method (Platt 2000)

    Parameters
    ----------
    predictions : ndarray of shape (n_samples,)
        The decision function or predict proba for the samples.

    y : ndarray of shape (n_samples,)
        The targets.

    sample_weight : array-like of shape (n_samples,), default=None
        Sample weights. If None, then samples are equally weighted.

    Returns
    -------
    a : float
        The slope.

    b : float
        The intercept.

    References
    ----------
    Platt, "Probabilistic Outputs for Support Vector Machines"
    """
    predictions = column_or_1d(predictions)
    y = column_or_1d(y)

    F = predictions  # F follows Platt's notations

    scale_constant = 1.0
    max_prediction = np.max(np.abs(F))

    # If the predictions have large values we scale them in order to bring
    # them within a suitable range. This has no effect on the final
    # (prediction) result because linear models like Logistic Regression
    # without a penalty are invariant to multiplying the features by a
    # constant.
    if max_prediction >= max_abs_prediction_threshold:
        scale_constant = max_prediction
        # We rescale the features in a copy: inplace rescaling could confuse
        # the caller and make the code harder to reason about.
        F = F / scale_constant

    # Bayesian priors (see Platt end of section 2.2):
    # It corresponds to the number of samples, taking into account the
    # `sample_weight`.
    mask_negative_samples = y <= 0
    if sample_weight is not None:
        prior0 = (sample_weight[mask_negative_samples]).sum()
        prior1 = (sample_weight[~mask_negative_samples]).sum()
    else:
        prior0 = float(np.sum(mask_negative_samples))
        prior1 = y.shape[0] - prior0
    T = np.zeros_like(y, dtype=predictions.dtype)
    T[y > 0] = (prior1 + 1.0) / (prior1 + 2.0)
    T[y <= 0] = 1.0 / (prior0 + 2.0)

    bin_loss = HalfBinomialLoss()

    def loss_grad(AB):
        # .astype below is needed to ensure y_true and raw_prediction have the
        # same dtype. With result = np.float64(0) * np.array([1, 2], dtype=np.float32)
        # - in Numpy 2, result.dtype is float64
        # - in Numpy<2, result.dtype is float32
        raw_prediction = -(AB[0] * F + AB[1]).astype(dtype=predictions.dtype)
        l, g = bin_loss.loss_gradient(
            y_true=T,
            raw_prediction=raw_prediction,
            sample_weight=sample_weight,
        )
        loss = l.sum()
        # TODO: Remove casting to np.float64 when minimum supported SciPy is 1.11.2
        # With SciPy >= 1.11.2, the LBFGS implementation will cast to float64
        # https://github.com/scipy/scipy/pull/18825.
        # Here we cast to float64 to support SciPy < 1.11.2
        grad = np.asarray([-g @ F, -g.sum()], dtype=np.float64)
        return loss, grad

    AB0 = np.array([0.0, log((prior0 + 1.0) / (prior1 + 1.0))])

    opt_result = minimize(
        loss_grad,
        AB0,
        method="L-BFGS-B",
        jac=True,
        options={
            "gtol": 1e-6,
            "ftol": 64 * np.finfo(float).eps,
        },
    )
    AB_ = opt_result.x

    # The tuned multiplicative parameter is converted back to the original
    # input feature scale. The offset parameter does not need rescaling since
    # we did not rescale the outcome variable.
    return AB_[0] / scale_constant, AB_[1]


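# Illustrative note (not part of the upstream scikit-learn source): the pair
# (a, b) returned by `_sigmoid_calibration` parameterizes Platt's sigmoid
#
#     P(y = 1 | f) = 1 / (1 + exp(a * f + b)) = expit(-(a * f + b)),
#
# which is exactly the transform applied by `_SigmoidCalibration.predict`
# below. Dividing the fitted slope by `scale_constant` undoes the rescaling of
# F, so the returned parameters apply to the original, unscaled predictions.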
class _SigmoidCalibration(RegressorMixin, BaseEstimator):
    """Sigmoid regression model.

    Attributes
    ----------
    a_ : float
        The slope.

    b_ : float
        The intercept.
    """

    def fit(self, X, y, sample_weight=None):
        """Fit the model using X, y as training data.

        Parameters
        ----------
        X : array-like of shape (n_samples,)
            Training data.

        y : array-like of shape (n_samples,)
            Training target.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted.

        Returns
        -------
        self : object
            Returns an instance of self.
        """
        X = column_or_1d(X)
        y = column_or_1d(y)
        X, y = indexable(X, y)

        self.a_, self.b_ = _sigmoid_calibration(X, y, sample_weight)
        return self

    def predict(self, T):
        """Predict new data by applying the fitted sigmoid.

        Parameters
        ----------
        T : array-like of shape (n_samples,)
            Data to predict from.

        Returns
        -------
        T_ : ndarray of shape (n_samples,)
            The predicted data.
        """
        T = column_or_1d(T)
        return expit(-(self.a_ * T + self.b_))


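# Illustrative sketch (hypothetical, not part of the upstream scikit-learn
# source): minimal use of the private `_SigmoidCalibration` helper above, kept
# in comments so the module has no import-time side effects.
#
#   >>> import numpy as np
#   >>> from sklearn.calibration import _SigmoidCalibration
#   >>> scores = np.array([-2.0, -1.0, 1.0, 2.0])   # uncalibrated scores
#   >>> labels = np.array([0, 0, 1, 1])
#   >>> sig = _SigmoidCalibration().fit(scores, labels)
#   >>> sig.predict(scores)  # monotonically increasing values in (0, 1)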
@validate_params(
    {
        "y_true": ["array-like"],
        "y_prob": ["array-like"],
        "pos_label": [Real, str, "boolean", None],
        "n_bins": [Interval(Integral, 1, None, closed="left")],
        "strategy": [StrOptions({"uniform", "quantile"})],
    },
    prefer_skip_nested_validation=True,
)
def calibration_curve(
    y_true,
    y_prob,
    *,
    pos_label=None,
    n_bins=5,
    strategy="uniform",
):
    """Compute true and predicted probabilities for a calibration curve.

    The method assumes the inputs come from a binary classifier, and
    discretizes the [0, 1] interval into bins.

    Calibration curves may also be referred to as reliability diagrams.

    Read more in the :ref:`User Guide <calibration>`.

    Parameters
    ----------
    y_true : array-like of shape (n_samples,)
        True targets.

    y_prob : array-like of shape (n_samples,)
        Probabilities of the positive class.

    pos_label : int, float, bool or str, default=None
        The label of the positive class.

        .. versionadded:: 1.1

    n_bins : int, default=5
        Number of bins to discretize the [0, 1] interval. A bigger number
        requires more data. Bins with no samples (i.e. without
        corresponding values in `y_prob`) will not be returned, thus the
        returned arrays may have less than `n_bins` values.

    strategy : {'uniform', 'quantile'}, default='uniform'
        Strategy used to define the widths of the bins.

        uniform
            The bins have identical widths.
        quantile
            The bins have the same number of samples and depend on `y_prob`.

    Returns
    -------
    prob_true : ndarray of shape (n_bins,) or smaller
        The proportion of samples whose class is the positive class, in each
        bin (fraction of positives).

    prob_pred : ndarray of shape (n_bins,) or smaller
        The mean predicted probability in each bin.

    References
    ----------
    Alexandru Niculescu-Mizil and Rich Caruana (2005) Predicting Good
    Probabilities With Supervised Learning, in Proceedings of the 22nd
    International Conference on Machine Learning (ICML).
    See section 4 (Qualitative Analysis of Predictions).

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.calibration import calibration_curve
    >>> y_true = np.array([0, 0, 0, 0, 1, 1, 1, 1, 1])
    >>> y_pred = np.array([0.1, 0.2, 0.3, 0.4, 0.65, 0.7, 0.8, 0.9,  1.])
    >>> prob_true, prob_pred = calibration_curve(y_true, y_pred, n_bins=3)
    >>> prob_true
    array([0. , 0.5, 1. ])
    >>> prob_pred
    array([0.2  , 0.525, 0.85 ])
    """
    y_true = column_or_1d(y_true)
    y_prob = column_or_1d(y_prob)
    check_consistent_length(y_true, y_prob)
    pos_label = _check_pos_label_consistency(pos_label, y_true)

    if y_prob.min() < 0 or y_prob.max() > 1:
        raise ValueError("y_prob has values outside [0, 1].")

    labels = np.unique(y_true)
    if len(labels) > 2:
        raise ValueError(
            f"Only binary classification is supported. Provided labels {labels}."
        )
    y_true = y_true == pos_label

    if strategy == "quantile":  # Determine bin edges by distribution of data
        quantiles = np.linspace(0, 1, n_bins + 1)
        bins = np.percentile(y_prob, quantiles * 100)
    elif strategy == "uniform":
        bins = np.linspace(0.0, 1.0, n_bins + 1)
    else:
        raise ValueError(
            "Invalid entry to 'strategy' input. Strategy "
            "must be either 'quantile' or 'uniform'."
        )

    binids = np.searchsorted(bins[1:-1], y_prob)

    bin_sums = np.bincount(binids, weights=y_prob, minlength=len(bins))
    bin_true = np.bincount(binids, weights=y_true, minlength=len(bins))
    bin_total = np.bincount(binids, minlength=len(bins))

    nonzero = bin_total != 0
    prob_true = bin_true[nonzero] / bin_total[nonzero]
    prob_pred = bin_sums[nonzero] / bin_total[nonzero]

    return prob_true, prob_pred


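# Illustrative arithmetic (not part of the upstream scikit-learn source) behind
# the docstring example of `calibration_curve` above: with `n_bins=3` and
# `strategy="uniform"` the bin edges are [0, 1/3, 2/3, 1]; predictions
# 0.1, 0.2, 0.3 land in the first bin (mean 0.2, no positives), 0.4 and 0.65 in
# the second (mean 0.525, one of two positive), and 0.7, 0.8, 0.9, 1.0 in the
# third (mean 0.85, all positive), giving prob_true = [0., 0.5, 1.] and
# prob_pred = [0.2, 0.525, 0.85].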
class CalibrationDisplay(_BinaryClassifierCurveDisplayMixin):
    """Calibration curve (also known as reliability diagram) visualization.

    It is recommended to use
    :func:`~sklearn.calibration.CalibrationDisplay.from_estimator` or
    :func:`~sklearn.calibration.CalibrationDisplay.from_predictions`
    to create a `CalibrationDisplay`. All parameters are stored as attributes.

    Read more about calibration in the :ref:`User Guide <calibration>` and
    more about the scikit-learn visualization API in :ref:`visualizations`.

    For an example on how to use the visualization, see
    :ref:`sphx_glr_auto_examples_calibration_plot_calibration_curve.py`.

    .. versionadded:: 1.0

    Parameters
    ----------
    prob_true : ndarray of shape (n_bins,)
        The proportion of samples whose class is the positive class (fraction
        of positives), in each bin.

    prob_pred : ndarray of shape (n_bins,)
        The mean predicted probability in each bin.

    y_prob : ndarray of shape (n_samples,)
        Probability estimates for the positive class, for each sample.

    estimator_name : str, default=None
        Name of estimator. If None, the estimator name is not shown.

    pos_label : int, float, bool or str, default=None
        The positive class when computing the calibration curve.
        By default, `pos_label` is set to `estimators.classes_[1]` when using
        `from_estimator` and set to 1 when using `from_predictions`.

        .. versionadded:: 1.1

    Attributes
    ----------
    line_ : matplotlib Artist
        Calibration curve.

    ax_ : matplotlib Axes
        Axes with calibration curve.

    figure_ : matplotlib Figure
        Figure containing the curve.

    See Also
    --------
    calibration_curve : Compute true and predicted probabilities for a
        calibration curve.
    CalibrationDisplay.from_predictions : Plot calibration curve using true
        and predicted labels.
    CalibrationDisplay.from_estimator : Plot calibration curve using an
        estimator and data.

    Examples
    --------
    >>> from sklearn.datasets import make_classification
    >>> from sklearn.model_selection import train_test_split
    >>> from sklearn.linear_model import LogisticRegression
    >>> from sklearn.calibration import calibration_curve, CalibrationDisplay
    >>> X, y = make_classification(random_state=0)
    >>> X_train, X_test, y_train, y_test = train_test_split(
    ...     X, y, random_state=0)
    >>> clf = LogisticRegression(random_state=0)
    >>> clf.fit(X_train, y_train)
    LogisticRegression(random_state=0)
    >>> y_prob = clf.predict_proba(X_test)[:, 1]
    >>> prob_true, prob_pred = calibration_curve(y_test, y_prob, n_bins=10)
    >>> disp = CalibrationDisplay(prob_true, prob_pred, y_prob)
    >>> disp.plot()
    <...>
    """

    def __init__(
        self, prob_true, prob_pred, y_prob, *, estimator_name=None, pos_label=None
    ):
        self.prob_true = prob_true
        self.prob_pred = prob_pred
        self.y_prob = y_prob
        self.estimator_name = estimator_name
        self.pos_label = pos_label

    def plot(self, *, ax=None, name=None, ref_line=True, **kwargs):
        """Plot visualization.

        Extra keyword arguments will be passed to
        :func:`matplotlib.pyplot.plot`.

        Parameters
        ----------
        ax : Matplotlib Axes, default=None
            Axes object to plot on. If `None`, a new figure and axes is
            created.

        name : str, default=None
            Name for labeling curve. If `None`, use `estimator_name` if
            not `None`, otherwise no labeling is shown.

        ref_line : bool, default=True
            If `True`, plots a reference line representing a perfectly
            calibrated classifier.

        **kwargs : dict
            Keyword arguments to be passed to :func:`matplotlib.pyplot.plot`.

        Returns
        -------
        display : :class:`~sklearn.calibration.CalibrationDisplay`
            Object that stores computed values.
        """
        self.ax_, self.figure_, name = self._validate_plot_params(ax=ax, name=name)

        info_pos_label = (
            f"(Positive class: {self.pos_label})" if self.pos_label is not None else ""
        )

        default_line_kwargs = {"marker": "s", "linestyle": "-"}
        if name is not None:
            default_line_kwargs["label"] = name
        line_kwargs = _validate_style_kwargs(default_line_kwargs, kwargs)

        ref_line_label = "Perfectly calibrated"
        existing_ref_line = ref_line_label in self.ax_.get_legend_handles_labels()[1]
        if ref_line and not existing_ref_line:
            self.ax_.plot([0, 1], [0, 1], "k:", label=ref_line_label)
        self.line_ = self.ax_.plot(self.prob_pred, self.prob_true, **line_kwargs)[0]

        # We always have to show the legend for at least the reference line
        self.ax_.legend(loc="lower right")

        xlabel = f"Mean predicted probability {info_pos_label}"
        ylabel = f"Fraction of positives {info_pos_label}"
        self.ax_.set(xlabel=xlabel, ylabel=ylabel)

        return self

    @classmethod
    def from_estimator(
        cls,
        estimator,
        X,
        y,
        *,
        n_bins=5,
        strategy="uniform",
        pos_label=None,
        name=None,
        ref_line=True,
        ax=None,
        **kwargs,
    ):
        """Plot calibration curve using a binary classifier and data.

        A calibration curve, also known as a reliability diagram, uses inputs
        from a binary classifier and plots the average predicted probability
        for each bin against the fraction of positive classes, on the
        y-axis.

        Extra keyword arguments will be passed to
        :func:`matplotlib.pyplot.plot`.

        Read more about calibration in the :ref:`User Guide <calibration>` and
        more about the scikit-learn visualization API in :ref:`visualizations`.

        .. versionadded:: 1.0

        Parameters
        ----------
        estimator : estimator instance
            Fitted classifier or a fitted :class:`~sklearn.pipeline.Pipeline`
            in which the last estimator is a classifier. The classifier must
            have a :term:`predict_proba` method.

        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input values.

        y : array-like of shape (n_samples,)
            Binary target values.

        n_bins : int, default=5
            Number of bins to discretize the [0, 1] interval into when
            calculating the calibration curve. A bigger number requires more
            data.

        strategy : {'uniform', 'quantile'}, default='uniform'
            Strategy used to define the widths of the bins.

            - `'uniform'`: The bins have identical widths.
            - `'quantile'`: The bins have the same number of samples and depend
              on predicted probabilities.

        pos_label : int, float, bool or str, default=None
            The positive class when computing the calibration curve.
            By default, `estimators.classes_[1]` is considered as the
            positive class.

            .. versionadded:: 1.1

        name : str, default=None
            Name for labeling curve. If `None`, the name of the estimator is
            used.

        ref_line : bool, default=True
            If `True`, plots a reference line representing a perfectly
            calibrated classifier.

        ax : matplotlib axes, default=None
            Axes object to plot on. If `None`, a new figure and axes is
            created.

        **kwargs : dict
            Keyword arguments to be passed to :func:`matplotlib.pyplot.plot`.

        Returns
        -------
        display : :class:`~sklearn.calibration.CalibrationDisplay`.
            Object that stores computed values.

        See Also
        --------
        CalibrationDisplay.from_predictions : Plot calibration curve using true
            and predicted labels.

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> from sklearn.datasets import make_classification
        >>> from sklearn.model_selection import train_test_split
        >>> from sklearn.linear_model import LogisticRegression
        >>> from sklearn.calibration import CalibrationDisplay
        >>> X, y = make_classification(random_state=0)
        >>> X_train, X_test, y_train, y_test = train_test_split(
        ...     X, y, random_state=0)
        >>> clf = LogisticRegression(random_state=0)
        >>> clf.fit(X_train, y_train)
        LogisticRegression(random_state=0)
        >>> disp = CalibrationDisplay.from_estimator(clf, X_test, y_test)
        >>> plt.show()
        """
        y_prob, pos_label, name = cls._validate_and_get_response_values(
            estimator,
            X,
            y,
            response_method="predict_proba",
            pos_label=pos_label,
            name=name,
        )

        return cls.from_predictions(
            y,
            y_prob,
            n_bins=n_bins,
            strategy=strategy,
            pos_label=pos_label,
            name=name,
            ref_line=ref_line,
            ax=ax,
            **kwargs,
        )

                @classmethod
         | 
| 1313 | 
            +
                def from_predictions(
         | 
| 1314 | 
            +
                    cls,
         | 
| 1315 | 
            +
                    y_true,
         | 
| 1316 | 
            +
                    y_prob,
         | 
| 1317 | 
            +
                    *,
         | 
| 1318 | 
            +
                    n_bins=5,
         | 
| 1319 | 
            +
                    strategy="uniform",
         | 
| 1320 | 
            +
                    pos_label=None,
         | 
| 1321 | 
            +
                    name=None,
         | 
| 1322 | 
            +
                    ref_line=True,
         | 
| 1323 | 
            +
                    ax=None,
         | 
| 1324 | 
            +
                    **kwargs,
         | 
| 1325 | 
            +
                ):
         | 
| 1326 | 
            +
                    """Plot calibration curve using true labels and predicted probabilities.
         | 
| 1327 | 
            +
             | 
| 1328 | 
            +
                    Calibration curve, also known as reliability diagram, uses inputs
         | 
| 1329 | 
            +
                    from a binary classifier and plots the average predicted probability
         | 
| 1330 | 
            +
                    for each bin against the fraction of positive classes, on the
         | 
| 1331 | 
            +
                    y-axis.
         | 
| 1332 | 
            +
             | 
| 1333 | 
            +
                    Extra keyword arguments will be passed to
         | 
| 1334 | 
            +
                    :func:`matplotlib.pyplot.plot`.
         | 
| 1335 | 
            +
             | 
| 1336 | 
            +
                    Read more about calibration in the :ref:`User Guide <calibration>` and
         | 
| 1337 | 
            +
                    more about the scikit-learn visualization API in :ref:`visualizations`.
         | 
| 1338 | 
            +
             | 
| 1339 | 
            +
                    .. versionadded:: 1.0
         | 
| 1340 | 
            +
             | 
| 1341 | 
            +
                    Parameters
         | 
| 1342 | 
            +
                    ----------
         | 
| 1343 | 
            +
                    y_true : array-like of shape (n_samples,)
         | 
| 1344 | 
            +
                        True labels.
         | 
| 1345 | 
            +
             | 
| 1346 | 
            +
                    y_prob : array-like of shape (n_samples,)
         | 
| 1347 | 
            +
                        The predicted probabilities of the positive class.
         | 
| 1348 | 
            +
             | 
| 1349 | 
            +
                    n_bins : int, default=5
         | 
| 1350 | 
            +
                        Number of bins to discretize the [0, 1] interval into when
         | 
| 1351 | 
            +
                        calculating the calibration curve. A bigger number requires more
         | 
| 1352 | 
            +
                        data.
         | 
| 1353 | 
            +
             | 
| 1354 | 
            +
                    strategy : {'uniform', 'quantile'}, default='uniform'
         | 
| 1355 | 
            +
                        Strategy used to define the widths of the bins.
         | 
| 1356 | 
            +
             | 
| 1357 | 
            +
                        - `'uniform'`: The bins have identical widths.
         | 
| 1358 | 
            +
                        - `'quantile'`: The bins have the same number of samples and depend
         | 
| 1359 | 
            +
                          on predicted probabilities.
         | 
| 1360 | 
            +
             | 
| 1361 | 
            +
                    pos_label : int, float, bool or str, default=None
         | 
| 1362 | 
            +
                        The positive class when computing the calibration curve.
         | 
| 1363 | 
            +
                        By default `pos_label` is set to 1.
         | 
| 1364 | 
            +
             | 
| 1365 | 
            +
                        .. versionadded:: 1.1
         | 
| 1366 | 
            +
             | 
| 1367 | 
            +
                    name : str, default=None
         | 
| 1368 | 
            +
                        Name for labeling curve.
         | 
| 1369 | 
            +
             | 
| 1370 | 
            +
                    ref_line : bool, default=True
         | 
| 1371 | 
            +
                        If `True`, plots a reference line representing a perfectly
         | 
| 1372 | 
            +
                        calibrated classifier.
         | 
| 1373 | 
            +
             | 
| 1374 | 
            +
                    ax : matplotlib axes, default=None
         | 
| 1375 | 
            +
                        Axes object to plot on. If `None`, a new figure and axes is
         | 
| 1376 | 
            +
                        created.
         | 
| 1377 | 
            +
             | 
| 1378 | 
            +
                    **kwargs : dict
         | 
| 1379 | 
            +
                        Keyword arguments to be passed to :func:`matplotlib.pyplot.plot`.
         | 
| 1380 | 
            +
             | 
| 1381 | 
            +
                    Returns
         | 
| 1382 | 
            +
                    -------
         | 
| 1383 | 
            +
                    display : :class:`~sklearn.calibration.CalibrationDisplay`.
         | 
| 1384 | 
            +
                        Object that stores computed values.
         | 
| 1385 | 
            +
             | 
| 1386 | 
            +
                    See Also
         | 
| 1387 | 
            +
                    --------
         | 
| 1388 | 
            +
                    CalibrationDisplay.from_estimator : Plot calibration curve using an
         | 
| 1389 | 
            +
                        estimator and data.
         | 
| 1390 | 
            +
             | 
| 1391 | 
            +
                    Examples
         | 
| 1392 | 
            +
                    --------
         | 
| 1393 | 
            +
                    >>> import matplotlib.pyplot as plt
         | 
| 1394 | 
            +
                    >>> from sklearn.datasets import make_classification
         | 
| 1395 | 
            +
                    >>> from sklearn.model_selection import train_test_split
         | 
| 1396 | 
            +
                    >>> from sklearn.linear_model import LogisticRegression
         | 
| 1397 | 
            +
                    >>> from sklearn.calibration import CalibrationDisplay
         | 
| 1398 | 
            +
                    >>> X, y = make_classification(random_state=0)
         | 
| 1399 | 
            +
                    >>> X_train, X_test, y_train, y_test = train_test_split(
         | 
| 1400 | 
            +
                    ...     X, y, random_state=0)
         | 
| 1401 | 
            +
                    >>> clf = LogisticRegression(random_state=0)
         | 
| 1402 | 
            +
                    >>> clf.fit(X_train, y_train)
         | 
| 1403 | 
            +
                    LogisticRegression(random_state=0)
         | 
| 1404 | 
            +
                    >>> y_prob = clf.predict_proba(X_test)[:, 1]
         | 
| 1405 | 
            +
                    >>> disp = CalibrationDisplay.from_predictions(y_test, y_prob)
         | 
| 1406 | 
            +
                    >>> plt.show()
         | 
| 1407 | 
            +
                    """
         | 
| 1408 | 
            +
                    pos_label_validated, name = cls._validate_from_predictions_params(
         | 
| 1409 | 
            +
                        y_true, y_prob, sample_weight=None, pos_label=pos_label, name=name
         | 
| 1410 | 
            +
                    )
         | 
| 1411 | 
            +
             | 
| 1412 | 
            +
                    prob_true, prob_pred = calibration_curve(
         | 
| 1413 | 
            +
                        y_true, y_prob, n_bins=n_bins, strategy=strategy, pos_label=pos_label
         | 
| 1414 | 
            +
                    )
         | 
| 1415 | 
            +
             | 
| 1416 | 
            +
                    disp = cls(
         | 
| 1417 | 
            +
                        prob_true=prob_true,
         | 
| 1418 | 
            +
                        prob_pred=prob_pred,
         | 
| 1419 | 
            +
                        y_prob=y_prob,
         | 
| 1420 | 
            +
                        estimator_name=name,
         | 
| 1421 | 
            +
                        pos_label=pos_label_validated,
         | 
| 1422 | 
            +
                    )
         | 
| 1423 | 
            +
                    return disp.plot(ax=ax, ref_line=ref_line, **kwargs)
         | 
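The two class methods above differ only in who computes `y_prob`: `from_estimator` calls `predict_proba` on a fitted classifier, while `from_predictions` takes the probabilities directly. Because both accept an `ax` argument, curves for several models can be overlaid on one reliability diagram. A minimal sketch of that pattern (the model choices and `n_bins=10` are illustrative, not taken from the file above):

```python
# Illustrative only: overlay two models' calibration curves on one axes object.
import matplotlib.pyplot as plt
from sklearn.calibration import CalibrationDisplay
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=2000, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

fig, ax = plt.subplots()

# from_estimator computes predict_proba internally.
lr = LogisticRegression(random_state=0).fit(X_train, y_train)
CalibrationDisplay.from_estimator(lr, X_test, y_test, n_bins=10, name="logreg", ax=ax)

# from_predictions takes precomputed probabilities and reuses the same axes.
rf = RandomForestClassifier(random_state=0).fit(X_train, y_train)
y_prob = rf.predict_proba(X_test)[:, 1]
CalibrationDisplay.from_predictions(
    y_test, y_prob, n_bins=10, strategy="quantile", name="forest", ax=ax
)
plt.show()
```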
    	
evalkit_tf437/lib/python3.10/site-packages/sklearn/conftest.py
ADDED
@@ -0,0 +1,358 @@
# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

import builtins
import platform
import sys
from contextlib import suppress
from functools import wraps
from os import environ
from unittest import SkipTest

import joblib
import numpy as np
import pytest
from _pytest.doctest import DoctestItem
from threadpoolctl import threadpool_limits

from sklearn import set_config
from sklearn._min_dependencies import PYTEST_MIN_VERSION
from sklearn.datasets import (
    fetch_20newsgroups,
    fetch_20newsgroups_vectorized,
    fetch_california_housing,
    fetch_covtype,
    fetch_kddcup99,
    fetch_lfw_pairs,
    fetch_lfw_people,
    fetch_olivetti_faces,
    fetch_rcv1,
    fetch_species_distributions,
)
from sklearn.utils._testing import get_pytest_filterwarning_lines
from sklearn.utils.fixes import (
    _IS_32BIT,
    np_base_version,
    parse_version,
    sp_version,
)

if parse_version(pytest.__version__) < parse_version(PYTEST_MIN_VERSION):
    raise ImportError(
        f"Your version of pytest is too old. Got version {pytest.__version__}, you"
        f" should have pytest >= {PYTEST_MIN_VERSION} installed."
    )

scipy_datasets_require_network = sp_version >= parse_version("1.10")


def raccoon_face_or_skip():
    # SciPy >= 1.10 requires network access to get the data
    if scipy_datasets_require_network:
        run_network_tests = environ.get("SKLEARN_SKIP_NETWORK_TESTS", "1") == "0"
        if not run_network_tests:
            raise SkipTest("test is enabled when SKLEARN_SKIP_NETWORK_TESTS=0")

        try:
            import pooch  # noqa
        except ImportError:
            raise SkipTest("test requires pooch to be installed")

        from scipy.datasets import face
    else:
        from scipy.misc import face

    return face(gray=True)


dataset_fetchers = {
    "fetch_20newsgroups_fxt": fetch_20newsgroups,
    "fetch_20newsgroups_vectorized_fxt": fetch_20newsgroups_vectorized,
    "fetch_california_housing_fxt": fetch_california_housing,
    "fetch_covtype_fxt": fetch_covtype,
    "fetch_kddcup99_fxt": fetch_kddcup99,
    "fetch_lfw_pairs_fxt": fetch_lfw_pairs,
    "fetch_lfw_people_fxt": fetch_lfw_people,
    "fetch_olivetti_faces_fxt": fetch_olivetti_faces,
    "fetch_rcv1_fxt": fetch_rcv1,
    "fetch_species_distributions_fxt": fetch_species_distributions,
}

if scipy_datasets_require_network:
    dataset_fetchers["raccoon_face_fxt"] = raccoon_face_or_skip

_SKIP32_MARK = pytest.mark.skipif(
    environ.get("SKLEARN_RUN_FLOAT32_TESTS", "0") != "1",
    reason="Set SKLEARN_RUN_FLOAT32_TESTS=1 to run float32 dtype tests",
)


# Global fixtures
@pytest.fixture(params=[pytest.param(np.float32, marks=_SKIP32_MARK), np.float64])
def global_dtype(request):
    yield request.param


def _fetch_fixture(f):
    """Fetch dataset (download if missing and requested by environment)."""
    download_if_missing = environ.get("SKLEARN_SKIP_NETWORK_TESTS", "1") == "0"

    @wraps(f)
    def wrapped(*args, **kwargs):
        kwargs["download_if_missing"] = download_if_missing
        try:
            return f(*args, **kwargs)
        except OSError as e:
            if str(e) != "Data not found and `download_if_missing` is False":
                raise
            pytest.skip("test is enabled when SKLEARN_SKIP_NETWORK_TESTS=0")

    return pytest.fixture(lambda: wrapped)


# Adds fixtures for fetching data
fetch_20newsgroups_fxt = _fetch_fixture(fetch_20newsgroups)
fetch_20newsgroups_vectorized_fxt = _fetch_fixture(fetch_20newsgroups_vectorized)
fetch_california_housing_fxt = _fetch_fixture(fetch_california_housing)
fetch_covtype_fxt = _fetch_fixture(fetch_covtype)
fetch_kddcup99_fxt = _fetch_fixture(fetch_kddcup99)
fetch_lfw_pairs_fxt = _fetch_fixture(fetch_lfw_pairs)
fetch_lfw_people_fxt = _fetch_fixture(fetch_lfw_people)
fetch_olivetti_faces_fxt = _fetch_fixture(fetch_olivetti_faces)
fetch_rcv1_fxt = _fetch_fixture(fetch_rcv1)
fetch_species_distributions_fxt = _fetch_fixture(fetch_species_distributions)
raccoon_face_fxt = pytest.fixture(raccoon_face_or_skip)

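Each `*_fxt` fixture defined above yields the wrapped fetcher itself, so a test calls it and either gets the data (when `SKLEARN_SKIP_NETWORK_TESTS=0`) or is skipped. A hypothetical test consuming one of these fixtures might look like this (the test name and assertion are illustrative, not part of the file):

```python
# Hypothetical test module, not part of the diff above.
def test_california_housing_n_features(fetch_california_housing_fxt):
    # The fixture yields the wrapped fetcher; calling it either returns the
    # dataset or skips the test when SKLEARN_SKIP_NETWORK_TESTS != "0".
    bunch = fetch_california_housing_fxt()
    assert bunch.data.shape[1] == 8
```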
def pytest_collection_modifyitems(config, items):
    """Called after collect is completed.

    Parameters
    ----------
    config : pytest config
    items : list of collected items
    """
    run_network_tests = environ.get("SKLEARN_SKIP_NETWORK_TESTS", "1") == "0"
    skip_network = pytest.mark.skip(
        reason="test is enabled when SKLEARN_SKIP_NETWORK_TESTS=0"
    )

    # download datasets during collection to avoid thread unsafe behavior
    # when running pytest in parallel with pytest-xdist
    dataset_features_set = set(dataset_fetchers)
    datasets_to_download = set()

    for item in items:
        if isinstance(item, DoctestItem) and "fetch_" in item.name:
            fetcher_function_name = item.name.split(".")[-1]
            dataset_fetchers_key = f"{fetcher_function_name}_fxt"
            dataset_to_fetch = set([dataset_fetchers_key]) & dataset_features_set
        elif not hasattr(item, "fixturenames"):
            continue
        else:
            item_fixtures = set(item.fixturenames)
            dataset_to_fetch = item_fixtures & dataset_features_set

        if not dataset_to_fetch:
            continue

        if run_network_tests:
            datasets_to_download |= dataset_to_fetch
        else:
            # network tests are skipped
            item.add_marker(skip_network)

    # Only download datasets on the first worker spawned by pytest-xdist
    # to avoid thread unsafe behavior. If pytest-xdist is not used, we still
    # download before tests run.
    worker_id = environ.get("PYTEST_XDIST_WORKER", "gw0")
    if worker_id == "gw0" and run_network_tests:
        for name in datasets_to_download:
            with suppress(SkipTest):
                dataset_fetchers[name]()

    for item in items:
        # Known failure with GradientBoostingClassifier on ARM64
        if (
            item.name.endswith("GradientBoostingClassifier")
            and platform.machine() == "aarch64"
        ):
            marker = pytest.mark.xfail(
                reason=(
                    "known failure. See "
                    "https://github.com/scikit-learn/scikit-learn/issues/17797"  # noqa
                )
            )
            item.add_marker(marker)

    skip_doctests = False
    try:
        import matplotlib  # noqa
    except ImportError:
        skip_doctests = True
        reason = "matplotlib is required to run the doctests"

    if _IS_32BIT:
        reason = "doctests are only run when the default numpy int is 64 bits."
        skip_doctests = True
    elif sys.platform.startswith("win32"):
        reason = (
            "doctests are not run for Windows because numpy arrays "
            "repr is inconsistent across platforms."
        )
        skip_doctests = True

    if np_base_version < parse_version("2"):
        # TODO: configure numpy to output scalar arrays as regular Python scalars
        # once possible to improve readability of the tests docstrings.
        # https://numpy.org/neps/nep-0051-scalar-representation.html#implementation
        reason = "Due to NEP 51 numpy scalar repr has changed in numpy 2"
        skip_doctests = True

    if sp_version < parse_version("1.14"):
        reason = "Scipy sparse matrix repr has changed in scipy 1.14"
        skip_doctests = True

    # Normally doctest has the entire module's scope. Here we set globs to an empty dict
    # to remove the module's scope:
    # https://docs.python.org/3/library/doctest.html#what-s-the-execution-context
    for item in items:
        if isinstance(item, DoctestItem):
            item.dtest.globs = {}

    if skip_doctests:
        skip_marker = pytest.mark.skip(reason=reason)

        for item in items:
            if isinstance(item, DoctestItem):
                # work-around an internal error with pytest if adding a skip
                # mark to a doctest in a contextmanager, see
                # https://github.com/pytest-dev/pytest/issues/8796 for more
                # details.
                if item.name != "sklearn._config.config_context":
                    item.add_marker(skip_marker)
    try:
        import PIL  # noqa

        pillow_installed = True
    except ImportError:
        pillow_installed = False

    if not pillow_installed:
        skip_marker = pytest.mark.skip(reason="pillow (or PIL) not installed!")
        for item in items:
            if item.name in [
                "sklearn.feature_extraction.image.PatchExtractor",
                "sklearn.feature_extraction.image.extract_patches_2d",
            ]:
                item.add_marker(skip_marker)


@pytest.fixture(scope="function")
def pyplot():
    """Setup and teardown fixture for matplotlib.

    This fixture checks if we can import matplotlib. If not, the tests will be
    skipped. Otherwise, we close the figures before and after running the
    functions.

    Returns
    -------
    pyplot : module
        The ``matplotlib.pyplot`` module.
    """
    pyplot = pytest.importorskip("matplotlib.pyplot")
    pyplot.close("all")
    yield pyplot
    pyplot.close("all")


def pytest_generate_tests(metafunc):
    """Parametrization of global_random_seed fixture

    based on the SKLEARN_TESTS_GLOBAL_RANDOM_SEED environment variable.

    The goal of this fixture is to prevent tests that use it from being
    sensitive to a specific seed value while still being deterministic by default.

    See the documentation for the SKLEARN_TESTS_GLOBAL_RANDOM_SEED
    variable for instructions on how to use this fixture.

    https://scikit-learn.org/dev/computing/parallelism.html#sklearn-tests-global-random-seed

    """
    # When using pytest-xdist this function is called in the xdist workers.
    # We rely on SKLEARN_TESTS_GLOBAL_RANDOM_SEED environment variable which is
    # set before running pytest and is available in xdist workers since they
    # are subprocesses.
    RANDOM_SEED_RANGE = list(range(100))  # All seeds in [0, 99] should be valid.
    random_seed_var = environ.get("SKLEARN_TESTS_GLOBAL_RANDOM_SEED")

    default_random_seeds = [42]

    if random_seed_var is None:
        random_seeds = default_random_seeds
    elif random_seed_var == "all":
        random_seeds = RANDOM_SEED_RANGE
    else:
        if "-" in random_seed_var:
            start, stop = random_seed_var.split("-")
            random_seeds = list(range(int(start), int(stop) + 1))
        else:
            random_seeds = [int(random_seed_var)]

        if min(random_seeds) < 0 or max(random_seeds) > 99:
            raise ValueError(
                "The value(s) of the environment variable "
                "SKLEARN_TESTS_GLOBAL_RANDOM_SEED must be in the range [0, 99] "
                f"(or 'all'), got: {random_seed_var}"
            )

    if "global_random_seed" in metafunc.fixturenames:
        metafunc.parametrize("global_random_seed", random_seeds)


def pytest_configure(config):
    # Use matplotlib agg backend during the tests including doctests
    try:
        import matplotlib

        matplotlib.use("agg")
    except ImportError:
        pass

    allowed_parallelism = joblib.cpu_count(only_physical_cores=True)
    xdist_worker_count = environ.get("PYTEST_XDIST_WORKER_COUNT")
    if xdist_worker_count is not None:
        # Set the number of OpenMP and BLAS threads based on the number of workers
        # xdist is using to prevent oversubscription.
        allowed_parallelism = max(allowed_parallelism // int(xdist_worker_count), 1)
    threadpool_limits(allowed_parallelism)

    if environ.get("SKLEARN_WARNINGS_AS_ERRORS", "0") != "0":
        # This seems like the only way to programmatically change the config
        # filterwarnings. This was suggested in
        # https://github.com/pytest-dev/pytest/issues/3311#issuecomment-373177592
        for line in get_pytest_filterwarning_lines():
            config.addinivalue_line("filterwarnings", line)


@pytest.fixture
def hide_available_pandas(monkeypatch):
    """Pretend pandas was not installed."""
    import_orig = builtins.__import__

    def mocked_import(name, *args, **kwargs):
        if name == "pandas":
            raise ImportError()
        return import_orig(name, *args, **kwargs)

    monkeypatch.setattr(builtins, "__import__", mocked_import)


@pytest.fixture
def print_changed_only_false():
    """Set `print_changed_only` to False for the duration of the test."""
    set_config(print_changed_only=False)
    yield
    set_config(print_changed_only=True)  # reset to default
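The `pytest_generate_tests` hook above is what turns the `SKLEARN_TESTS_GLOBAL_RANDOM_SEED` environment variable into a parametrized `global_random_seed` fixture. A hedged sketch of a test that opts in, assuming it lives in a test module collected under this conftest (the test itself is mine, not part of the file):

```python
# Hypothetical test, not part of conftest.py.
import numpy as np


def test_sample_mean_is_seed_insensitive(global_random_seed):
    # With the default environment this runs once with seed 42; with
    # SKLEARN_TESTS_GLOBAL_RANDOM_SEED="all" it runs for every seed in [0, 99].
    rng = np.random.RandomState(global_random_seed)
    x = rng.normal(loc=1.0, size=10_000)
    assert abs(x.mean() - 1.0) < 0.05
```

Per the range parsing above, setting `SKLEARN_TESTS_GLOBAL_RANDOM_SEED="0-9"` before invoking pytest would parametrize such a test over seeds 0 through 9.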
    	
evalkit_tf437/lib/python3.10/site-packages/sklearn/decomposition/tests/__init__.py
ADDED
File without changes

evalkit_tf437/lib/python3.10/site-packages/sklearn/discriminant_analysis.py
ADDED
@@ -0,0 +1,1129 @@
"""Linear and quadratic discriminant analysis."""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

import warnings
from numbers import Integral, Real

import numpy as np
import scipy.linalg
from scipy import linalg

from .base import (
    BaseEstimator,
    ClassifierMixin,
    ClassNamePrefixFeaturesOutMixin,
    TransformerMixin,
    _fit_context,
)
from .covariance import empirical_covariance, ledoit_wolf, shrunk_covariance
from .linear_model._base import LinearClassifierMixin
from .preprocessing import StandardScaler
from .utils._array_api import _expit, device, get_namespace, size
from .utils._param_validation import HasMethods, Interval, StrOptions
from .utils.extmath import softmax
from .utils.multiclass import check_classification_targets, unique_labels
from .utils.validation import check_is_fitted, validate_data

__all__ = ["LinearDiscriminantAnalysis", "QuadraticDiscriminantAnalysis"]


def _cov(X, shrinkage=None, covariance_estimator=None):
    """Estimate covariance matrix (using optional covariance_estimator).

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        Input data.

    shrinkage : {'empirical', 'auto'} or float, default=None
        Shrinkage parameter, possible values:
          - None or 'empirical': no shrinkage (default).
          - 'auto': automatic shrinkage using the Ledoit-Wolf lemma.
          - float between 0 and 1: fixed shrinkage parameter.

        Shrinkage parameter is ignored if `covariance_estimator`
        is not None.

    covariance_estimator : estimator, default=None
        If not None, `covariance_estimator` is used to estimate
        the covariance matrices instead of relying on the empirical
        covariance estimator (with potential shrinkage).
        The object should have a fit method and a ``covariance_`` attribute
        like the estimators in :mod:`sklearn.covariance`.
        If None, the shrinkage parameter drives the estimate.

        .. versionadded:: 0.24

    Returns
    -------
    s : ndarray of shape (n_features, n_features)
        Estimated covariance matrix.
    """
    if covariance_estimator is None:
        shrinkage = "empirical" if shrinkage is None else shrinkage
        if isinstance(shrinkage, str):
            if shrinkage == "auto":
                sc = StandardScaler()  # standardize features
                X = sc.fit_transform(X)
                s = ledoit_wolf(X)[0]
                # rescale
                s = sc.scale_[:, np.newaxis] * s * sc.scale_[np.newaxis, :]
            elif shrinkage == "empirical":
                s = empirical_covariance(X)
        elif isinstance(shrinkage, Real):
            s = shrunk_covariance(empirical_covariance(X), shrinkage)
    else:
        if shrinkage is not None and shrinkage != 0:
            raise ValueError(
                "covariance_estimator and shrinkage parameters "
                "are not None. Only one of the two can be set."
            )
        covariance_estimator.fit(X)
        if not hasattr(covariance_estimator, "covariance_"):
            raise ValueError(
                "%s does not have a covariance_ attribute"
                % covariance_estimator.__class__.__name__
            )
        s = covariance_estimator.covariance_
    return s

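`_cov` is the private helper behind the `shrinkage` and `covariance_estimator` options exposed by the discriminant estimators defined later in this file. A short sketch of how those options surface in the public API (the toy data and parameter values are illustrative, not from the file above):

```python
# Illustrative usage of the shrinkage options that _cov implements.
from sklearn.covariance import OAS
from sklearn.datasets import make_classification
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

X, y = make_classification(n_samples=200, n_features=20, random_state=0)

# Ledoit-Wolf shrinkage, as selected by shrinkage="auto" (needs a non-SVD solver).
lda_auto = LinearDiscriminantAnalysis(solver="lsqr", shrinkage="auto").fit(X, y)

# Fixed shrinkage coefficient in [0, 1].
lda_fixed = LinearDiscriminantAnalysis(solver="lsqr", shrinkage=0.2).fit(X, y)

# Delegate covariance estimation to any object exposing a covariance_ attribute.
lda_oas = LinearDiscriminantAnalysis(solver="lsqr", covariance_estimator=OAS()).fit(X, y)

print(lda_auto.score(X, y), lda_fixed.score(X, y), lda_oas.score(X, y))
```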
| 92 | 
            +
            def _class_means(X, y):
         | 
| 93 | 
            +
                """Compute class means.
         | 
| 94 | 
            +
             | 
| 95 | 
            +
                Parameters
         | 
| 96 | 
            +
                ----------
         | 
| 97 | 
            +
                X : array-like of shape (n_samples, n_features)
         | 
| 98 | 
            +
                    Input data.
         | 
| 99 | 
            +
             | 
| 100 | 
            +
                y : array-like of shape (n_samples,) or (n_samples, n_targets)
         | 
| 101 | 
            +
                    Target values.
         | 
| 102 | 
            +
             | 
| 103 | 
            +
                Returns
         | 
| 104 | 
            +
                -------
         | 
| 105 | 
            +
                means : array-like of shape (n_classes, n_features)
         | 
| 106 | 
            +
                    Class means.
         | 
| 107 | 
            +
                """
         | 
| 108 | 
            +
                xp, is_array_api_compliant = get_namespace(X)
         | 
| 109 | 
            +
                classes, y = xp.unique_inverse(y)
         | 
| 110 | 
            +
                means = xp.zeros((classes.shape[0], X.shape[1]), device=device(X), dtype=X.dtype)
         | 
| 111 | 
            +
             | 
| 112 | 
            +
                if is_array_api_compliant:
         | 
| 113 | 
            +
                    for i in range(classes.shape[0]):
         | 
| 114 | 
            +
                        means[i, :] = xp.mean(X[y == i], axis=0)
         | 
| 115 | 
            +
                else:
         | 
| 116 | 
            +
                    # TODO: Explore the choice of using bincount + add.at as it seems sub optimal
         | 
| 117 | 
            +
                    # from a performance-wise
         | 
| 118 | 
            +
                    cnt = np.bincount(y)
         | 
| 119 | 
            +
                    np.add.at(means, y, X)
         | 
| 120 | 
            +
                    means /= cnt[:, None]
         | 
| 121 | 
            +
                return means
         | 
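# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# The NumPy branch of _class_means in isolation (bincount + add.at), on
# hypothetical toy data, assuming only numpy is installed.
import numpy as np

X_demo = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
y_demo = np.array([0, 1, 1])
classes, y_idx = np.unique(y_demo, return_inverse=True)
means = np.zeros((classes.shape[0], X_demo.shape[1]))
np.add.at(means, y_idx, X_demo)           # accumulate per-class feature sums
means /= np.bincount(y_idx)[:, None]      # divide by per-class sample counts
# means -> [[1., 2.], [4., 5.]]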


def _class_cov(X, y, priors, shrinkage=None, covariance_estimator=None):
    """Compute weighted within-class covariance matrix.

    The per-class covariances are weighted by the class priors.

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        Input data.

    y : array-like of shape (n_samples,) or (n_samples, n_targets)
        Target values.

    priors : array-like of shape (n_classes,)
        Class priors.

    shrinkage : 'auto' or float, default=None
        Shrinkage parameter, possible values:
          - None: no shrinkage (default).
          - 'auto': automatic shrinkage using the Ledoit-Wolf lemma.
          - float between 0 and 1: fixed shrinkage parameter.

        Shrinkage parameter is ignored if `covariance_estimator` is not None.

    covariance_estimator : estimator, default=None
        If not None, `covariance_estimator` is used to estimate
        the covariance matrices instead of relying on the empirical
        covariance estimator (with potential shrinkage).
        The object should have a fit method and a ``covariance_`` attribute
        like the estimators in sklearn.covariance.
        If None, the shrinkage parameter drives the estimate.

        .. versionadded:: 0.24

    Returns
    -------
    cov : array-like of shape (n_features, n_features)
        Weighted within-class covariance matrix.
    """
    classes = np.unique(y)
    cov = np.zeros(shape=(X.shape[1], X.shape[1]))
    for idx, group in enumerate(classes):
        Xg = X[y == group, :]
        cov += priors[idx] * np.atleast_2d(_cov(Xg, shrinkage, covariance_estimator))
    return cov
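# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# The weighted within-class covariance is a prior-weighted sum of per-class
# covariances. A sketch on hypothetical data, using the empirical estimator
# that _cov falls back to when shrinkage and covariance_estimator are None.
import numpy as np
from sklearn.covariance import empirical_covariance

X_demo = np.array([[0.0, 0.0], [1.0, 1.0], [4.0, 5.0], [6.0, 7.0]])
y_demo = np.array([0, 0, 1, 1])
priors_demo = np.array([0.5, 0.5])
within_cov = sum(
    priors_demo[i] * empirical_covariance(X_demo[y_demo == c])
    for i, c in enumerate(np.unique(y_demo))
)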


class DiscriminantAnalysisPredictionMixin:
    """Mixin class for QuadraticDiscriminantAnalysis and NearestCentroid."""

    def decision_function(self, X):
        """Apply decision function to an array of samples.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Array of samples (test vectors).

        Returns
        -------
        y_scores : ndarray of shape (n_samples,) or (n_samples, n_classes)
            Decision function values related to each class, per sample.
            In the two-class case, the shape is `(n_samples,)`, giving the
            log likelihood ratio of the positive class.
        """
        y_scores = self._decision_function(X)
        if len(self.classes_) == 2:
            return y_scores[:, 1] - y_scores[:, 0]
        return y_scores

    def predict(self, X):
        """Perform classification on an array of vectors `X`.

        Returns the class label for each sample.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input vectors, where `n_samples` is the number of samples and
            `n_features` is the number of features.

        Returns
        -------
        y_pred : ndarray of shape (n_samples,)
            Class label for each sample.
        """
        scores = self._decision_function(X)
        return self.classes_.take(scores.argmax(axis=1))

    def predict_proba(self, X):
        """Estimate class probabilities.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input data.

        Returns
        -------
        y_proba : ndarray of shape (n_samples, n_classes)
            Probability estimate of the sample for each class in the
            model, where classes are ordered as they are in `self.classes_`.
        """
        return np.exp(self.predict_log_proba(X))

    def predict_log_proba(self, X):
        """Estimate log class probabilities.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input data.

        Returns
        -------
        y_log_proba : ndarray of shape (n_samples, n_classes)
            Estimated log probabilities.
        """
        scores = self._decision_function(X)
        log_likelihood = scores - scores.max(axis=1)[:, np.newaxis]
        return log_likelihood - np.log(
            np.exp(log_likelihood).sum(axis=1)[:, np.newaxis]
        )
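# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# predict_log_proba above normalizes decision scores with the standard
# max-subtraction trick; the same numerically stable log-softmax in plain NumPy:
import numpy as np

scores = np.array([[1000.0, 1001.0, 999.0]])           # hypothetical decision scores
shifted = scores - scores.max(axis=1, keepdims=True)   # prevents overflow in exp
log_proba = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
proba = np.exp(log_proba)                              # each row sums to 1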


class LinearDiscriminantAnalysis(
    ClassNamePrefixFeaturesOutMixin,
    LinearClassifierMixin,
    TransformerMixin,
    BaseEstimator,
):
    """Linear Discriminant Analysis.

    A classifier with a linear decision boundary, generated by fitting class
    conditional densities to the data and using Bayes' rule.

    The model fits a Gaussian density to each class, assuming that all classes
    share the same covariance matrix.

    The fitted model can also be used to reduce the dimensionality of the input
    by projecting it to the most discriminative directions, using the
    `transform` method.

    .. versionadded:: 0.17

    For a comparison between
    :class:`~sklearn.discriminant_analysis.LinearDiscriminantAnalysis`
    and :class:`~sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis`, see
    :ref:`sphx_glr_auto_examples_classification_plot_lda_qda.py`.

    Read more in the :ref:`User Guide <lda_qda>`.

    Parameters
    ----------
    solver : {'svd', 'lsqr', 'eigen'}, default='svd'
        Solver to use, possible values:
          - 'svd': Singular value decomposition (default).
            Does not compute the covariance matrix, therefore this solver is
            recommended for data with a large number of features.
          - 'lsqr': Least squares solution.
            Can be combined with shrinkage or custom covariance estimator.
          - 'eigen': Eigenvalue decomposition.
            Can be combined with shrinkage or custom covariance estimator.

        .. versionchanged:: 1.2
            `solver="svd"` now has experimental Array API support. See the
            :ref:`Array API User Guide <array_api>` for more details.

    shrinkage : 'auto' or float, default=None
        Shrinkage parameter, possible values:
          - None: no shrinkage (default).
          - 'auto': automatic shrinkage using the Ledoit-Wolf lemma.
          - float between 0 and 1: fixed shrinkage parameter.

        This should be left to None if `covariance_estimator` is used.
        Note that shrinkage works only with 'lsqr' and 'eigen' solvers.

        For a usage example, see
        :ref:`sphx_glr_auto_examples_classification_plot_lda.py`.

    priors : array-like of shape (n_classes,), default=None
        The class prior probabilities. By default, the class proportions are
        inferred from the training data.

    n_components : int, default=None
        Number of components (<= min(n_classes - 1, n_features)) for
        dimensionality reduction. If None, will be set to
        min(n_classes - 1, n_features). This parameter only affects the
        `transform` method.

        For a usage example, see
        :ref:`sphx_glr_auto_examples_decomposition_plot_pca_vs_lda.py`.

    store_covariance : bool, default=False
        If True, explicitly compute the weighted within-class covariance
        matrix when solver is 'svd'. The matrix is always computed
        and stored for the other solvers.

        .. versionadded:: 0.17

    tol : float, default=1.0e-4
        Absolute threshold for a singular value of X to be considered
        significant, used to estimate the rank of X. Dimensions whose
        singular values are non-significant are discarded. Only used if
        solver is 'svd'.

        .. versionadded:: 0.17

    covariance_estimator : covariance estimator, default=None
        If not None, `covariance_estimator` is used to estimate
        the covariance matrices instead of relying on the empirical
        covariance estimator (with potential shrinkage).
        The object should have a fit method and a ``covariance_`` attribute
        like the estimators in :mod:`sklearn.covariance`.
        If None, the shrinkage parameter drives the estimate.

        This should be left to None if `shrinkage` is used.
        Note that `covariance_estimator` works only with 'lsqr' and 'eigen'
        solvers.

        .. versionadded:: 0.24

    Attributes
    ----------
    coef_ : ndarray of shape (n_features,) or (n_classes, n_features)
        Weight vector(s).

    intercept_ : ndarray of shape (n_classes,)
        Intercept term.

    covariance_ : array-like of shape (n_features, n_features)
        Weighted within-class covariance matrix. It corresponds to
        `sum_k prior_k * C_k` where `C_k` is the covariance matrix of the
        samples in class `k`. The `C_k` are estimated using the (potentially
        shrunk) biased estimator of covariance. If solver is 'svd', only
        exists when `store_covariance` is True.

    explained_variance_ratio_ : ndarray of shape (n_components,)
        Percentage of variance explained by each of the selected components.
        If ``n_components`` is not set then all components are stored and the
        sum of explained variances is equal to 1.0. Only available when eigen
        or svd solver is used.

    means_ : array-like of shape (n_classes, n_features)
        Class-wise means.

    priors_ : array-like of shape (n_classes,)
        Class priors (sum to 1).

    scalings_ : array-like of shape (rank, n_classes - 1)
        Scaling of the features in the space spanned by the class centroids.
        Only available for 'svd' and 'eigen' solvers.

    xbar_ : array-like of shape (n_features,)
        Overall mean. Only present if solver is 'svd'.

    classes_ : array-like of shape (n_classes,)
        Unique class labels.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    feature_names_in_ : ndarray of shape (`n_features_in_`,)
        Names of features seen during :term:`fit`. Defined only when `X`
        has feature names that are all strings.

        .. versionadded:: 1.0

    See Also
    --------
    QuadraticDiscriminantAnalysis : Quadratic Discriminant Analysis.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
    >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
    >>> y = np.array([1, 1, 1, 2, 2, 2])
    >>> clf = LinearDiscriminantAnalysis()
    >>> clf.fit(X, y)
    LinearDiscriminantAnalysis()
    >>> print(clf.predict([[-0.8, -1]]))
    [1]
    """

    _parameter_constraints: dict = {
        "solver": [StrOptions({"svd", "lsqr", "eigen"})],
        "shrinkage": [StrOptions({"auto"}), Interval(Real, 0, 1, closed="both"), None],
        "n_components": [Interval(Integral, 1, None, closed="left"), None],
        "priors": ["array-like", None],
        "store_covariance": ["boolean"],
        "tol": [Interval(Real, 0, None, closed="left")],
        "covariance_estimator": [HasMethods("fit"), None],
    }

    def __init__(
        self,
        solver="svd",
        shrinkage=None,
        priors=None,
        n_components=None,
        store_covariance=False,
        tol=1e-4,
        covariance_estimator=None,
    ):
        self.solver = solver
        self.shrinkage = shrinkage
        self.priors = priors
        self.n_components = n_components
        self.store_covariance = store_covariance  # used only in svd solver
        self.tol = tol  # used only in svd solver
        self.covariance_estimator = covariance_estimator

    def _solve_lstsq(self, X, y, shrinkage, covariance_estimator):
        """Least squares solver.

        The least squares solver computes a straightforward solution of the
        optimal decision rule based directly on the discriminant functions.
        It can only be used for classification (with any covariance
        estimator), because estimation of eigenvectors is not performed.
        Therefore, dimensionality reduction with the transform method is not
        supported.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,) or (n_samples, n_classes)
            Target values.

        shrinkage : 'auto', float or None
            Shrinkage parameter, possible values:
              - None: no shrinkage.
              - 'auto': automatic shrinkage using the Ledoit-Wolf lemma.
              - float between 0 and 1: fixed shrinkage parameter.

            Shrinkage parameter is ignored if `covariance_estimator` is
            not None.

        covariance_estimator : estimator, default=None
            If not None, `covariance_estimator` is used to estimate
            the covariance matrices instead of relying on the empirical
            covariance estimator (with potential shrinkage).
            The object should have a fit method and a ``covariance_`` attribute
            like the estimators in sklearn.covariance.
            If None, the shrinkage parameter drives the estimate.

            .. versionadded:: 0.24

        Notes
        -----
        This solver is based on [1]_, section 2.6.2, pp. 39-41.

        References
        ----------
        .. [1] R. O. Duda, P. E. Hart, D. G. Stork. Pattern Classification
           (Second Edition). John Wiley & Sons, Inc., New York, 2001. ISBN
           0-471-05669-3.
        """
        self.means_ = _class_means(X, y)
        self.covariance_ = _class_cov(
            X, y, self.priors_, shrinkage, covariance_estimator
        )
        self.coef_ = linalg.lstsq(self.covariance_, self.means_.T)[0].T
        self.intercept_ = -0.5 * np.diag(np.dot(self.means_, self.coef_.T)) + np.log(
            self.priors_
        )

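# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# The 'lsqr' solver solves covariance @ coef.T = means.T in the least-squares
# sense and derives the intercept from the class means and priors. The same
# linear algebra on small hypothetical inputs:
import numpy as np
from scipy import linalg

covariance_demo = np.array([[2.0, 0.3], [0.3, 1.0]])   # shared within-class covariance
means_demo = np.array([[0.0, 0.0], [1.0, 2.0]])        # one row per class
priors_demo = np.array([0.5, 0.5])
coef_demo = linalg.lstsq(covariance_demo, means_demo.T)[0].T
intercept_demo = -0.5 * np.diag(means_demo @ coef_demo.T) + np.log(priors_demo)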
    def _solve_eigen(self, X, y, shrinkage, covariance_estimator):
        """Eigenvalue solver.

        The eigenvalue solver computes the optimal solution of the Rayleigh
        coefficient (basically the ratio of between class scatter to within
        class scatter). This solver supports both classification and
        dimensionality reduction (with any covariance estimator).

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,) or (n_samples, n_targets)
            Target values.

        shrinkage : 'auto', float or None
            Shrinkage parameter, possible values:
              - None: no shrinkage.
              - 'auto': automatic shrinkage using the Ledoit-Wolf lemma.
              - float between 0 and 1: fixed shrinkage constant.

            Shrinkage parameter is ignored if `covariance_estimator` is
            not None.

        covariance_estimator : estimator, default=None
            If not None, `covariance_estimator` is used to estimate
            the covariance matrices instead of relying on the empirical
            covariance estimator (with potential shrinkage).
            The object should have a fit method and a ``covariance_`` attribute
            like the estimators in sklearn.covariance.
            If None, the shrinkage parameter drives the estimate.

            .. versionadded:: 0.24

        Notes
        -----
        This solver is based on [1]_, section 3.8.3, pp. 121-124.

        References
        ----------
        .. [1] R. O. Duda, P. E. Hart, D. G. Stork. Pattern Classification
           (Second Edition). John Wiley & Sons, Inc., New York, 2001. ISBN
           0-471-05669-3.
        """
        self.means_ = _class_means(X, y)
        self.covariance_ = _class_cov(
            X, y, self.priors_, shrinkage, covariance_estimator
        )

        Sw = self.covariance_  # within scatter
        St = _cov(X, shrinkage, covariance_estimator)  # total scatter
        Sb = St - Sw  # between scatter

        evals, evecs = linalg.eigh(Sb, Sw)
        self.explained_variance_ratio_ = np.sort(evals / np.sum(evals))[::-1][
            : self._max_components
        ]
        evecs = evecs[:, np.argsort(evals)[::-1]]  # sort eigenvectors

        self.scalings_ = evecs
        self.coef_ = np.dot(self.means_, evecs).dot(evecs.T)
        self.intercept_ = -0.5 * np.diag(np.dot(self.means_, self.coef_.T)) + np.log(
            self.priors_
        )

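# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# The 'eigen' solver relies on the generalized symmetric eigenproblem
# Sb v = lambda Sw v, which scipy.linalg.eigh solves directly when given two
# matrices. A sketch with small hypothetical scatter matrices:
import numpy as np
from scipy import linalg

Sw_demo = np.array([[1.0, 0.1], [0.1, 1.0]])   # within-class scatter (positive definite)
Sb_demo = np.array([[2.0, 0.5], [0.5, 0.3]])   # between-class scatter
evals, evecs = linalg.eigh(Sb_demo, Sw_demo)
order = np.argsort(evals)[::-1]                # largest Rayleigh quotients first
directions = evecs[:, order]                   # discriminant directions, as in scalings_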
    def _solve_svd(self, X, y):
        """SVD solver.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,) or (n_samples, n_targets)
            Target values.
        """
        xp, is_array_api_compliant = get_namespace(X)

        if is_array_api_compliant:
            svd = xp.linalg.svd
        else:
            svd = scipy.linalg.svd

        n_samples, n_features = X.shape
        n_classes = self.classes_.shape[0]

        self.means_ = _class_means(X, y)
        if self.store_covariance:
            self.covariance_ = _class_cov(X, y, self.priors_)

        Xc = []
        for idx, group in enumerate(self.classes_):
            Xg = X[y == group]
            Xc.append(Xg - self.means_[idx, :])

        self.xbar_ = self.priors_ @ self.means_

        Xc = xp.concat(Xc, axis=0)

        # 1) within (univariate) scaling by within-class std-dev
        std = xp.std(Xc, axis=0)
        # avoid division by zero in normalization
        std[std == 0] = 1.0
        fac = xp.asarray(1.0 / (n_samples - n_classes), dtype=X.dtype)

        # 2) Within variance scaling
        X = xp.sqrt(fac) * (Xc / std)
        # SVD of centered (within-)scaled data
        U, S, Vt = svd(X, full_matrices=False)

        rank = xp.sum(xp.astype(S > self.tol, xp.int32))
        # Scaling of within covariance is: V' 1/S
        scalings = (Vt[:rank, :] / std).T / S[:rank]
        fac = 1.0 if n_classes == 1 else 1.0 / (n_classes - 1)

        # 3) Between variance scaling
        # Scale weighted centers
        X = (
            (xp.sqrt((n_samples * self.priors_) * fac)) * (self.means_ - self.xbar_).T
        ).T @ scalings
        # Centers are living in a space with n_classes-1 dim (maximum)
        # Use SVD to find projection in the space spanned by the
        # (n_classes) centers
        _, S, Vt = svd(X, full_matrices=False)

        if self._max_components == 0:
            self.explained_variance_ratio_ = xp.empty((0,), dtype=S.dtype)
        else:
            self.explained_variance_ratio_ = (S**2 / xp.sum(S**2))[
                : self._max_components
            ]

        rank = xp.sum(xp.astype(S > self.tol * S[0], xp.int32))
        self.scalings_ = scalings @ Vt.T[:, :rank]
        coef = (self.means_ - self.xbar_) @ self.scalings_
        self.intercept_ = -0.5 * xp.sum(coef**2, axis=1) + xp.log(self.priors_)
        self.coef_ = coef @ self.scalings_.T
        self.intercept_ -= self.xbar_ @ self.coef_.T

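# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# Both rank estimates in _solve_svd come from thresholding singular values; a
# minimal NumPy-only illustration of the relative threshold used for the
# between-scatter step:
import numpy as np

A_demo = np.array([[1.0, 2.0], [2.0, 4.0], [3.0, 6.0]])   # rank-1 by construction
S_demo = np.linalg.svd(A_demo, compute_uv=False)
tol = 1e-4
rank = int(np.sum(S_demo > tol * S_demo[0]))              # -> 1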
    @_fit_context(
        # LinearDiscriminantAnalysis.covariance_estimator is not validated yet
        prefer_skip_nested_validation=False
    )
    def fit(self, X, y):
        """Fit the Linear Discriminant Analysis model.

        .. versionchanged:: 0.19
            `store_covariance` and `tol` have been moved to the main constructor.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,)
            Target values.

        Returns
        -------
        self : object
            Fitted estimator.
        """
        xp, _ = get_namespace(X)

        X, y = validate_data(
            self, X, y, ensure_min_samples=2, dtype=[xp.float64, xp.float32]
        )
        self.classes_ = unique_labels(y)
        n_samples, _ = X.shape
        n_classes = self.classes_.shape[0]

        if n_samples == n_classes:
            raise ValueError(
                "The number of samples must be more than the number of classes."
            )

        if self.priors is None:  # estimate priors from sample
            _, cnts = xp.unique_counts(y)  # non-negative ints
            self.priors_ = xp.astype(cnts, X.dtype) / float(y.shape[0])
        else:
            self.priors_ = xp.asarray(self.priors, dtype=X.dtype)

        if xp.any(self.priors_ < 0):
            raise ValueError("priors must be non-negative")

        if xp.abs(xp.sum(self.priors_) - 1.0) > 1e-5:
            warnings.warn("The priors do not sum to 1. Renormalizing", UserWarning)
            self.priors_ = self.priors_ / self.priors_.sum()

        # Maximum number of components no matter what n_components is
        # specified:
        max_components = min(n_classes - 1, X.shape[1])

        if self.n_components is None:
            self._max_components = max_components
        else:
            if self.n_components > max_components:
                raise ValueError(
                    "n_components cannot be larger than min(n_features, n_classes - 1)."
                )
            self._max_components = self.n_components

        if self.solver == "svd":
            if self.shrinkage is not None:
                raise NotImplementedError("shrinkage not supported with 'svd' solver.")
            if self.covariance_estimator is not None:
                raise ValueError(
                    "covariance estimator "
                    "is not supported "
                    "with svd solver. Try another solver"
                )
            self._solve_svd(X, y)
        elif self.solver == "lsqr":
            self._solve_lstsq(
                X,
                y,
                shrinkage=self.shrinkage,
                covariance_estimator=self.covariance_estimator,
            )
        elif self.solver == "eigen":
            self._solve_eigen(
                X,
                y,
                shrinkage=self.shrinkage,
                covariance_estimator=self.covariance_estimator,
            )
        if size(self.classes_) == 2:  # treat binary case as a special case
            coef_ = xp.asarray(self.coef_[1, :] - self.coef_[0, :], dtype=X.dtype)
            self.coef_ = xp.reshape(coef_, (1, -1))
            intercept_ = xp.asarray(
                self.intercept_[1] - self.intercept_[0], dtype=X.dtype
            )
            self.intercept_ = xp.reshape(intercept_, (1,))
        self._n_features_out = self._max_components
        return self
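# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# A usage sketch of fit() with the three solver configurations the code above
# dispatches to; the blob data is hypothetical and purely illustrative.
from sklearn.covariance import OAS
from sklearn.datasets import make_blobs
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

X_demo, y_demo = make_blobs(n_samples=60, centers=3, n_features=4, random_state=0)
LinearDiscriminantAnalysis(solver="svd").fit(X_demo, y_demo)
LinearDiscriminantAnalysis(solver="lsqr", shrinkage="auto").fit(X_demo, y_demo)
LinearDiscriminantAnalysis(solver="eigen", covariance_estimator=OAS()).fit(X_demo, y_demo)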

    def transform(self, X):
        """Project data to maximize class separation.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Input data.

        Returns
        -------
        X_new : ndarray of shape (n_samples, n_components) or \
            (n_samples, min(rank, n_components))
            Transformed data. In the case of the 'svd' solver, the shape
            is (n_samples, min(rank, n_components)).
        """
        if self.solver == "lsqr":
            raise NotImplementedError(
                "transform not implemented for 'lsqr' solver (use 'svd' or 'eigen')."
            )
        check_is_fitted(self)
        xp, _ = get_namespace(X)
        X = validate_data(self, X, reset=False)

        if self.solver == "svd":
            X_new = (X - self.xbar_) @ self.scalings_
        elif self.solver == "eigen":
            X_new = X @ self.scalings_

        return X_new[:, : self._max_components]

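# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# transform() projects onto at most min(n_classes - 1, n_features) discriminative
# axes; a short sketch on the iris dataset (3 classes -> at most 2 components):
from sklearn.datasets import load_iris
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

X_iris, y_iris = load_iris(return_X_y=True)
lda = LinearDiscriminantAnalysis(n_components=2).fit(X_iris, y_iris)
X_projected = lda.transform(X_iris)   # shape (150, 2)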
    def predict_proba(self, X):
        """Estimate probability.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Input data.

        Returns
        -------
        C : ndarray of shape (n_samples, n_classes)
            Estimated probabilities.
        """
        check_is_fitted(self)
        xp, is_array_api_compliant = get_namespace(X)
        decision = self.decision_function(X)
        if size(self.classes_) == 2:
            proba = _expit(decision, xp)
            return xp.stack([1 - proba, proba], axis=1)
        else:
            return softmax(decision)

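# --- Editor's note: illustrative sketch, not part of the upstream file. ---
# The two branches above in plain NumPy: a sigmoid of the single decision value
# in the binary case, a softmax over per-class scores otherwise. The arrays are
# hypothetical decision values.
import numpy as np

binary_decision = np.array([-0.2, 1.5])                  # shape (n_samples,)
p_pos = 1.0 / (1.0 + np.exp(-binary_decision))
binary_proba = np.stack([1.0 - p_pos, p_pos], axis=1)

multi_decision = np.array([[2.0, 0.5, -1.0]])            # shape (n_samples, n_classes)
shifted = multi_decision - multi_decision.max(axis=1, keepdims=True)
multi_proba = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)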
| 784 | 
            +
                def predict_log_proba(self, X):
         | 
| 785 | 
            +
                    """Estimate log probability.
         | 
| 786 | 
            +
             | 
| 787 | 
            +
                    Parameters
         | 
| 788 | 
            +
                    ----------
         | 
| 789 | 
            +
                    X : array-like of shape (n_samples, n_features)
         | 
| 790 | 
            +
                        Input data.
         | 
| 791 | 
            +
             | 
| 792 | 
            +
                    Returns
         | 
| 793 | 
            +
                    -------
         | 
| 794 | 
            +
                    C : ndarray of shape (n_samples, n_classes)
         | 
| 795 | 
            +
                        Estimated log probabilities.
         | 
| 796 | 
            +
                    """
         | 
| 797 | 
            +
                    xp, _ = get_namespace(X)
         | 
| 798 | 
            +
                    prediction = self.predict_proba(X)
         | 
| 799 | 
            +
             | 
| 800 | 
            +
                    info = xp.finfo(prediction.dtype)
         | 
| 801 | 
            +
                    if hasattr(info, "smallest_normal"):
         | 
| 802 | 
            +
                        smallest_normal = info.smallest_normal
         | 
| 803 | 
            +
                    else:
         | 
| 804 | 
            +
                        # smallest_normal was introduced in NumPy 1.22
         | 
| 805 | 
            +
                        smallest_normal = info.tiny
         | 
| 806 | 
            +
             | 
| 807 | 
            +
                    prediction[prediction == 0.0] += smallest_normal
         | 
| 808 | 
            +
                    return xp.log(prediction)
         | 
| 809 | 
            +
             | 
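The binary branch of `predict_proba` above applies the logistic sigmoid to `decision_function` and stacks the two columns. A small check of that relationship (not part of the diff), assuming the estimator is importable as `sklearn.discriminant_analysis.LinearDiscriminantAnalysis`; `scipy.special.expit` stands in for the array-API helper `_expit` used internally.

import numpy as np
from scipy.special import expit
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]], dtype=float)
y = np.array([0, 0, 0, 1, 1, 1])

clf = LinearDiscriminantAnalysis().fit(X, y)
decision = clf.decision_function(X)  # shape (n_samples,) in the two-class case
proba = clf.predict_proba(X)         # shape (n_samples, 2)

# Column 1 is the sigmoid of the decision values, column 0 its complement,
# mirroring the `xp.stack([1 - proba, proba], axis=1)` line above.
assert np.allclose(proba[:, 1], expit(decision))
assert np.allclose(proba[:, 0], 1.0 - expit(decision))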
+    def decision_function(self, X):
+        """Apply decision function to an array of samples.
+
+        The decision function is equal (up to a constant factor) to the
+        log-posterior of the model, i.e. `log p(y = k | x)`. In a binary
+        classification setting this instead corresponds to the difference
+        `log p(y = 1 | x) - log p(y = 0 | x)`. See :ref:`lda_qda_math`.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            Array of samples (test vectors).
+
+        Returns
+        -------
+        y_scores : ndarray of shape (n_samples,) or (n_samples, n_classes)
+            Decision function values related to each class, per sample.
+            In the two-class case, the shape is `(n_samples,)`, giving the
+            log likelihood ratio of the positive class.
+        """
+        # Only override for the doc
+        return super().decision_function(X)
+
+    def __sklearn_tags__(self):
+        tags = super().__sklearn_tags__()
+        tags.array_api_support = True
+        return tags
+
+
+class QuadraticDiscriminantAnalysis(
+    DiscriminantAnalysisPredictionMixin, ClassifierMixin, BaseEstimator
+):
+    """Quadratic Discriminant Analysis.
+
+    A classifier with a quadratic decision boundary, generated
+    by fitting class conditional densities to the data
+    and using Bayes' rule.
+
+    The model fits a Gaussian density to each class.
+
+    .. versionadded:: 0.17
+
+    For a comparison between
+    :class:`~sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis`
+    and :class:`~sklearn.discriminant_analysis.LinearDiscriminantAnalysis`, see
+    :ref:`sphx_glr_auto_examples_classification_plot_lda_qda.py`.
+
+    Read more in the :ref:`User Guide <lda_qda>`.
+
+    Parameters
+    ----------
+    priors : array-like of shape (n_classes,), default=None
+        Class priors. By default, the class proportions are inferred from the
+        training data.
+
+    reg_param : float, default=0.0
+        Regularizes the per-class covariance estimates by transforming S2 as
+        ``S2 = (1 - reg_param) * S2 + reg_param * np.eye(n_features)``,
+        where S2 corresponds to the `scaling_` attribute of a given class.
+
+    store_covariance : bool, default=False
+        If True, the class covariance matrices are explicitly computed and
+        stored in the `self.covariance_` attribute.
+
+        .. versionadded:: 0.17
+
+    tol : float, default=1.0e-4
+        Absolute threshold for the covariance matrix to be considered rank
+        deficient after applying some regularization (see `reg_param`) to each
+        `Sk` where `Sk` represents covariance matrix for k-th class. This
+        parameter does not affect the predictions. It controls when a warning
+        is raised if the covariance matrix is not full rank.
+
+        .. versionadded:: 0.17
+
+    Attributes
+    ----------
+    covariance_ : list of len n_classes of ndarray \
+            of shape (n_features, n_features)
+        For each class, gives the covariance matrix estimated using the
+        samples of that class. The estimations are unbiased. Only present if
+        `store_covariance` is True.
+
+    means_ : array-like of shape (n_classes, n_features)
+        Class-wise means.
+
+    priors_ : array-like of shape (n_classes,)
+        Class priors (sum to 1).
+
+    rotations_ : list of len n_classes of ndarray of shape (n_features, n_k)
+        For each class k an array of shape (n_features, n_k), where
+        ``n_k = min(n_features, number of elements in class k)``
+        It is the rotation of the Gaussian distribution, i.e. its
+        principal axis. It corresponds to `V`, the matrix of eigenvectors
+        coming from the SVD of `Xk = U S Vt` where `Xk` is the centered
+        matrix of samples from class k.
+
+    scalings_ : list of len n_classes of ndarray of shape (n_k,)
+        For each class, contains the scaling of
+        the Gaussian distributions along its principal axes, i.e. the
+        variance in the rotated coordinate system. It corresponds to `S^2 /
+        (n_samples - 1)`, where `S` is the diagonal matrix of singular values
+        from the SVD of `Xk`, where `Xk` is the centered matrix of samples
+        from class k.
+
+    classes_ : ndarray of shape (n_classes,)
+        Unique class labels.
+
+    n_features_in_ : int
+        Number of features seen during :term:`fit`.
+
+        .. versionadded:: 0.24
+
+    feature_names_in_ : ndarray of shape (`n_features_in_`,)
+        Names of features seen during :term:`fit`. Defined only when `X`
+        has feature names that are all strings.
+
+        .. versionadded:: 1.0
+
+    See Also
+    --------
+    LinearDiscriminantAnalysis : Linear Discriminant Analysis.
+
+    Examples
+    --------
+    >>> from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
+    >>> import numpy as np
+    >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
+    >>> y = np.array([1, 1, 1, 2, 2, 2])
+    >>> clf = QuadraticDiscriminantAnalysis()
+    >>> clf.fit(X, y)
+    QuadraticDiscriminantAnalysis()
+    >>> print(clf.predict([[-0.8, -1]]))
+    [1]
+    """
+
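Not part of the diff: a quick sketch of what `reg_param` does to the stored per-class spectra, assuming this `QuadraticDiscriminantAnalysis` is importable from `sklearn.discriminant_analysis`. As in the `fit` code below, each per-class eigenvalue vector is shrunk as `(1 - reg_param) * S2 + reg_param`.

import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

rng = np.random.RandomState(0)
X = rng.randn(60, 2)
y = np.repeat([0, 1], 30)

plain = QuadraticDiscriminantAnalysis(reg_param=0.0).fit(X, y)
shrunk = QuadraticDiscriminantAnalysis(reg_param=0.5).fit(X, y)

# Each class' spectrum is pulled halfway towards 1, matching the
# `S2 = ((1 - self.reg_param) * S2) + self.reg_param` line in `fit`.
for k in range(2):
    assert np.allclose(shrunk.scalings_[k], 0.5 * plain.scalings_[k] + 0.5)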
+    _parameter_constraints: dict = {
+        "priors": ["array-like", None],
+        "reg_param": [Interval(Real, 0, 1, closed="both")],
+        "store_covariance": ["boolean"],
+        "tol": [Interval(Real, 0, None, closed="left")],
+    }
+
+    def __init__(
+        self, *, priors=None, reg_param=0.0, store_covariance=False, tol=1.0e-4
+    ):
+        self.priors = priors
+        self.reg_param = reg_param
+        self.store_covariance = store_covariance
+        self.tol = tol
+
+    @_fit_context(prefer_skip_nested_validation=True)
+    def fit(self, X, y):
+        """Fit the model according to the given training data and parameters.
+
+        .. versionchanged:: 0.19
+            ``store_covariances`` has been moved to main constructor as
+            ``store_covariance``.
+
+        .. versionchanged:: 0.19
+            ``tol`` has been moved to main constructor.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            Training vector, where `n_samples` is the number of samples and
+            `n_features` is the number of features.
+
+        y : array-like of shape (n_samples,)
+            Target values (integers).
+
+        Returns
+        -------
+        self : object
+            Fitted estimator.
+        """
+        X, y = validate_data(self, X, y)
+        check_classification_targets(y)
+        self.classes_, y = np.unique(y, return_inverse=True)
+        n_samples, n_features = X.shape
+        n_classes = len(self.classes_)
+        if n_classes < 2:
+            raise ValueError(
+                "The number of classes has to be greater than one; got %d class"
+                % (n_classes)
+            )
+        if self.priors is None:
+            self.priors_ = np.bincount(y) / float(n_samples)
+        else:
+            self.priors_ = np.array(self.priors)
+
+        cov = None
+        store_covariance = self.store_covariance
+        if store_covariance:
+            cov = []
+        means = []
+        scalings = []
+        rotations = []
+        for ind in range(n_classes):
+            Xg = X[y == ind, :]
+            meang = Xg.mean(0)
+            means.append(meang)
+            if len(Xg) == 1:
+                raise ValueError(
+                    "y has only 1 sample in class %s, covariance is ill defined."
+                    % str(self.classes_[ind])
+                )
+            Xgc = Xg - meang
+            # Xgc = U * S * V.T
+            _, S, Vt = np.linalg.svd(Xgc, full_matrices=False)
+            S2 = (S**2) / (len(Xg) - 1)
+            S2 = ((1 - self.reg_param) * S2) + self.reg_param
+            rank = np.sum(S2 > self.tol)
+            if rank < n_features:
+                warnings.warn(
+                    f"The covariance matrix of class {ind} is not full rank. "
+                    "Increasing the value of parameter `reg_param` might help"
+                    " reducing the collinearity.",
+                    linalg.LinAlgWarning,
+                )
+            if self.store_covariance or store_covariance:
+                # cov = V * (S^2 / (n-1)) * V.T
+                cov.append(np.dot(S2 * Vt.T, Vt))
+            scalings.append(S2)
+            rotations.append(Vt.T)
+        if self.store_covariance or store_covariance:
+            self.covariance_ = cov
+        self.means_ = np.asarray(means)
+        self.scalings_ = scalings
+        self.rotations_ = rotations
+        return self
+
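A short check of the SVD bookkeeping in the `fit` above (not part of the diff; assumes this module is importable as `sklearn.discriminant_analysis`): with the default `reg_param=0`, the stored per-class covariance is exactly `rotations_[k] @ diag(scalings_[k]) @ rotations_[k].T`, i.e. the unbiased sample covariance of that class.

import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

rng = np.random.RandomState(0)
X = rng.randn(80, 3)
y = np.repeat([0, 1], 40)

qda = QuadraticDiscriminantAnalysis(store_covariance=True).fit(X, y)

for k in range(2):
    V, S2 = qda.rotations_[k], qda.scalings_[k]
    rebuilt = V @ np.diag(S2) @ V.T
    # Matches the `np.dot(S2 * Vt.T, Vt)` stored in covariance_ ...
    assert np.allclose(rebuilt, qda.covariance_[k])
    # ... and the unbiased sample covariance of the class-k rows.
    assert np.allclose(rebuilt, np.cov(X[y == k].T, ddof=1))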
+    def _decision_function(self, X):
+        # return log posterior, see eq (4.12) p. 110 of the ESL.
+        check_is_fitted(self)
+
+        X = validate_data(self, X, reset=False)
+        norm2 = []
+        for i in range(len(self.classes_)):
+            R = self.rotations_[i]
+            S = self.scalings_[i]
+            Xm = X - self.means_[i]
+            X2 = np.dot(Xm, R * (S ** (-0.5)))
+            norm2.append(np.sum(X2**2, axis=1))
+        norm2 = np.array(norm2).T  # shape = [len(X), n_classes]
+        u = np.asarray([np.sum(np.log(s)) for s in self.scalings_])
+        return -0.5 * (norm2 + u) + np.log(self.priors_)
+
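For reference, `_decision_function` above is the quadratic discriminant score of ESL eq. (4.12): delta_k(x) = -0.5 * log|Sigma_k| - 0.5 * (x - mu_k)^T Sigma_k^{-1} (x - mu_k) + log(pi_k). A hedged sanity check against a direct dense computation, not part of the diff and assuming `store_covariance=True` so Sigma_k is available:

import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

rng = np.random.RandomState(0)
X = np.vstack([rng.randn(50, 2) - 2.0, rng.randn(50, 2) + 2.0])
y = np.repeat([0, 1], 50)

qda = QuadraticDiscriminantAnalysis(store_covariance=True).fit(X, y)

scores = np.empty((X.shape[0], 2))
for k in range(2):
    Sigma = qda.covariance_[k]
    diff = X - qda.means_[k]
    # Squared Mahalanobis distance to the class mean.
    maha = np.einsum("ij,jk,ik->i", diff, np.linalg.inv(Sigma), diff)
    scores[:, k] = -0.5 * (np.linalg.slogdet(Sigma)[1] + maha) + np.log(qda.priors_[k])

# The dense log-posterior should rank classes the same way as the fitted model.
assert np.array_equal(qda.classes_[scores.argmax(axis=1)], qda.predict(X))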
            +
                def decision_function(self, X):
         | 
| 1059 | 
            +
                    """Apply decision function to an array of samples.
         | 
| 1060 | 
            +
             | 
| 1061 | 
            +
                    The decision function is equal (up to a constant factor) to the
         | 
| 1062 | 
            +
                    log-posterior of the model, i.e. `log p(y = k | x)`. In a binary
         | 
| 1063 | 
            +
                    classification setting this instead corresponds to the difference
         | 
| 1064 | 
            +
                    `log p(y = 1 | x) - log p(y = 0 | x)`. See :ref:`lda_qda_math`.
         | 
| 1065 | 
            +
             | 
| 1066 | 
            +
                    Parameters
         | 
| 1067 | 
            +
                    ----------
         | 
| 1068 | 
            +
                    X : array-like of shape (n_samples, n_features)
         | 
| 1069 | 
            +
                        Array of samples (test vectors).
         | 
| 1070 | 
            +
             | 
| 1071 | 
            +
                    Returns
         | 
| 1072 | 
            +
                    -------
         | 
| 1073 | 
            +
                    C : ndarray of shape (n_samples,) or (n_samples, n_classes)
         | 
| 1074 | 
            +
                        Decision function values related to each class, per sample.
         | 
| 1075 | 
            +
                        In the two-class case, the shape is `(n_samples,)`, giving the
         | 
| 1076 | 
            +
                        log likelihood ratio of the positive class.
         | 
| 1077 | 
            +
                    """
         | 
| 1078 | 
            +
                    return super().decision_function(X)
         | 
| 1079 | 
            +
             | 
| 1080 | 
            +
                def predict(self, X):
         | 
| 1081 | 
            +
                    """Perform classification on an array of test vectors X.
         | 
| 1082 | 
            +
             | 
| 1083 | 
            +
                    The predicted class C for each sample in X is returned.
         | 
| 1084 | 
            +
             | 
| 1085 | 
            +
                    Parameters
         | 
| 1086 | 
            +
                    ----------
         | 
| 1087 | 
            +
                    X : array-like of shape (n_samples, n_features)
         | 
| 1088 | 
            +
                        Vector to be scored, where `n_samples` is the number of samples and
         | 
| 1089 | 
            +
                        `n_features` is the number of features.
         | 
| 1090 | 
            +
             | 
| 1091 | 
            +
                    Returns
         | 
| 1092 | 
            +
                    -------
         | 
| 1093 | 
            +
                    C : ndarray of shape (n_samples,)
         | 
| 1094 | 
            +
                        Estimated probabilities.
         | 
| 1095 | 
            +
                    """
         | 
| 1096 | 
            +
                    return super().predict(X)
         | 
| 1097 | 
            +
             | 
| 1098 | 
            +
                def predict_proba(self, X):
         | 
| 1099 | 
            +
                    """Return posterior probabilities of classification.
         | 
| 1100 | 
            +
             | 
| 1101 | 
            +
                    Parameters
         | 
| 1102 | 
            +
                    ----------
         | 
| 1103 | 
            +
                    X : array-like of shape (n_samples, n_features)
         | 
| 1104 | 
            +
                        Array of samples/test vectors.
         | 
| 1105 | 
            +
             | 
| 1106 | 
            +
                    Returns
         | 
| 1107 | 
            +
                    -------
         | 
| 1108 | 
            +
                    C : ndarray of shape (n_samples, n_classes)
         | 
| 1109 | 
            +
                        Posterior probabilities of classification per class.
         | 
| 1110 | 
            +
                    """
         | 
| 1111 | 
            +
                    # compute the likelihood of the underlying gaussian models
         | 
| 1112 | 
            +
                    # up to a multiplicative constant.
         | 
| 1113 | 
            +
                    return super().predict_proba(X)
         | 
| 1114 | 
            +
             | 
| 1115 | 
            +
                def predict_log_proba(self, X):
         | 
| 1116 | 
            +
                    """Return log of posterior probabilities of classification.
         | 
| 1117 | 
            +
             | 
| 1118 | 
            +
                    Parameters
         | 
| 1119 | 
            +
                    ----------
         | 
| 1120 | 
            +
                    X : array-like of shape (n_samples, n_features)
         | 
| 1121 | 
            +
                        Array of samples/test vectors.
         | 
| 1122 | 
            +
             | 
| 1123 | 
            +
                    Returns
         | 
| 1124 | 
            +
                    -------
         | 
| 1125 | 
            +
                    C : ndarray of shape (n_samples, n_classes)
         | 
| 1126 | 
            +
                        Posterior log-probabilities of classification per class.
         | 
| 1127 | 
            +
                    """
         | 
| 1128 | 
            +
                    # XXX : can do better to avoid precision overflows
         | 
| 1129 | 
            +
                    return super().predict_log_proba(X)
         | 
    	
evalkit_tf437/lib/python3.10/site-packages/sklearn/dummy.py
ADDED

@@ -0,0 +1,702 @@
+"""Dummy estimators that implement simple rules of thumb."""
+
+# Authors: The scikit-learn developers
+# SPDX-License-Identifier: BSD-3-Clause
+
+import warnings
+from numbers import Integral, Real
+
+import numpy as np
+import scipy.sparse as sp
+
+from .base import (
+    BaseEstimator,
+    ClassifierMixin,
+    MultiOutputMixin,
+    RegressorMixin,
+    _fit_context,
+)
+from .utils import check_random_state
+from .utils._param_validation import Interval, StrOptions
+from .utils.multiclass import class_distribution
+from .utils.random import _random_choice_csc
+from .utils.stats import _weighted_percentile
+from .utils.validation import (
+    _check_sample_weight,
+    _num_samples,
+    check_array,
+    check_consistent_length,
+    check_is_fitted,
+    validate_data,
+)
+
+
+class DummyClassifier(MultiOutputMixin, ClassifierMixin, BaseEstimator):
+    """DummyClassifier makes predictions that ignore the input features.
+
+    This classifier serves as a simple baseline to compare against other more
+    complex classifiers.
+
+    The specific behavior of the baseline is selected with the `strategy`
+    parameter.
+
+    All strategies make predictions that ignore the input feature values passed
+    as the `X` argument to `fit` and `predict`. The predictions, however,
+    typically depend on values observed in the `y` parameter passed to `fit`.
+
+    Note that the "stratified" and "uniform" strategies lead to
+    non-deterministic predictions that can be rendered deterministic by setting
+    the `random_state` parameter if needed. The other strategies are naturally
+    deterministic and, once fit, always return the same constant prediction
+    for any value of `X`.
+
+    Read more in the :ref:`User Guide <dummy_estimators>`.
+
+    .. versionadded:: 0.13
+
+    Parameters
+    ----------
+    strategy : {"most_frequent", "prior", "stratified", "uniform", \
+            "constant"}, default="prior"
+        Strategy to use to generate predictions.
+
+        * "most_frequent": the `predict` method always returns the most
+          frequent class label in the observed `y` argument passed to `fit`.
+          The `predict_proba` method returns the matching one-hot encoded
+          vector.
+        * "prior": the `predict` method always returns the most frequent
+          class label in the observed `y` argument passed to `fit` (like
+          "most_frequent"). ``predict_proba`` always returns the empirical
+          class distribution of `y` also known as the empirical class prior
+          distribution.
+        * "stratified": the `predict_proba` method randomly samples one-hot
+          vectors from a multinomial distribution parametrized by the empirical
+          class prior probabilities.
+          The `predict` method returns the class label which got probability
+          one in the one-hot vector of `predict_proba`.
+          Each sampled row of both methods is therefore independent and
+          identically distributed.
+        * "uniform": generates predictions uniformly at random from the list
+          of unique classes observed in `y`, i.e. each class has equal
+          probability.
+        * "constant": always predicts a constant label that is provided by
+          the user. This is useful for metrics that evaluate a non-majority
+          class.
+
+          .. versionchanged:: 0.24
+             The default value of `strategy` has changed to "prior" in version
+             0.24.
+
+    random_state : int, RandomState instance or None, default=None
+        Controls the randomness to generate the predictions when
+        ``strategy='stratified'`` or ``strategy='uniform'``.
+        Pass an int for reproducible output across multiple function calls.
+        See :term:`Glossary <random_state>`.
+
+    constant : int or str or array-like of shape (n_outputs,), default=None
+        The explicit constant as predicted by the "constant" strategy. This
+        parameter is useful only for the "constant" strategy.
+
+    Attributes
+    ----------
+    classes_ : ndarray of shape (n_classes,) or list of such arrays
+        Unique class labels observed in `y`. For multi-output classification
+        problems, this attribute is a list of arrays as each output has an
+        independent set of possible classes.
+
+    n_classes_ : int or list of int
+        Number of label for each output.
+
+    class_prior_ : ndarray of shape (n_classes,) or list of such arrays
+        Frequency of each class observed in `y`. For multioutput classification
+        problems, this is computed independently for each output.
+
+    n_features_in_ : int
+        Number of features seen during :term:`fit`.
+
+    feature_names_in_ : ndarray of shape (`n_features_in_`,)
+        Names of features seen during :term:`fit`. Defined only when `X` has
+        feature names that are all strings.
+
+    n_outputs_ : int
+        Number of outputs.
+
+    sparse_output_ : bool
+        True if the array returned from predict is to be in sparse CSC format.
+        Is automatically set to True if the input `y` is passed in sparse
+        format.
+
+    See Also
+    --------
+    DummyRegressor : Regressor that makes predictions using simple rules.
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> from sklearn.dummy import DummyClassifier
+    >>> X = np.array([-1, 1, 1, 1])
+    >>> y = np.array([0, 1, 1, 1])
+    >>> dummy_clf = DummyClassifier(strategy="most_frequent")
+    >>> dummy_clf.fit(X, y)
+    DummyClassifier(strategy='most_frequent')
+    >>> dummy_clf.predict(X)
+    array([1, 1, 1, 1])
+    >>> dummy_clf.score(X, y)
+    0.75
+    """
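To make the strategy descriptions above concrete, a small sketch (not part of the diff) contrasting "prior" and "stratified", assuming `sklearn.dummy.DummyClassifier` matches the class added here.

import numpy as np
from sklearn.dummy import DummyClassifier

X = np.zeros((6, 1))              # features are ignored by every strategy
y = np.array([0, 0, 0, 0, 1, 1])  # empirical prior: roughly [2/3, 1/3]

# "prior": predict_proba always returns the empirical class distribution
# and predict always returns the majority class.
prior_clf = DummyClassifier(strategy="prior").fit(X, y)
print(prior_clf.predict_proba(X[:1]))  # approximately [[0.667, 0.333]]
print(prior_clf.predict(X[:1]))        # [0]

# "stratified": each predict_proba row is a one-hot sample drawn from that
# same distribution, so predictions vary but are reproducible via random_state.
strat_clf = DummyClassifier(strategy="stratified", random_state=0).fit(X, y)
print(strat_clf.predict(X))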
+
+    _parameter_constraints: dict = {
+        "strategy": [
+            StrOptions({"most_frequent", "prior", "stratified", "uniform", "constant"})
+        ],
+        "random_state": ["random_state"],
+        "constant": [Integral, str, "array-like", None],
+    }
+
+    def __init__(self, *, strategy="prior", random_state=None, constant=None):
+        self.strategy = strategy
+        self.random_state = random_state
+        self.constant = constant
+
+    @_fit_context(prefer_skip_nested_validation=True)
+    def fit(self, X, y, sample_weight=None):
+        """Fit the baseline classifier.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            Training data.
+
+        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
+            Target values.
+
+        sample_weight : array-like of shape (n_samples,), default=None
+            Sample weights.
+
+        Returns
+        -------
+        self : object
+            Returns the instance itself.
+        """
+        validate_data(self, X, skip_check_array=True)
+
+        self._strategy = self.strategy
+
+        if self._strategy == "uniform" and sp.issparse(y):
+            y = y.toarray()
+            warnings.warn(
+                (
+                    "A local copy of the target data has been converted "
+                    "to a numpy array. Predicting on sparse target data "
+                    "with the uniform strategy would not save memory "
+                    "and would be slower."
+                ),
+                UserWarning,
+            )
+
+        self.sparse_output_ = sp.issparse(y)
+
+        if not self.sparse_output_:
+            y = np.asarray(y)
+            y = np.atleast_1d(y)
+
+        if y.ndim == 1:
+            y = np.reshape(y, (-1, 1))
+
+        self.n_outputs_ = y.shape[1]
+
+        check_consistent_length(X, y)
+
+        if sample_weight is not None:
+            sample_weight = _check_sample_weight(sample_weight, X)
+
+        if self._strategy == "constant":
+            if self.constant is None:
+                raise ValueError(
+                    "Constant target value has to be specified "
+                    "when the constant strategy is used."
+                )
+            else:
+                constant = np.reshape(np.atleast_1d(self.constant), (-1, 1))
+                if constant.shape[0] != self.n_outputs_:
+                    raise ValueError(
+                        "Constant target value should have shape (%d, 1)."
+                        % self.n_outputs_
+                    )
+
+        (self.classes_, self.n_classes_, self.class_prior_) = class_distribution(
+            y, sample_weight
+        )
+
+        if self._strategy == "constant":
+            for k in range(self.n_outputs_):
+                if not any(constant[k][0] == c for c in self.classes_[k]):
+                    # Checking in case of constant strategy if the constant
+                    # provided by the user is in y.
+                    err_msg = (
+                        "The constant target value must be present in "
+                        "the training data. You provided constant={}. "
+                        "Possible values are: {}.".format(
+                            self.constant, self.classes_[k].tolist()
+                        )
+                    )
+                    raise ValueError(err_msg)
+
+        if self.n_outputs_ == 1:
+            self.n_classes_ = self.n_classes_[0]
+            self.classes_ = self.classes_[0]
+            self.class_prior_ = self.class_prior_[0]
+
+        return self
+
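After `fit`, the attributes described in the docstring are populated via `class_distribution` and unwrapped for the single-output case; a minimal illustration (not part of the diff, again assuming `sklearn.dummy.DummyClassifier` behaves as in the code above).

import numpy as np
from sklearn.dummy import DummyClassifier

X = np.zeros((5, 2))
y = np.array(["a", "a", "b", "b", "b"])

clf = DummyClassifier().fit(X, y)
print(clf.classes_)      # ['a' 'b']
print(clf.n_classes_)    # 2 (single output, so the list is unwrapped)
print(clf.class_prior_)  # [0.4 0.6]
print(clf.n_outputs_)    # 1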
+    def predict(self, X):
+        """Perform classification on test vectors X.
+
+        Parameters
+        ----------
+        X : array-like of shape (n_samples, n_features)
+            Test data.
+
+        Returns
+        -------
+        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
+            Predicted target values for X.
+        """
+        check_is_fitted(self)
+
+        # numpy random_state expects Python int and not long as size argument
+        # under Windows
+        n_samples = _num_samples(X)
+        rs = check_random_state(self.random_state)
+
+        n_classes_ = self.n_classes_
+        classes_ = self.classes_
+        class_prior_ = self.class_prior_
+        constant = self.constant
+        if self.n_outputs_ == 1:
+            # Get same type even for self.n_outputs_ == 1
+            n_classes_ = [n_classes_]
+            classes_ = [classes_]
+            class_prior_ = [class_prior_]
+            constant = [constant]
+        # Compute probability only once
+        if self._strategy == "stratified":
+            proba = self.predict_proba(X)
+            if self.n_outputs_ == 1:
+                proba = [proba]
+
+        if self.sparse_output_:
+            class_prob = None
+            if self._strategy in ("most_frequent", "prior"):
+                classes_ = [np.array([cp.argmax()]) for cp in class_prior_]
+
+            elif self._strategy == "stratified":
+                class_prob = class_prior_
+
+            elif self._strategy == "uniform":
+                raise ValueError(
+                    "Sparse target prediction is not "
+                    "supported with the uniform strategy"
+                )
+
+            elif self._strategy == "constant":
+                classes_ = [np.array([c]) for c in constant]
+
+            y = _random_choice_csc(n_samples, classes_, class_prob, self.random_state)
+        else:
+            if self._strategy in ("most_frequent", "prior"):
+                y = np.tile(
+                    [
+                        classes_[k][class_prior_[k].argmax()]
+                        for k in range(self.n_outputs_)
+                    ],
+                    [n_samples, 1],
+                )
+
+            elif self._strategy == "stratified":
+                y = np.vstack(
+                    [
+                        classes_[k][proba[k].argmax(axis=1)]
+                        for k in range(self.n_outputs_)
+                    ]
+                ).T
+
+            elif self._strategy == "uniform":
+                ret = [
+                    classes_[k][rs.randint(n_classes_[k], size=n_samples)]
+                    for k in range(self.n_outputs_)
+                ]
+                y = np.vstack(ret).T
+
+            elif self._strategy == "constant":
+                y = np.tile(self.constant, (n_samples, 1))
+
+            if self.n_outputs_ == 1:
+                y = np.ravel(y)
+
+        return y
+
| 339 | 
            +
                def predict_proba(self, X):
         | 
| 340 | 
            +
                    """
         | 
| 341 | 
            +
                    Return probability estimates for the test vectors X.
         | 
| 342 | 
            +
             | 
| 343 | 
            +
                    Parameters
         | 
| 344 | 
            +
                    ----------
         | 
| 345 | 
            +
                    X : array-like of shape (n_samples, n_features)
         | 
| 346 | 
            +
                        Test data.
         | 
| 347 | 
            +
             | 
| 348 | 
            +
                    Returns
         | 
| 349 | 
            +
                    -------
         | 
| 350 | 
            +
                    P : ndarray of shape (n_samples, n_classes) or list of such arrays
         | 
| 351 | 
            +
                        Returns the probability of the sample for each class in
         | 
| 352 | 
            +
                        the model, where classes are ordered arithmetically, for each
         | 
| 353 | 
            +
                        output.
         | 
| 354 | 
            +
                    """
         | 
| 355 | 
            +
                    check_is_fitted(self)
         | 
| 356 | 
            +
             | 
| 357 | 
            +
                    # numpy random_state expects Python int and not long as size argument
         | 
| 358 | 
            +
                    # under Windows
         | 
| 359 | 
            +
                    n_samples = _num_samples(X)
         | 
| 360 | 
            +
                    rs = check_random_state(self.random_state)
         | 
| 361 | 
            +
             | 
| 362 | 
            +
                    n_classes_ = self.n_classes_
         | 
| 363 | 
            +
                    classes_ = self.classes_
         | 
| 364 | 
            +
                    class_prior_ = self.class_prior_
         | 
| 365 | 
            +
                    constant = self.constant
         | 
| 366 | 
            +
                    if self.n_outputs_ == 1:
         | 
| 367 | 
            +
                        # Get same type even for self.n_outputs_ == 1
         | 
| 368 | 
            +
                        n_classes_ = [n_classes_]
         | 
| 369 | 
            +
                        classes_ = [classes_]
         | 
| 370 | 
            +
                        class_prior_ = [class_prior_]
         | 
| 371 | 
            +
                        constant = [constant]
         | 
| 372 | 
            +
             | 
| 373 | 
            +
                    P = []
         | 
| 374 | 
            +
                    for k in range(self.n_outputs_):
         | 
| 375 | 
            +
                        if self._strategy == "most_frequent":
         | 
| 376 | 
            +
                            ind = class_prior_[k].argmax()
         | 
| 377 | 
            +
                            out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
         | 
| 378 | 
            +
                            out[:, ind] = 1.0
         | 
| 379 | 
            +
                        elif self._strategy == "prior":
         | 
| 380 | 
            +
                            out = np.ones((n_samples, 1)) * class_prior_[k]
         | 
| 381 | 
            +
             | 
| 382 | 
            +
                        elif self._strategy == "stratified":
         | 
| 383 | 
            +
                            out = rs.multinomial(1, class_prior_[k], size=n_samples)
         | 
| 384 | 
            +
                            out = out.astype(np.float64)
         | 
| 385 | 
            +
             | 
| 386 | 
            +
                        elif self._strategy == "uniform":
         | 
| 387 | 
            +
                            out = np.ones((n_samples, n_classes_[k]), dtype=np.float64)
         | 
| 388 | 
            +
                            out /= n_classes_[k]
         | 
| 389 | 
            +
             | 
| 390 | 
            +
                        elif self._strategy == "constant":
         | 
| 391 | 
            +
                            ind = np.where(classes_[k] == constant[k])
         | 
| 392 | 
            +
                            out = np.zeros((n_samples, n_classes_[k]), dtype=np.float64)
         | 
| 393 | 
            +
                            out[:, ind] = 1.0
         | 
| 394 | 
            +
             | 
| 395 | 
            +
                        P.append(out)
         | 
| 396 | 
            +
             | 
| 397 | 
            +
                    if self.n_outputs_ == 1:
         | 
| 398 | 
            +
                        P = P[0]
         | 
| 399 | 
            +
             | 
| 400 | 
            +
                    return P
         | 
| 401 | 
            +
             | 
| 402 | 
            +
                def predict_log_proba(self, X):
         | 
| 403 | 
            +
                    """
         | 
| 404 | 
            +
                    Return log probability estimates for the test vectors X.
         | 
| 405 | 
            +
             | 
| 406 | 
            +
                    Parameters
         | 
| 407 | 
            +
                    ----------
         | 
| 408 | 
            +
                    X : {array-like, object with finite length or shape}
         | 
| 409 | 
            +
                        Training data.
         | 
| 410 | 
            +
             | 
| 411 | 
            +
                    Returns
         | 
| 412 | 
            +
                    -------
         | 
| 413 | 
            +
                    P : ndarray of shape (n_samples, n_classes) or list of such arrays
         | 
| 414 | 
            +
                        Returns the log probability of the sample for each class in
         | 
| 415 | 
            +
                        the model, where classes are ordered arithmetically for each
         | 
| 416 | 
            +
                        output.
         | 
| 417 | 
            +
                    """
         | 
| 418 | 
            +
                    proba = self.predict_proba(X)
         | 
| 419 | 
            +
                    if self.n_outputs_ == 1:
         | 
| 420 | 
            +
                        return np.log(proba)
         | 
| 421 | 
            +
                    else:
         | 
| 422 | 
            +
                        return [np.log(p) for p in proba]
         | 
| 423 | 
            +
             | 
| 424 | 
            +
                def __sklearn_tags__(self):
         | 
| 425 | 
            +
                    tags = super().__sklearn_tags__()
         | 
| 426 | 
            +
                    tags.input_tags.sparse = True
         | 
| 427 | 
            +
                    tags.classifier_tags.poor_score = True
         | 
| 428 | 
            +
                    tags.no_validation = True
         | 
| 429 | 
            +
                    return tags
         | 
| 430 | 
            +
             | 
| 431 | 
            +
                def score(self, X, y, sample_weight=None):
         | 
| 432 | 
            +
                    """Return the mean accuracy on the given test data and labels.
         | 
| 433 | 
            +
             | 
| 434 | 
            +
                    In multi-label classification, this is the subset accuracy
         | 
| 435 | 
            +
                    which is a harsh metric since you require for each sample that
         | 
| 436 | 
            +
                    each label set be correctly predicted.
         | 
| 437 | 
            +
             | 
| 438 | 
            +
                    Parameters
         | 
| 439 | 
            +
                    ----------
         | 
| 440 | 
            +
                    X : None or array-like of shape (n_samples, n_features)
         | 
| 441 | 
            +
                        Test samples. Passing None as test samples gives the same result
         | 
| 442 | 
            +
                        as passing real test samples, since DummyClassifier
         | 
| 443 | 
            +
                        operates independently of the sampled observations.
         | 
| 444 | 
            +
             | 
| 445 | 
            +
                    y : array-like of shape (n_samples,) or (n_samples, n_outputs)
         | 
| 446 | 
            +
                        True labels for X.
         | 
| 447 | 
            +
             | 
| 448 | 
            +
                    sample_weight : array-like of shape (n_samples,), default=None
         | 
| 449 | 
            +
                        Sample weights.
         | 
| 450 | 
            +
             | 
| 451 | 
            +
                    Returns
         | 
| 452 | 
            +
                    -------
         | 
| 453 | 
            +
                    score : float
         | 
| 454 | 
            +
                        Mean accuracy of self.predict(X) w.r.t. y.
         | 
| 455 | 
            +
                    """
         | 
| 456 | 
            +
                    if X is None:
         | 
| 457 | 
            +
                        X = np.zeros(shape=(len(y), 1))
         | 
| 458 | 
            +
                    return super().score(X, y, sample_weight)
         | 
| 459 | 
            +
             | 
| 460 | 
            +
             | 
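The block below is a minimal usage sketch of the prediction strategies implemented above; the toy data, variable names, and printed comments are illustrative additions and not part of the packaged file.

# Usage sketch (illustrative, toy data): how the strategies above behave.
import numpy as np
from sklearn.dummy import DummyClassifier

X = np.zeros((6, 1))              # features are ignored by the dummy baseline
y = np.array([0, 0, 0, 1, 1, 2])  # class 0 is the most frequent label

for strategy in ("most_frequent", "prior", "stratified", "uniform"):
    clf = DummyClassifier(strategy=strategy, random_state=0).fit(X, y)
    # predict_proba is one-hot at the majority class for "most_frequent",
    # the empirical class distribution for "prior", multinomial draws for
    # "stratified", and 1 / n_classes for "uniform".
    print(strategy, clf.predict(X[:3]), clf.predict_proba(X[:1]).round(2))

# "constant" always returns the user-supplied label (it must appear in y).
print(DummyClassifier(strategy="constant", constant=2).fit(X, y).predict(X[:3]))

The regressor counterpart of this baseline continues below.
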
class DummyRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
    """Regressor that makes predictions using simple rules.

    This regressor is useful as a simple baseline to compare with other
    (real) regressors. Do not use it for real problems.

    Read more in the :ref:`User Guide <dummy_estimators>`.

    .. versionadded:: 0.13

    Parameters
    ----------
    strategy : {"mean", "median", "quantile", "constant"}, default="mean"
        Strategy to use to generate predictions.

        * "mean": always predicts the mean of the training set
        * "median": always predicts the median of the training set
        * "quantile": always predicts a specified quantile of the training set,
          provided with the quantile parameter.
        * "constant": always predicts a constant value that is provided by
          the user.

    constant : int or float or array-like of shape (n_outputs,), default=None
        The explicit constant as predicted by the "constant" strategy. This
        parameter is useful only for the "constant" strategy.

    quantile : float in [0.0, 1.0], default=None
        The quantile to predict using the "quantile" strategy. A quantile of
        0.5 corresponds to the median, while 0.0 to the minimum and 1.0 to the
        maximum.

    Attributes
    ----------
    constant_ : ndarray of shape (1, n_outputs)
        Mean or median or quantile of the training targets or constant value
        given by the user.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

    feature_names_in_ : ndarray of shape (`n_features_in_`,)
        Names of features seen during :term:`fit`. Defined only when `X` has
        feature names that are all strings.

    n_outputs_ : int
        Number of outputs.

    See Also
    --------
    DummyClassifier: Classifier that makes predictions using simple rules.

    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.dummy import DummyRegressor
    >>> X = np.array([1.0, 2.0, 3.0, 4.0])
    >>> y = np.array([2.0, 3.0, 5.0, 10.0])
    >>> dummy_regr = DummyRegressor(strategy="mean")
    >>> dummy_regr.fit(X, y)
    DummyRegressor()
    >>> dummy_regr.predict(X)
    array([5., 5., 5., 5.])
    >>> dummy_regr.score(X, y)
    0.0
    """

    _parameter_constraints: dict = {
        "strategy": [StrOptions({"mean", "median", "quantile", "constant"})],
        "quantile": [Interval(Real, 0.0, 1.0, closed="both"), None],
        "constant": [
            Interval(Real, None, None, closed="neither"),
            "array-like",
            None,
        ],
    }

    def __init__(self, *, strategy="mean", constant=None, quantile=None):
        self.strategy = strategy
        self.constant = constant
        self.quantile = quantile

    @_fit_context(prefer_skip_nested_validation=True)
    def fit(self, X, y, sample_weight=None):
        """Fit the baseline regressor.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.

        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
            Target values.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights.

        Returns
        -------
        self : object
            Fitted estimator.
        """
        validate_data(self, X, skip_check_array=True)

        y = check_array(y, ensure_2d=False, input_name="y")
        if len(y) == 0:
            raise ValueError("y must not be empty.")

        if y.ndim == 1:
            y = np.reshape(y, (-1, 1))
        self.n_outputs_ = y.shape[1]

        check_consistent_length(X, y, sample_weight)

        if sample_weight is not None:
            sample_weight = _check_sample_weight(sample_weight, X)

        if self.strategy == "mean":
            self.constant_ = np.average(y, axis=0, weights=sample_weight)

        elif self.strategy == "median":
            if sample_weight is None:
                self.constant_ = np.median(y, axis=0)
            else:
                self.constant_ = [
                    _weighted_percentile(y[:, k], sample_weight, percentile=50.0)
                    for k in range(self.n_outputs_)
                ]

        elif self.strategy == "quantile":
            if self.quantile is None:
                raise ValueError(
                    "When using `strategy='quantile', you have to specify the desired "
                    "quantile in the range [0, 1]."
                )
            percentile = self.quantile * 100.0
            if sample_weight is None:
                self.constant_ = np.percentile(y, axis=0, q=percentile)
            else:
                self.constant_ = [
                    _weighted_percentile(y[:, k], sample_weight, percentile=percentile)
                    for k in range(self.n_outputs_)
                ]

        elif self.strategy == "constant":
            if self.constant is None:
                raise TypeError(
                    "Constant target value has to be specified "
                    "when the constant strategy is used."
                )

            self.constant_ = check_array(
                self.constant,
                accept_sparse=["csr", "csc", "coo"],
                ensure_2d=False,
                ensure_min_samples=0,
            )

            if self.n_outputs_ != 1 and self.constant_.shape[0] != y.shape[1]:
                raise ValueError(
                    "Constant target value should have shape (%d, 1)." % y.shape[1]
                )

        self.constant_ = np.reshape(self.constant_, (1, -1))
        return self

    def predict(self, X, return_std=False):
        """Perform classification on test vectors X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Test data.

        return_std : bool, default=False
            Whether to return the standard deviation of posterior prediction.
            All zeros in this case.

            .. versionadded:: 0.20

        Returns
        -------
        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
            Predicted target values for X.

        y_std : array-like of shape (n_samples,) or (n_samples, n_outputs)
            Standard deviation of predictive distribution of query points.
        """
        check_is_fitted(self)
        n_samples = _num_samples(X)

        y = np.full(
            (n_samples, self.n_outputs_),
            self.constant_,
            dtype=np.array(self.constant_).dtype,
        )
        y_std = np.zeros((n_samples, self.n_outputs_))

        if self.n_outputs_ == 1:
            y = np.ravel(y)
            y_std = np.ravel(y_std)

        return (y, y_std) if return_std else y

    def __sklearn_tags__(self):
        tags = super().__sklearn_tags__()
        tags.input_tags.sparse = True
        tags.regressor_tags.poor_score = True
        tags.no_validation = True
        return tags

    def score(self, X, y, sample_weight=None):
        """Return the coefficient of determination R^2 of the prediction.

        The coefficient R^2 is defined as `(1 - u/v)`, where `u` is the
        residual sum of squares `((y_true - y_pred) ** 2).sum()` and `v` is the
        total sum of squares `((y_true - y_true.mean()) ** 2).sum()`. The best
        possible score is 1.0 and it can be negative (because the model can be
        arbitrarily worse). A constant model that always predicts the expected
        value of y, disregarding the input features, would get a R^2 score of
        0.0.

        Parameters
        ----------
        X : None or array-like of shape (n_samples, n_features)
            Test samples. Passing None as test samples gives the same result
            as passing real test samples, since `DummyRegressor`
            operates independently of the sampled observations.

        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
            True values for X.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights.

        Returns
        -------
        score : float
            R^2 of `self.predict(X)` w.r.t. y.
        """
        if X is None:
            X = np.zeros(shape=(len(y), 1))
        return super().score(X, y, sample_weight)
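As with the classifier, here is a short illustrative sketch of the regressor defined above; the toy targets and printed values are invented for this example and are not part of the packaged file.

# Usage sketch (illustrative, toy data) for DummyRegressor.
import numpy as np
from sklearn.dummy import DummyRegressor

X = np.zeros((5, 1))                       # features are ignored
y = np.array([1.0, 2.0, 3.0, 4.0, 100.0])

# "quantile" requires the quantile parameter; 0.5 reproduces the median.
reg = DummyRegressor(strategy="quantile", quantile=0.5).fit(X, y)
print(reg.predict(X[:2]))                  # [3. 3.]

# return_std=True also returns an all-zeros standard deviation array,
# as implemented in predict() above.
pred, std = DummyRegressor().fit(X, y).predict(X[:2], return_std=True)
print(pred, std)                           # [22. 22.] [0. 0.]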
    	
evalkit_tf437/lib/python3.10/site-packages/sklearn/exceptions.py
ADDED
@@ -0,0 +1,249 @@
"""Custom warnings and errors used across scikit-learn."""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

__all__ = [
    "NotFittedError",
    "ConvergenceWarning",
    "DataConversionWarning",
    "DataDimensionalityWarning",
    "EfficiencyWarning",
    "FitFailedWarning",
    "SkipTestWarning",
    "UndefinedMetricWarning",
    "PositiveSpectrumWarning",
    "UnsetMetadataPassedError",
    "EstimatorCheckFailedWarning",
]


class UnsetMetadataPassedError(ValueError):
    """Exception class to raise if a metadata is passed which is not explicitly \
        requested (metadata=True) or not requested (metadata=False).

    .. versionadded:: 1.3

    Parameters
    ----------
    message : str
        The message

    unrequested_params : dict
        A dictionary of parameters and their values which are provided but not
        requested.

    routed_params : dict
        A dictionary of routed parameters.
    """

    def __init__(self, *, message, unrequested_params, routed_params):
        super().__init__(message)
        self.unrequested_params = unrequested_params
        self.routed_params = routed_params


class NotFittedError(ValueError, AttributeError):
    """Exception class to raise if estimator is used before fitting.

    This class inherits from both ValueError and AttributeError to help with
    exception handling and backward compatibility.

    Examples
    --------
    >>> from sklearn.svm import LinearSVC
    >>> from sklearn.exceptions import NotFittedError
    >>> try:
    ...     LinearSVC().predict([[1, 2], [2, 3], [3, 4]])
    ... except NotFittedError as e:
    ...     print(repr(e))
    NotFittedError("This LinearSVC instance is not fitted yet. Call 'fit' with
    appropriate arguments before using this estimator."...)

    .. versionchanged:: 0.18
       Moved from sklearn.utils.validation.
    """


class ConvergenceWarning(UserWarning):
    """Custom warning to capture convergence problems

    .. versionchanged:: 0.18
       Moved from sklearn.utils.
    """


class DataConversionWarning(UserWarning):
    """Warning used to notify implicit data conversions happening in the code.

    This warning occurs when some input data needs to be converted or
    interpreted in a way that may not match the user's expectations.

    For example, this warning may occur when the user
        - passes an integer array to a function which expects float input and
          will convert the input
        - requests a non-copying operation, but a copy is required to meet the
          implementation's data-type expectations;
        - passes an input whose shape can be interpreted ambiguously.

    .. versionchanged:: 0.18
       Moved from sklearn.utils.validation.
    """


class DataDimensionalityWarning(UserWarning):
    """Custom warning to notify potential issues with data dimensionality.

    For example, in random projection, this warning is raised when the
    number of components, which quantifies the dimensionality of the target
    projection space, is higher than the number of features, which quantifies
    the dimensionality of the original source space, to imply that the
    dimensionality of the problem will not be reduced.

    .. versionchanged:: 0.18
       Moved from sklearn.utils.
    """


class EfficiencyWarning(UserWarning):
    """Warning used to notify the user of inefficient computation.

    This warning notifies the user that the efficiency may not be optimal due
    to some reason which may be included as a part of the warning message.
    This may be subclassed into a more specific Warning class.

    .. versionadded:: 0.18
    """


class FitFailedWarning(RuntimeWarning):
    """Warning class used if there is an error while fitting the estimator.

    This Warning is used in meta estimators GridSearchCV and RandomizedSearchCV
    and the cross-validation helper function cross_val_score to warn when there
    is an error while fitting the estimator.

    .. versionchanged:: 0.18
       Moved from sklearn.cross_validation.
    """


class SkipTestWarning(UserWarning):
    """Warning class used to notify the user of a test that was skipped.

    For example, one of the estimator checks requires a pandas import.
    If the pandas package cannot be imported, the test will be skipped rather
    than register as a failure.
    """


class UndefinedMetricWarning(UserWarning):
    """Warning used when the metric is invalid

    .. versionchanged:: 0.18
       Moved from sklearn.base.
    """


class PositiveSpectrumWarning(UserWarning):
    """Warning raised when the eigenvalues of a PSD matrix have issues

    This warning is typically raised by ``_check_psd_eigenvalues`` when the
    eigenvalues of a positive semidefinite (PSD) matrix such as a gram matrix
    (kernel) present significant negative eigenvalues, or bad conditioning i.e.
    very small non-zero eigenvalues compared to the largest eigenvalue.

    .. versionadded:: 0.22
    """


class InconsistentVersionWarning(UserWarning):
    """Warning raised when an estimator is unpickled with a inconsistent version.

    Parameters
    ----------
    estimator_name : str
        Estimator name.

    current_sklearn_version : str
        Current scikit-learn version.

    original_sklearn_version : str
        Original scikit-learn version.
    """

    def __init__(
        self, *, estimator_name, current_sklearn_version, original_sklearn_version
    ):
        self.estimator_name = estimator_name
        self.current_sklearn_version = current_sklearn_version
        self.original_sklearn_version = original_sklearn_version

    def __str__(self):
        return (
            f"Trying to unpickle estimator {self.estimator_name} from version"
            f" {self.original_sklearn_version} when "
            f"using version {self.current_sklearn_version}. This might lead to breaking"
            " code or "
            "invalid results. Use at your own risk. "
            "For more info please refer to:\n"
            "https://scikit-learn.org/stable/model_persistence.html"
            "#security-maintainability-limitations"
        )


class EstimatorCheckFailedWarning(UserWarning):
    """Warning raised when an estimator check from the common tests fails.

    Parameters
    ----------
    estimator : estimator object
        Estimator instance for which the test failed.

    check_name : str
        Name of the check that failed.

    exception : Exception
        Exception raised by the failed check.

    status : str
        Status of the check.

    expected_to_fail : bool
        Whether the check was expected to fail.

    expected_to_fail_reason : str
        Reason for the expected failure.
    """

    def __init__(
        self,
        *,
        estimator,
        check_name: str,
        exception: Exception,
        status: str,
        expected_to_fail: bool,
        expected_to_fail_reason: str,
    ):
        self.estimator = estimator
        self.check_name = check_name
        self.exception = exception
        self.status = status
        self.expected_to_fail = expected_to_fail
        self.expected_to_fail_reason = expected_to_fail_reason

    def __repr__(self):
        expected_to_fail_str = (
            f"Expected to fail: {self.expected_to_fail_reason}"
            if self.expected_to_fail
            else "Not expected to fail"
        )
        return (
            f"Test {self.check_name} failed for estimator {self.estimator!r}.\n"
            f"Expected to fail reason: {expected_to_fail_str}\n"
            f"Exception: {self.exception}"
        )

    def __str__(self):
        return self.__repr__()
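The warning and error classes above plug into the standard `warnings` machinery. The snippet below is a hedged sketch of typical usage; the `LogisticRegression` call and its toy data are made up for illustration, and whether a `ConvergenceWarning` is actually emitted depends on the solver run.

# Sketch: escalating scikit-learn warning categories during development.
import warnings

from sklearn.exceptions import ConvergenceWarning, NotFittedError
from sklearn.linear_model import LogisticRegression

with warnings.catch_warnings():
    # Turn convergence problems into exceptions so they fail loudly.
    warnings.simplefilter("error", category=ConvergenceWarning)
    try:
        # With max_iter=1 the solver usually does not converge.
        LogisticRegression(max_iter=1).fit([[0.0], [1.0], [2.0]], [0, 1, 1])
    except ConvergenceWarning as exc:
        print(f"caught: {exc}")

# NotFittedError inherits from ValueError and AttributeError, so legacy
# except clauses written against either base class keep working.
try:
    LogisticRegression().predict([[0.0]])
except NotFittedError as exc:
    print(f"not fitted: {exc}")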
    	
evalkit_tf437/lib/python3.10/site-packages/sklearn/feature_extraction/__init__.py
ADDED
@@ -0,0 +1,18 @@
"""Feature extraction from raw data."""

# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause

from . import image, text
from ._dict_vectorizer import DictVectorizer
from ._hash import FeatureHasher
from .image import grid_to_graph, img_to_graph

__all__ = [
    "DictVectorizer",
    "image",
    "img_to_graph",
    "grid_to_graph",
    "text",
    "FeatureHasher",
]
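A brief usage sketch of the two vectorizers re-exported by this package; the record dicts below are invented for illustration and are not part of the packaged file.

# Sketch (illustrative, toy data): DictVectorizer vs. FeatureHasher.
from sklearn.feature_extraction import DictVectorizer, FeatureHasher

records = [{"city": "Paris", "temp": 12.0}, {"city": "Oslo", "temp": 3.0}]

# DictVectorizer learns an explicit vocabulary: one column per feature
# (string values are expanded to "name=value" indicator columns).
vec = DictVectorizer(sparse=False)
print(vec.fit_transform(records))
print(vec.get_feature_names_out())

# FeatureHasher maps features onto a fixed number of columns via hashing,
# so no vocabulary needs to be stored or fitted.
hasher = FeatureHasher(n_features=8)
print(hasher.transform(records).toarray())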