ZTWHHH commited on
Commit
6049deb
·
verified ·
1 Parent(s): 09b651f

Add files using upload-large-folder tool

Browse files
Files changed (27) hide show
  1. .gitattributes +1 -0
  2. chatunivi/lib/libcrypto.a +3 -0
  3. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/__pycache__/__init__.cpython-310.pyc +0 -0
  4. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/__init__.py +87 -0
  5. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/__pycache__/__init__.cpython-310.pyc +0 -0
  6. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/base.cpython-310-x86_64-linux-gnu.so +0 -0
  7. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/ccalendar.pyi +12 -0
  8. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/conversion.pyi +14 -0
  9. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/dtypes.pyi +83 -0
  10. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/fields.pyi +62 -0
  11. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/nattype.pyi +141 -0
  12. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/np_datetime.pyi +27 -0
  13. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.pyi +287 -0
  14. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/parsing.pyi +33 -0
  15. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/period.pyi +135 -0
  16. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/strptime.pyi +14 -0
  17. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.pyi +174 -0
  18. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/timestamps.pyi +241 -0
  19. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/timezones.pyi +21 -0
  20. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/vectorized.pyi +43 -0
  21. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/__init__.py +0 -0
  22. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/__pycache__/__init__.cpython-310.pyc +0 -0
  23. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/aggregations.pyi +127 -0
  24. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/indexers.pyi +12 -0
  25. infer_4_30_0/lib/python3.10/site-packages/pandas/arrays/__init__.py +53 -0
  26. infer_4_30_0/lib/python3.10/site-packages/pandas/arrays/__pycache__/__init__.cpython-310.pyc +0 -0
  27. infer_4_30_0/lib/python3.10/site-packages/pandas/errors/__init__.py +850 -0
.gitattributes CHANGED
@@ -2133,3 +2133,4 @@ infer_4_30_0/lib/python3.10/site-packages/nvidia/cufft/lib/libcufftw.so.11 filte
2133
  infer_4_30_0/lib/python3.10/site-packages/nvidia/cufft/lib/libcufftw.so.10 filter=lfs diff=lfs merge=lfs -text
2134
  infer_4_30_0/lib/python3.10/site-packages/nvidia/cusolver/lib/libcusolverMg.so.11 filter=lfs diff=lfs merge=lfs -text
2135
  infer_4_30_0/lib/python3.10/site-packages/freetype/libfreetype.so filter=lfs diff=lfs merge=lfs -text
 
 
2133
  infer_4_30_0/lib/python3.10/site-packages/nvidia/cufft/lib/libcufftw.so.10 filter=lfs diff=lfs merge=lfs -text
2134
  infer_4_30_0/lib/python3.10/site-packages/nvidia/cusolver/lib/libcusolverMg.so.11 filter=lfs diff=lfs merge=lfs -text
2135
  infer_4_30_0/lib/python3.10/site-packages/freetype/libfreetype.so filter=lfs diff=lfs merge=lfs -text
2136
+ chatunivi/lib/libcrypto.a filter=lfs diff=lfs merge=lfs -text
chatunivi/lib/libcrypto.a ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:01243b4048aad14af6904f0721b341e86ecbd5cddb724305263ede73a5bc1f53
3
+ size 11086938
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (538 Bytes). View file
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/__init__.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __all__ = [
2
+ "dtypes",
3
+ "localize_pydatetime",
4
+ "NaT",
5
+ "NaTType",
6
+ "iNaT",
7
+ "nat_strings",
8
+ "OutOfBoundsDatetime",
9
+ "OutOfBoundsTimedelta",
10
+ "IncompatibleFrequency",
11
+ "Period",
12
+ "Resolution",
13
+ "Timedelta",
14
+ "normalize_i8_timestamps",
15
+ "is_date_array_normalized",
16
+ "dt64arr_to_periodarr",
17
+ "delta_to_nanoseconds",
18
+ "ints_to_pydatetime",
19
+ "ints_to_pytimedelta",
20
+ "get_resolution",
21
+ "Timestamp",
22
+ "tz_convert_from_utc_single",
23
+ "tz_convert_from_utc",
24
+ "to_offset",
25
+ "Tick",
26
+ "BaseOffset",
27
+ "tz_compare",
28
+ "is_unitless",
29
+ "astype_overflowsafe",
30
+ "get_unit_from_dtype",
31
+ "periods_per_day",
32
+ "periods_per_second",
33
+ "guess_datetime_format",
34
+ "add_overflowsafe",
35
+ "get_supported_dtype",
36
+ "is_supported_dtype",
37
+ ]
38
+
39
+ from pandas._libs.tslibs import dtypes # pylint: disable=import-self
40
+ from pandas._libs.tslibs.conversion import localize_pydatetime
41
+ from pandas._libs.tslibs.dtypes import (
42
+ Resolution,
43
+ periods_per_day,
44
+ periods_per_second,
45
+ )
46
+ from pandas._libs.tslibs.nattype import (
47
+ NaT,
48
+ NaTType,
49
+ iNaT,
50
+ nat_strings,
51
+ )
52
+ from pandas._libs.tslibs.np_datetime import (
53
+ OutOfBoundsDatetime,
54
+ OutOfBoundsTimedelta,
55
+ add_overflowsafe,
56
+ astype_overflowsafe,
57
+ get_supported_dtype,
58
+ is_supported_dtype,
59
+ is_unitless,
60
+ py_get_unit_from_dtype as get_unit_from_dtype,
61
+ )
62
+ from pandas._libs.tslibs.offsets import (
63
+ BaseOffset,
64
+ Tick,
65
+ to_offset,
66
+ )
67
+ from pandas._libs.tslibs.parsing import guess_datetime_format
68
+ from pandas._libs.tslibs.period import (
69
+ IncompatibleFrequency,
70
+ Period,
71
+ )
72
+ from pandas._libs.tslibs.timedeltas import (
73
+ Timedelta,
74
+ delta_to_nanoseconds,
75
+ ints_to_pytimedelta,
76
+ )
77
+ from pandas._libs.tslibs.timestamps import Timestamp
78
+ from pandas._libs.tslibs.timezones import tz_compare
79
+ from pandas._libs.tslibs.tzconversion import tz_convert_from_utc_single
80
+ from pandas._libs.tslibs.vectorized import (
81
+ dt64arr_to_periodarr,
82
+ get_resolution,
83
+ ints_to_pydatetime,
84
+ is_date_array_normalized,
85
+ normalize_i8_timestamps,
86
+ tz_convert_from_utc,
87
+ )
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.84 kB). View file
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/base.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (62.3 kB). View file
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/ccalendar.pyi ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ DAYS: list[str]
2
+ MONTH_ALIASES: dict[int, str]
3
+ MONTH_NUMBERS: dict[str, int]
4
+ MONTHS: list[str]
5
+ int_to_weekday: dict[int, str]
6
+
7
+ def get_firstbday(year: int, month: int) -> int: ...
8
+ def get_lastbday(year: int, month: int) -> int: ...
9
+ def get_day_of_year(year: int, month: int, day: int) -> int: ...
10
+ def get_iso_calendar(year: int, month: int, day: int) -> tuple[int, int, int]: ...
11
+ def get_week_of_year(year: int, month: int, day: int) -> int: ...
12
+ def get_days_in_month(year: int, month: int) -> int: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/conversion.pyi ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ datetime,
3
+ tzinfo,
4
+ )
5
+
6
+ import numpy as np
7
+
8
+ DT64NS_DTYPE: np.dtype
9
+ TD64NS_DTYPE: np.dtype
10
+
11
+ def localize_pydatetime(dt: datetime, tz: tzinfo | None) -> datetime: ...
12
+ def cast_from_unit_vectorized(
13
+ values: np.ndarray, unit: str, out_unit: str = ...
14
+ ) -> np.ndarray: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/dtypes.pyi ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from enum import Enum
2
+
3
+ OFFSET_TO_PERIOD_FREQSTR: dict[str, str]
4
+
5
+ def periods_per_day(reso: int = ...) -> int: ...
6
+ def periods_per_second(reso: int) -> int: ...
7
+ def abbrev_to_npy_unit(abbrev: str | None) -> int: ...
8
+ def freq_to_period_freqstr(freq_n: int, freq_name: str) -> str: ...
9
+
10
+ class PeriodDtypeBase:
11
+ _dtype_code: int # PeriodDtypeCode
12
+ _n: int
13
+
14
+ # actually __cinit__
15
+ def __new__(cls, code: int, n: int): ...
16
+ @property
17
+ def _freq_group_code(self) -> int: ...
18
+ @property
19
+ def _resolution_obj(self) -> Resolution: ...
20
+ def _get_to_timestamp_base(self) -> int: ...
21
+ @property
22
+ def _freqstr(self) -> str: ...
23
+ def __hash__(self) -> int: ...
24
+ def _is_tick_like(self) -> bool: ...
25
+ @property
26
+ def _creso(self) -> int: ...
27
+ @property
28
+ def _td64_unit(self) -> str: ...
29
+
30
+ class FreqGroup(Enum):
31
+ FR_ANN: int
32
+ FR_QTR: int
33
+ FR_MTH: int
34
+ FR_WK: int
35
+ FR_BUS: int
36
+ FR_DAY: int
37
+ FR_HR: int
38
+ FR_MIN: int
39
+ FR_SEC: int
40
+ FR_MS: int
41
+ FR_US: int
42
+ FR_NS: int
43
+ FR_UND: int
44
+ @staticmethod
45
+ def from_period_dtype_code(code: int) -> FreqGroup: ...
46
+
47
+ class Resolution(Enum):
48
+ RESO_NS: int
49
+ RESO_US: int
50
+ RESO_MS: int
51
+ RESO_SEC: int
52
+ RESO_MIN: int
53
+ RESO_HR: int
54
+ RESO_DAY: int
55
+ RESO_MTH: int
56
+ RESO_QTR: int
57
+ RESO_YR: int
58
+ def __lt__(self, other: Resolution) -> bool: ...
59
+ def __ge__(self, other: Resolution) -> bool: ...
60
+ @property
61
+ def attrname(self) -> str: ...
62
+ @classmethod
63
+ def from_attrname(cls, attrname: str) -> Resolution: ...
64
+ @classmethod
65
+ def get_reso_from_freqstr(cls, freq: str) -> Resolution: ...
66
+ @property
67
+ def attr_abbrev(self) -> str: ...
68
+
69
+ class NpyDatetimeUnit(Enum):
70
+ NPY_FR_Y: int
71
+ NPY_FR_M: int
72
+ NPY_FR_W: int
73
+ NPY_FR_D: int
74
+ NPY_FR_h: int
75
+ NPY_FR_m: int
76
+ NPY_FR_s: int
77
+ NPY_FR_ms: int
78
+ NPY_FR_us: int
79
+ NPY_FR_ns: int
80
+ NPY_FR_ps: int
81
+ NPY_FR_fs: int
82
+ NPY_FR_as: int
83
+ NPY_FR_GENERIC: int
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/fields.pyi ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas._typing import npt
4
+
5
+ def build_field_sarray(
6
+ dtindex: npt.NDArray[np.int64], # const int64_t[:]
7
+ reso: int, # NPY_DATETIMEUNIT
8
+ ) -> np.ndarray: ...
9
+ def month_position_check(fields, weekdays) -> str | None: ...
10
+ def get_date_name_field(
11
+ dtindex: npt.NDArray[np.int64], # const int64_t[:]
12
+ field: str,
13
+ locale: str | None = ...,
14
+ reso: int = ..., # NPY_DATETIMEUNIT
15
+ ) -> npt.NDArray[np.object_]: ...
16
+ def get_start_end_field(
17
+ dtindex: npt.NDArray[np.int64],
18
+ field: str,
19
+ freqstr: str | None = ...,
20
+ month_kw: int = ...,
21
+ reso: int = ..., # NPY_DATETIMEUNIT
22
+ ) -> npt.NDArray[np.bool_]: ...
23
+ def get_date_field(
24
+ dtindex: npt.NDArray[np.int64], # const int64_t[:]
25
+ field: str,
26
+ reso: int = ..., # NPY_DATETIMEUNIT
27
+ ) -> npt.NDArray[np.int32]: ...
28
+ def get_timedelta_field(
29
+ tdindex: npt.NDArray[np.int64], # const int64_t[:]
30
+ field: str,
31
+ reso: int = ..., # NPY_DATETIMEUNIT
32
+ ) -> npt.NDArray[np.int32]: ...
33
+ def get_timedelta_days(
34
+ tdindex: npt.NDArray[np.int64], # const int64_t[:]
35
+ reso: int = ..., # NPY_DATETIMEUNIT
36
+ ) -> npt.NDArray[np.int64]: ...
37
+ def isleapyear_arr(
38
+ years: np.ndarray,
39
+ ) -> npt.NDArray[np.bool_]: ...
40
+ def build_isocalendar_sarray(
41
+ dtindex: npt.NDArray[np.int64], # const int64_t[:]
42
+ reso: int, # NPY_DATETIMEUNIT
43
+ ) -> np.ndarray: ...
44
+ def _get_locale_names(name_type: str, locale: str | None = ...): ...
45
+
46
+ class RoundTo:
47
+ @property
48
+ def MINUS_INFTY(self) -> int: ...
49
+ @property
50
+ def PLUS_INFTY(self) -> int: ...
51
+ @property
52
+ def NEAREST_HALF_EVEN(self) -> int: ...
53
+ @property
54
+ def NEAREST_HALF_PLUS_INFTY(self) -> int: ...
55
+ @property
56
+ def NEAREST_HALF_MINUS_INFTY(self) -> int: ...
57
+
58
+ def round_nsint64(
59
+ values: npt.NDArray[np.int64],
60
+ mode: RoundTo,
61
+ nanos: int,
62
+ ) -> npt.NDArray[np.int64]: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/nattype.pyi ADDED
@@ -0,0 +1,141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ datetime,
3
+ timedelta,
4
+ tzinfo as _tzinfo,
5
+ )
6
+ import typing
7
+
8
+ import numpy as np
9
+
10
+ from pandas._libs.tslibs.period import Period
11
+ from pandas._typing import Self
12
+
13
+ NaT: NaTType
14
+ iNaT: int
15
+ nat_strings: set[str]
16
+
17
+ _NaTComparisonTypes: typing.TypeAlias = (
18
+ datetime | timedelta | Period | np.datetime64 | np.timedelta64
19
+ )
20
+
21
+ class _NatComparison:
22
+ def __call__(self, other: _NaTComparisonTypes) -> bool: ...
23
+
24
+ class NaTType:
25
+ _value: np.int64
26
+ @property
27
+ def value(self) -> int: ...
28
+ @property
29
+ def asm8(self) -> np.datetime64: ...
30
+ def to_datetime64(self) -> np.datetime64: ...
31
+ def to_numpy(
32
+ self, dtype: np.dtype | str | None = ..., copy: bool = ...
33
+ ) -> np.datetime64 | np.timedelta64: ...
34
+ @property
35
+ def is_leap_year(self) -> bool: ...
36
+ @property
37
+ def is_month_start(self) -> bool: ...
38
+ @property
39
+ def is_quarter_start(self) -> bool: ...
40
+ @property
41
+ def is_year_start(self) -> bool: ...
42
+ @property
43
+ def is_month_end(self) -> bool: ...
44
+ @property
45
+ def is_quarter_end(self) -> bool: ...
46
+ @property
47
+ def is_year_end(self) -> bool: ...
48
+ @property
49
+ def day_of_year(self) -> float: ...
50
+ @property
51
+ def dayofyear(self) -> float: ...
52
+ @property
53
+ def days_in_month(self) -> float: ...
54
+ @property
55
+ def daysinmonth(self) -> float: ...
56
+ @property
57
+ def day_of_week(self) -> float: ...
58
+ @property
59
+ def dayofweek(self) -> float: ...
60
+ @property
61
+ def week(self) -> float: ...
62
+ @property
63
+ def weekofyear(self) -> float: ...
64
+ def day_name(self) -> float: ...
65
+ def month_name(self) -> float: ...
66
+ def weekday(self) -> float: ...
67
+ def isoweekday(self) -> float: ...
68
+ def total_seconds(self) -> float: ...
69
+ def today(self, *args, **kwargs) -> NaTType: ...
70
+ def now(self, *args, **kwargs) -> NaTType: ...
71
+ def to_pydatetime(self) -> NaTType: ...
72
+ def date(self) -> NaTType: ...
73
+ def round(self) -> NaTType: ...
74
+ def floor(self) -> NaTType: ...
75
+ def ceil(self) -> NaTType: ...
76
+ @property
77
+ def tzinfo(self) -> None: ...
78
+ @property
79
+ def tz(self) -> None: ...
80
+ def tz_convert(self, tz: _tzinfo | str | None) -> NaTType: ...
81
+ def tz_localize(
82
+ self,
83
+ tz: _tzinfo | str | None,
84
+ ambiguous: str = ...,
85
+ nonexistent: str = ...,
86
+ ) -> NaTType: ...
87
+ def replace(
88
+ self,
89
+ year: int | None = ...,
90
+ month: int | None = ...,
91
+ day: int | None = ...,
92
+ hour: int | None = ...,
93
+ minute: int | None = ...,
94
+ second: int | None = ...,
95
+ microsecond: int | None = ...,
96
+ nanosecond: int | None = ...,
97
+ tzinfo: _tzinfo | None = ...,
98
+ fold: int | None = ...,
99
+ ) -> NaTType: ...
100
+ @property
101
+ def year(self) -> float: ...
102
+ @property
103
+ def quarter(self) -> float: ...
104
+ @property
105
+ def month(self) -> float: ...
106
+ @property
107
+ def day(self) -> float: ...
108
+ @property
109
+ def hour(self) -> float: ...
110
+ @property
111
+ def minute(self) -> float: ...
112
+ @property
113
+ def second(self) -> float: ...
114
+ @property
115
+ def millisecond(self) -> float: ...
116
+ @property
117
+ def microsecond(self) -> float: ...
118
+ @property
119
+ def nanosecond(self) -> float: ...
120
+ # inject Timedelta properties
121
+ @property
122
+ def days(self) -> float: ...
123
+ @property
124
+ def microseconds(self) -> float: ...
125
+ @property
126
+ def nanoseconds(self) -> float: ...
127
+ # inject Period properties
128
+ @property
129
+ def qyear(self) -> float: ...
130
+ def __eq__(self, other: object) -> bool: ...
131
+ def __ne__(self, other: object) -> bool: ...
132
+ __lt__: _NatComparison
133
+ __le__: _NatComparison
134
+ __gt__: _NatComparison
135
+ __ge__: _NatComparison
136
+ def __sub__(self, other: Self | timedelta | datetime) -> Self: ...
137
+ def __rsub__(self, other: Self | timedelta | datetime) -> Self: ...
138
+ def __add__(self, other: Self | timedelta | datetime) -> Self: ...
139
+ def __radd__(self, other: Self | timedelta | datetime) -> Self: ...
140
+ def __hash__(self) -> int: ...
141
+ def as_unit(self, unit: str, round_ok: bool = ...) -> NaTType: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/np_datetime.pyi ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas._typing import npt
4
+
5
+ class OutOfBoundsDatetime(ValueError): ...
6
+ class OutOfBoundsTimedelta(ValueError): ...
7
+
8
+ # only exposed for testing
9
+ def py_get_unit_from_dtype(dtype: np.dtype): ...
10
+ def py_td64_to_tdstruct(td64: int, unit: int) -> dict: ...
11
+ def astype_overflowsafe(
12
+ values: np.ndarray,
13
+ dtype: np.dtype,
14
+ copy: bool = ...,
15
+ round_ok: bool = ...,
16
+ is_coerce: bool = ...,
17
+ ) -> np.ndarray: ...
18
+ def is_unitless(dtype: np.dtype) -> bool: ...
19
+ def compare_mismatched_resolutions(
20
+ left: np.ndarray, right: np.ndarray, op
21
+ ) -> npt.NDArray[np.bool_]: ...
22
+ def add_overflowsafe(
23
+ left: npt.NDArray[np.int64],
24
+ right: npt.NDArray[np.int64],
25
+ ) -> npt.NDArray[np.int64]: ...
26
+ def get_supported_dtype(dtype: np.dtype) -> np.dtype: ...
27
+ def is_supported_dtype(dtype: np.dtype) -> bool: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.pyi ADDED
@@ -0,0 +1,287 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ datetime,
3
+ time,
4
+ timedelta,
5
+ )
6
+ from typing import (
7
+ Any,
8
+ Collection,
9
+ Literal,
10
+ TypeVar,
11
+ overload,
12
+ )
13
+
14
+ import numpy as np
15
+
16
+ from pandas._libs.tslibs.nattype import NaTType
17
+ from pandas._typing import (
18
+ OffsetCalendar,
19
+ Self,
20
+ npt,
21
+ )
22
+
23
+ from .timedeltas import Timedelta
24
+
25
+ _BaseOffsetT = TypeVar("_BaseOffsetT", bound=BaseOffset)
26
+ _DatetimeT = TypeVar("_DatetimeT", bound=datetime)
27
+ _TimedeltaT = TypeVar("_TimedeltaT", bound=timedelta)
28
+
29
+ _relativedelta_kwds: set[str]
30
+ prefix_mapping: dict[str, type]
31
+
32
+ class ApplyTypeError(TypeError): ...
33
+
34
+ class BaseOffset:
35
+ n: int
36
+ normalize: bool
37
+ def __init__(self, n: int = ..., normalize: bool = ...) -> None: ...
38
+ def __eq__(self, other) -> bool: ...
39
+ def __ne__(self, other) -> bool: ...
40
+ def __hash__(self) -> int: ...
41
+ @property
42
+ def kwds(self) -> dict: ...
43
+ @property
44
+ def base(self) -> BaseOffset: ...
45
+ @overload
46
+ def __add__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
47
+ @overload
48
+ def __add__(self, other: BaseOffset) -> Self: ...
49
+ @overload
50
+ def __add__(self, other: _DatetimeT) -> _DatetimeT: ...
51
+ @overload
52
+ def __add__(self, other: _TimedeltaT) -> _TimedeltaT: ...
53
+ @overload
54
+ def __radd__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
55
+ @overload
56
+ def __radd__(self, other: BaseOffset) -> Self: ...
57
+ @overload
58
+ def __radd__(self, other: _DatetimeT) -> _DatetimeT: ...
59
+ @overload
60
+ def __radd__(self, other: _TimedeltaT) -> _TimedeltaT: ...
61
+ @overload
62
+ def __radd__(self, other: NaTType) -> NaTType: ...
63
+ def __sub__(self, other: BaseOffset) -> Self: ...
64
+ @overload
65
+ def __rsub__(self, other: npt.NDArray[np.object_]) -> npt.NDArray[np.object_]: ...
66
+ @overload
67
+ def __rsub__(self, other: BaseOffset): ...
68
+ @overload
69
+ def __rsub__(self, other: _DatetimeT) -> _DatetimeT: ...
70
+ @overload
71
+ def __rsub__(self, other: _TimedeltaT) -> _TimedeltaT: ...
72
+ @overload
73
+ def __mul__(self, other: np.ndarray) -> np.ndarray: ...
74
+ @overload
75
+ def __mul__(self, other: int): ...
76
+ @overload
77
+ def __rmul__(self, other: np.ndarray) -> np.ndarray: ...
78
+ @overload
79
+ def __rmul__(self, other: int) -> Self: ...
80
+ def __neg__(self) -> Self: ...
81
+ def copy(self) -> Self: ...
82
+ @property
83
+ def name(self) -> str: ...
84
+ @property
85
+ def rule_code(self) -> str: ...
86
+ @property
87
+ def freqstr(self) -> str: ...
88
+ def _apply(self, other): ...
89
+ def _apply_array(self, dtarr: np.ndarray) -> np.ndarray: ...
90
+ def rollback(self, dt: datetime) -> datetime: ...
91
+ def rollforward(self, dt: datetime) -> datetime: ...
92
+ def is_on_offset(self, dt: datetime) -> bool: ...
93
+ def __setstate__(self, state) -> None: ...
94
+ def __getstate__(self): ...
95
+ @property
96
+ def nanos(self) -> int: ...
97
+ def is_anchored(self) -> bool: ...
98
+
99
+ def _get_offset(name: str) -> BaseOffset: ...
100
+
101
+ class SingleConstructorOffset(BaseOffset):
102
+ @classmethod
103
+ def _from_name(cls, suffix: None = ...): ...
104
+ def __reduce__(self): ...
105
+
106
+ @overload
107
+ def to_offset(freq: None, is_period: bool = ...) -> None: ...
108
+ @overload
109
+ def to_offset(freq: _BaseOffsetT, is_period: bool = ...) -> _BaseOffsetT: ...
110
+ @overload
111
+ def to_offset(freq: timedelta | str, is_period: bool = ...) -> BaseOffset: ...
112
+
113
+ class Tick(SingleConstructorOffset):
114
+ _creso: int
115
+ _prefix: str
116
+ def __init__(self, n: int = ..., normalize: bool = ...) -> None: ...
117
+ @property
118
+ def delta(self) -> Timedelta: ...
119
+ @property
120
+ def nanos(self) -> int: ...
121
+
122
+ def delta_to_tick(delta: timedelta) -> Tick: ...
123
+
124
+ class Day(Tick): ...
125
+ class Hour(Tick): ...
126
+ class Minute(Tick): ...
127
+ class Second(Tick): ...
128
+ class Milli(Tick): ...
129
+ class Micro(Tick): ...
130
+ class Nano(Tick): ...
131
+
132
+ class RelativeDeltaOffset(BaseOffset):
133
+ def __init__(self, n: int = ..., normalize: bool = ..., **kwds: Any) -> None: ...
134
+
135
+ class BusinessMixin(SingleConstructorOffset):
136
+ def __init__(
137
+ self, n: int = ..., normalize: bool = ..., offset: timedelta = ...
138
+ ) -> None: ...
139
+
140
+ class BusinessDay(BusinessMixin): ...
141
+
142
+ class BusinessHour(BusinessMixin):
143
+ def __init__(
144
+ self,
145
+ n: int = ...,
146
+ normalize: bool = ...,
147
+ start: str | time | Collection[str | time] = ...,
148
+ end: str | time | Collection[str | time] = ...,
149
+ offset: timedelta = ...,
150
+ ) -> None: ...
151
+
152
+ class WeekOfMonthMixin(SingleConstructorOffset):
153
+ def __init__(
154
+ self, n: int = ..., normalize: bool = ..., weekday: int = ...
155
+ ) -> None: ...
156
+
157
+ class YearOffset(SingleConstructorOffset):
158
+ def __init__(
159
+ self, n: int = ..., normalize: bool = ..., month: int | None = ...
160
+ ) -> None: ...
161
+
162
+ class BYearEnd(YearOffset): ...
163
+ class BYearBegin(YearOffset): ...
164
+ class YearEnd(YearOffset): ...
165
+ class YearBegin(YearOffset): ...
166
+
167
+ class QuarterOffset(SingleConstructorOffset):
168
+ def __init__(
169
+ self, n: int = ..., normalize: bool = ..., startingMonth: int | None = ...
170
+ ) -> None: ...
171
+
172
+ class BQuarterEnd(QuarterOffset): ...
173
+ class BQuarterBegin(QuarterOffset): ...
174
+ class QuarterEnd(QuarterOffset): ...
175
+ class QuarterBegin(QuarterOffset): ...
176
+ class MonthOffset(SingleConstructorOffset): ...
177
+ class MonthEnd(MonthOffset): ...
178
+ class MonthBegin(MonthOffset): ...
179
+ class BusinessMonthEnd(MonthOffset): ...
180
+ class BusinessMonthBegin(MonthOffset): ...
181
+
182
+ class SemiMonthOffset(SingleConstructorOffset):
183
+ def __init__(
184
+ self, n: int = ..., normalize: bool = ..., day_of_month: int | None = ...
185
+ ) -> None: ...
186
+
187
+ class SemiMonthEnd(SemiMonthOffset): ...
188
+ class SemiMonthBegin(SemiMonthOffset): ...
189
+
190
+ class Week(SingleConstructorOffset):
191
+ def __init__(
192
+ self, n: int = ..., normalize: bool = ..., weekday: int | None = ...
193
+ ) -> None: ...
194
+
195
+ class WeekOfMonth(WeekOfMonthMixin):
196
+ def __init__(
197
+ self, n: int = ..., normalize: bool = ..., week: int = ..., weekday: int = ...
198
+ ) -> None: ...
199
+
200
+ class LastWeekOfMonth(WeekOfMonthMixin): ...
201
+
202
+ class FY5253Mixin(SingleConstructorOffset):
203
+ def __init__(
204
+ self,
205
+ n: int = ...,
206
+ normalize: bool = ...,
207
+ weekday: int = ...,
208
+ startingMonth: int = ...,
209
+ variation: Literal["nearest", "last"] = ...,
210
+ ) -> None: ...
211
+
212
+ class FY5253(FY5253Mixin): ...
213
+
214
+ class FY5253Quarter(FY5253Mixin):
215
+ def __init__(
216
+ self,
217
+ n: int = ...,
218
+ normalize: bool = ...,
219
+ weekday: int = ...,
220
+ startingMonth: int = ...,
221
+ qtr_with_extra_week: int = ...,
222
+ variation: Literal["nearest", "last"] = ...,
223
+ ) -> None: ...
224
+
225
+ class Easter(SingleConstructorOffset): ...
226
+
227
+ class _CustomBusinessMonth(BusinessMixin):
228
+ def __init__(
229
+ self,
230
+ n: int = ...,
231
+ normalize: bool = ...,
232
+ weekmask: str = ...,
233
+ holidays: list | None = ...,
234
+ calendar: OffsetCalendar | None = ...,
235
+ offset: timedelta = ...,
236
+ ) -> None: ...
237
+
238
+ class CustomBusinessDay(BusinessDay):
239
+ def __init__(
240
+ self,
241
+ n: int = ...,
242
+ normalize: bool = ...,
243
+ weekmask: str = ...,
244
+ holidays: list | None = ...,
245
+ calendar: OffsetCalendar | None = ...,
246
+ offset: timedelta = ...,
247
+ ) -> None: ...
248
+
249
+ class CustomBusinessHour(BusinessHour):
250
+ def __init__(
251
+ self,
252
+ n: int = ...,
253
+ normalize: bool = ...,
254
+ weekmask: str = ...,
255
+ holidays: list | None = ...,
256
+ calendar: OffsetCalendar | None = ...,
257
+ start: str | time | Collection[str | time] = ...,
258
+ end: str | time | Collection[str | time] = ...,
259
+ offset: timedelta = ...,
260
+ ) -> None: ...
261
+
262
+ class CustomBusinessMonthEnd(_CustomBusinessMonth): ...
263
+ class CustomBusinessMonthBegin(_CustomBusinessMonth): ...
264
+ class OffsetMeta(type): ...
265
+ class DateOffset(RelativeDeltaOffset, metaclass=OffsetMeta): ...
266
+
267
+ BDay = BusinessDay
268
+ BMonthEnd = BusinessMonthEnd
269
+ BMonthBegin = BusinessMonthBegin
270
+ CBMonthEnd = CustomBusinessMonthEnd
271
+ CBMonthBegin = CustomBusinessMonthBegin
272
+ CDay = CustomBusinessDay
273
+
274
+ def roll_qtrday(
275
+ other: datetime, n: int, month: int, day_opt: str, modby: int
276
+ ) -> int: ...
277
+
278
+ INVALID_FREQ_ERR_MSG: Literal["Invalid frequency: {0}"]
279
+
280
+ def shift_months(
281
+ dtindex: npt.NDArray[np.int64],
282
+ months: int,
283
+ day_opt: str | None = ...,
284
+ reso: int = ...,
285
+ ) -> npt.NDArray[np.int64]: ...
286
+
287
+ _offset_map: dict[str, BaseOffset]
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/parsing.pyi ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+ import numpy as np
4
+
5
+ from pandas._typing import npt
6
+
7
+ class DateParseError(ValueError): ...
8
+
9
+ def py_parse_datetime_string(
10
+ date_string: str,
11
+ dayfirst: bool = ...,
12
+ yearfirst: bool = ...,
13
+ ) -> datetime: ...
14
+ def parse_datetime_string_with_reso(
15
+ date_string: str,
16
+ freq: str | None = ...,
17
+ dayfirst: bool | None = ...,
18
+ yearfirst: bool | None = ...,
19
+ ) -> tuple[datetime, str]: ...
20
+ def _does_string_look_like_datetime(py_string: str) -> bool: ...
21
+ def quarter_to_myear(year: int, quarter: int, freq: str) -> tuple[int, int]: ...
22
+ def try_parse_dates(
23
+ values: npt.NDArray[np.object_], # object[:]
24
+ parser,
25
+ ) -> npt.NDArray[np.object_]: ...
26
+ def guess_datetime_format(
27
+ dt_str: str,
28
+ dayfirst: bool | None = ...,
29
+ ) -> str | None: ...
30
+ def concat_date_cols(
31
+ date_cols: tuple,
32
+ ) -> npt.NDArray[np.object_]: ...
33
+ def get_rule_month(source: str) -> str: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/period.pyi ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import timedelta
2
+ from typing import Literal
3
+
4
+ import numpy as np
5
+
6
+ from pandas._libs.tslibs.dtypes import PeriodDtypeBase
7
+ from pandas._libs.tslibs.nattype import NaTType
8
+ from pandas._libs.tslibs.offsets import BaseOffset
9
+ from pandas._libs.tslibs.timestamps import Timestamp
10
+ from pandas._typing import (
11
+ Frequency,
12
+ npt,
13
+ )
14
+
15
+ INVALID_FREQ_ERR_MSG: str
16
+ DIFFERENT_FREQ: str
17
+
18
+ class IncompatibleFrequency(ValueError): ...
19
+
20
+ def periodarr_to_dt64arr(
21
+ periodarr: npt.NDArray[np.int64], # const int64_t[:]
22
+ freq: int,
23
+ ) -> npt.NDArray[np.int64]: ...
24
+ def period_asfreq_arr(
25
+ arr: npt.NDArray[np.int64],
26
+ freq1: int,
27
+ freq2: int,
28
+ end: bool,
29
+ ) -> npt.NDArray[np.int64]: ...
30
+ def get_period_field_arr(
31
+ field: str,
32
+ arr: npt.NDArray[np.int64], # const int64_t[:]
33
+ freq: int,
34
+ ) -> npt.NDArray[np.int64]: ...
35
+ def from_ordinals(
36
+ values: npt.NDArray[np.int64], # const int64_t[:]
37
+ freq: timedelta | BaseOffset | str,
38
+ ) -> npt.NDArray[np.int64]: ...
39
+ def extract_ordinals(
40
+ values: npt.NDArray[np.object_],
41
+ freq: Frequency | int,
42
+ ) -> npt.NDArray[np.int64]: ...
43
+ def extract_freq(
44
+ values: npt.NDArray[np.object_],
45
+ ) -> BaseOffset: ...
46
+ def period_array_strftime(
47
+ values: npt.NDArray[np.int64],
48
+ dtype_code: int,
49
+ na_rep,
50
+ date_format: str | None,
51
+ ) -> npt.NDArray[np.object_]: ...
52
+
53
+ # exposed for tests
54
+ def period_asfreq(ordinal: int, freq1: int, freq2: int, end: bool) -> int: ...
55
+ def period_ordinal(
56
+ y: int, m: int, d: int, h: int, min: int, s: int, us: int, ps: int, freq: int
57
+ ) -> int: ...
58
+ def freq_to_dtype_code(freq: BaseOffset) -> int: ...
59
+ def validate_end_alias(how: str) -> Literal["E", "S"]: ...
60
+
61
+ class PeriodMixin:
62
+ @property
63
+ def end_time(self) -> Timestamp: ...
64
+ @property
65
+ def start_time(self) -> Timestamp: ...
66
+ def _require_matching_freq(self, other: BaseOffset, base: bool = ...) -> None: ...
67
+
68
+ class Period(PeriodMixin):
69
+ ordinal: int # int64_t
70
+ freq: BaseOffset
71
+ _dtype: PeriodDtypeBase
72
+
73
+ # error: "__new__" must return a class instance (got "Union[Period, NaTType]")
74
+ def __new__( # type: ignore[misc]
75
+ cls,
76
+ value=...,
77
+ freq: int | str | BaseOffset | None = ...,
78
+ ordinal: int | None = ...,
79
+ year: int | None = ...,
80
+ month: int | None = ...,
81
+ quarter: int | None = ...,
82
+ day: int | None = ...,
83
+ hour: int | None = ...,
84
+ minute: int | None = ...,
85
+ second: int | None = ...,
86
+ ) -> Period | NaTType: ...
87
+ @classmethod
88
+ def _maybe_convert_freq(cls, freq) -> BaseOffset: ...
89
+ @classmethod
90
+ def _from_ordinal(cls, ordinal: int, freq: BaseOffset) -> Period: ...
91
+ @classmethod
92
+ def now(cls, freq: Frequency) -> Period: ...
93
+ def strftime(self, fmt: str | None) -> str: ...
94
+ def to_timestamp(
95
+ self,
96
+ freq: str | BaseOffset | None = ...,
97
+ how: str = ...,
98
+ ) -> Timestamp: ...
99
+ def asfreq(self, freq: str | BaseOffset, how: str = ...) -> Period: ...
100
+ @property
101
+ def freqstr(self) -> str: ...
102
+ @property
103
+ def is_leap_year(self) -> bool: ...
104
+ @property
105
+ def daysinmonth(self) -> int: ...
106
+ @property
107
+ def days_in_month(self) -> int: ...
108
+ @property
109
+ def qyear(self) -> int: ...
110
+ @property
111
+ def quarter(self) -> int: ...
112
+ @property
113
+ def day_of_year(self) -> int: ...
114
+ @property
115
+ def weekday(self) -> int: ...
116
+ @property
117
+ def day_of_week(self) -> int: ...
118
+ @property
119
+ def week(self) -> int: ...
120
+ @property
121
+ def weekofyear(self) -> int: ...
122
+ @property
123
+ def second(self) -> int: ...
124
+ @property
125
+ def minute(self) -> int: ...
126
+ @property
127
+ def hour(self) -> int: ...
128
+ @property
129
+ def day(self) -> int: ...
130
+ @property
131
+ def month(self) -> int: ...
132
+ @property
133
+ def year(self) -> int: ...
134
+ def __sub__(self, other) -> Period | BaseOffset: ...
135
+ def __add__(self, other) -> Period: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/strptime.pyi ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas._typing import npt
4
+
5
+ def array_strptime(
6
+ values: npt.NDArray[np.object_],
7
+ fmt: str | None,
8
+ exact: bool = ...,
9
+ errors: str = ...,
10
+ utc: bool = ...,
11
+ creso: int = ..., # NPY_DATETIMEUNIT
12
+ ) -> tuple[np.ndarray, np.ndarray]: ...
13
+
14
+ # first ndarray is M8[ns], second is object ndarray of tzinfo | None
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/timedeltas.pyi ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import timedelta
2
+ from typing import (
3
+ ClassVar,
4
+ Literal,
5
+ TypeAlias,
6
+ TypeVar,
7
+ overload,
8
+ )
9
+
10
+ import numpy as np
11
+
12
+ from pandas._libs.tslibs import (
13
+ NaTType,
14
+ Tick,
15
+ )
16
+ from pandas._typing import (
17
+ Frequency,
18
+ Self,
19
+ npt,
20
+ )
21
+
22
+ # This should be kept consistent with the keys in the dict timedelta_abbrevs
23
+ # in pandas/_libs/tslibs/timedeltas.pyx
24
+ UnitChoices: TypeAlias = Literal[
25
+ "Y",
26
+ "y",
27
+ "M",
28
+ "W",
29
+ "w",
30
+ "D",
31
+ "d",
32
+ "days",
33
+ "day",
34
+ "hours",
35
+ "hour",
36
+ "hr",
37
+ "h",
38
+ "m",
39
+ "minute",
40
+ "min",
41
+ "minutes",
42
+ "T",
43
+ "t",
44
+ "s",
45
+ "seconds",
46
+ "sec",
47
+ "second",
48
+ "ms",
49
+ "milliseconds",
50
+ "millisecond",
51
+ "milli",
52
+ "millis",
53
+ "L",
54
+ "l",
55
+ "us",
56
+ "microseconds",
57
+ "microsecond",
58
+ "µs",
59
+ "micro",
60
+ "micros",
61
+ "u",
62
+ "ns",
63
+ "nanoseconds",
64
+ "nano",
65
+ "nanos",
66
+ "nanosecond",
67
+ "n",
68
+ ]
69
+ _S = TypeVar("_S", bound=timedelta)
70
+
71
+ def get_unit_for_round(freq, creso: int) -> int: ...
72
+ def disallow_ambiguous_unit(unit: str | None) -> None: ...
73
+ def ints_to_pytimedelta(
74
+ m8values: npt.NDArray[np.timedelta64],
75
+ box: bool = ...,
76
+ ) -> npt.NDArray[np.object_]: ...
77
+ def array_to_timedelta64(
78
+ values: npt.NDArray[np.object_],
79
+ unit: str | None = ...,
80
+ errors: str = ...,
81
+ ) -> np.ndarray: ... # np.ndarray[m8ns]
82
+ def parse_timedelta_unit(unit: str | None) -> UnitChoices: ...
83
+ def delta_to_nanoseconds(
84
+ delta: np.timedelta64 | timedelta | Tick,
85
+ reso: int = ..., # NPY_DATETIMEUNIT
86
+ round_ok: bool = ...,
87
+ ) -> int: ...
88
+ def floordiv_object_array(
89
+ left: np.ndarray, right: npt.NDArray[np.object_]
90
+ ) -> np.ndarray: ...
91
+ def truediv_object_array(
92
+ left: np.ndarray, right: npt.NDArray[np.object_]
93
+ ) -> np.ndarray: ...
94
+
95
+ class Timedelta(timedelta):
96
+ _creso: int
97
+ min: ClassVar[Timedelta]
98
+ max: ClassVar[Timedelta]
99
+ resolution: ClassVar[Timedelta]
100
+ value: int # np.int64
101
+ _value: int # np.int64
102
+ # error: "__new__" must return a class instance (got "Union[Timestamp, NaTType]")
103
+ def __new__( # type: ignore[misc]
104
+ cls: type[_S],
105
+ value=...,
106
+ unit: str | None = ...,
107
+ **kwargs: float | np.integer | np.floating,
108
+ ) -> _S | NaTType: ...
109
+ @classmethod
110
+ def _from_value_and_reso(cls, value: np.int64, reso: int) -> Timedelta: ...
111
+ @property
112
+ def days(self) -> int: ...
113
+ @property
114
+ def seconds(self) -> int: ...
115
+ @property
116
+ def microseconds(self) -> int: ...
117
+ def total_seconds(self) -> float: ...
118
+ def to_pytimedelta(self) -> timedelta: ...
119
+ def to_timedelta64(self) -> np.timedelta64: ...
120
+ @property
121
+ def asm8(self) -> np.timedelta64: ...
122
+ # TODO: round/floor/ceil could return NaT?
123
+ def round(self, freq: Frequency) -> Self: ...
124
+ def floor(self, freq: Frequency) -> Self: ...
125
+ def ceil(self, freq: Frequency) -> Self: ...
126
+ @property
127
+ def resolution_string(self) -> str: ...
128
+ def __add__(self, other: timedelta) -> Timedelta: ...
129
+ def __radd__(self, other: timedelta) -> Timedelta: ...
130
+ def __sub__(self, other: timedelta) -> Timedelta: ...
131
+ def __rsub__(self, other: timedelta) -> Timedelta: ...
132
+ def __neg__(self) -> Timedelta: ...
133
+ def __pos__(self) -> Timedelta: ...
134
+ def __abs__(self) -> Timedelta: ...
135
+ def __mul__(self, other: float) -> Timedelta: ...
136
+ def __rmul__(self, other: float) -> Timedelta: ...
137
+ # error: Signature of "__floordiv__" incompatible with supertype "timedelta"
138
+ @overload # type: ignore[override]
139
+ def __floordiv__(self, other: timedelta) -> int: ...
140
+ @overload
141
+ def __floordiv__(self, other: float) -> Timedelta: ...
142
+ @overload
143
+ def __floordiv__(
144
+ self, other: npt.NDArray[np.timedelta64]
145
+ ) -> npt.NDArray[np.intp]: ...
146
+ @overload
147
+ def __floordiv__(
148
+ self, other: npt.NDArray[np.number]
149
+ ) -> npt.NDArray[np.timedelta64] | Timedelta: ...
150
+ @overload
151
+ def __rfloordiv__(self, other: timedelta | str) -> int: ...
152
+ @overload
153
+ def __rfloordiv__(self, other: None | NaTType) -> NaTType: ...
154
+ @overload
155
+ def __rfloordiv__(self, other: np.ndarray) -> npt.NDArray[np.timedelta64]: ...
156
+ @overload
157
+ def __truediv__(self, other: timedelta) -> float: ...
158
+ @overload
159
+ def __truediv__(self, other: float) -> Timedelta: ...
160
+ def __mod__(self, other: timedelta) -> Timedelta: ...
161
+ def __divmod__(self, other: timedelta) -> tuple[int, Timedelta]: ...
162
+ def __le__(self, other: timedelta) -> bool: ...
163
+ def __lt__(self, other: timedelta) -> bool: ...
164
+ def __ge__(self, other: timedelta) -> bool: ...
165
+ def __gt__(self, other: timedelta) -> bool: ...
166
+ def __hash__(self) -> int: ...
167
+ def isoformat(self) -> str: ...
168
+ def to_numpy(
169
+ self, dtype: npt.DTypeLike = ..., copy: bool = False
170
+ ) -> np.timedelta64: ...
171
+ def view(self, dtype: npt.DTypeLike) -> object: ...
172
+ @property
173
+ def unit(self) -> str: ...
174
+ def as_unit(self, unit: str, round_ok: bool = ...) -> Timedelta: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/timestamps.pyi ADDED
@@ -0,0 +1,241 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ date as _date,
3
+ datetime,
4
+ time as _time,
5
+ timedelta,
6
+ tzinfo as _tzinfo,
7
+ )
8
+ from time import struct_time
9
+ from typing import (
10
+ ClassVar,
11
+ Literal,
12
+ TypeAlias,
13
+ overload,
14
+ )
15
+
16
+ import numpy as np
17
+
18
+ from pandas._libs.tslibs import (
19
+ BaseOffset,
20
+ NaTType,
21
+ Period,
22
+ Tick,
23
+ Timedelta,
24
+ )
25
+ from pandas._typing import (
26
+ Self,
27
+ TimestampNonexistent,
28
+ )
29
+
30
+ _TimeZones: TypeAlias = str | _tzinfo | None | int
31
+
32
+ def integer_op_not_supported(obj: object) -> TypeError: ...
33
+
34
+ class Timestamp(datetime):
35
+ _creso: int
36
+ min: ClassVar[Timestamp]
37
+ max: ClassVar[Timestamp]
38
+
39
+ resolution: ClassVar[Timedelta]
40
+ _value: int # np.int64
41
+ # error: "__new__" must return a class instance (got "Union[Timestamp, NaTType]")
42
+ def __new__( # type: ignore[misc]
43
+ cls: type[Self],
44
+ ts_input: np.integer | float | str | _date | datetime | np.datetime64 = ...,
45
+ year: int | None = ...,
46
+ month: int | None = ...,
47
+ day: int | None = ...,
48
+ hour: int | None = ...,
49
+ minute: int | None = ...,
50
+ second: int | None = ...,
51
+ microsecond: int | None = ...,
52
+ tzinfo: _tzinfo | None = ...,
53
+ *,
54
+ nanosecond: int | None = ...,
55
+ tz: _TimeZones = ...,
56
+ unit: str | int | None = ...,
57
+ fold: int | None = ...,
58
+ ) -> Self | NaTType: ...
59
+ @classmethod
60
+ def _from_value_and_reso(
61
+ cls, value: int, reso: int, tz: _TimeZones
62
+ ) -> Timestamp: ...
63
+ @property
64
+ def value(self) -> int: ... # np.int64
65
+ @property
66
+ def year(self) -> int: ...
67
+ @property
68
+ def month(self) -> int: ...
69
+ @property
70
+ def day(self) -> int: ...
71
+ @property
72
+ def hour(self) -> int: ...
73
+ @property
74
+ def minute(self) -> int: ...
75
+ @property
76
+ def second(self) -> int: ...
77
+ @property
78
+ def microsecond(self) -> int: ...
79
+ @property
80
+ def nanosecond(self) -> int: ...
81
+ @property
82
+ def tzinfo(self) -> _tzinfo | None: ...
83
+ @property
84
+ def tz(self) -> _tzinfo | None: ...
85
+ @property
86
+ def fold(self) -> int: ...
87
+ @classmethod
88
+ def fromtimestamp(cls, ts: float, tz: _TimeZones = ...) -> Self: ...
89
+ @classmethod
90
+ def utcfromtimestamp(cls, ts: float) -> Self: ...
91
+ @classmethod
92
+ def today(cls, tz: _TimeZones = ...) -> Self: ...
93
+ @classmethod
94
+ def fromordinal(
95
+ cls,
96
+ ordinal: int,
97
+ tz: _TimeZones = ...,
98
+ ) -> Self: ...
99
+ @classmethod
100
+ def now(cls, tz: _TimeZones = ...) -> Self: ...
101
+ @classmethod
102
+ def utcnow(cls) -> Self: ...
103
+ # error: Signature of "combine" incompatible with supertype "datetime"
104
+ @classmethod
105
+ def combine( # type: ignore[override]
106
+ cls, date: _date, time: _time
107
+ ) -> datetime: ...
108
+ @classmethod
109
+ def fromisoformat(cls, date_string: str) -> Self: ...
110
+ def strftime(self, format: str) -> str: ...
111
+ def __format__(self, fmt: str) -> str: ...
112
+ def toordinal(self) -> int: ...
113
+ def timetuple(self) -> struct_time: ...
114
+ def timestamp(self) -> float: ...
115
+ def utctimetuple(self) -> struct_time: ...
116
+ def date(self) -> _date: ...
117
+ def time(self) -> _time: ...
118
+ def timetz(self) -> _time: ...
119
+ # LSP violation: nanosecond is not present in datetime.datetime.replace
120
+ # and has positional args following it
121
+ def replace( # type: ignore[override]
122
+ self,
123
+ year: int | None = ...,
124
+ month: int | None = ...,
125
+ day: int | None = ...,
126
+ hour: int | None = ...,
127
+ minute: int | None = ...,
128
+ second: int | None = ...,
129
+ microsecond: int | None = ...,
130
+ nanosecond: int | None = ...,
131
+ tzinfo: _tzinfo | type[object] | None = ...,
132
+ fold: int | None = ...,
133
+ ) -> Self: ...
134
+ # LSP violation: datetime.datetime.astimezone has a default value for tz
135
+ def astimezone(self, tz: _TimeZones) -> Self: ... # type: ignore[override]
136
+ def ctime(self) -> str: ...
137
+ def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ...
138
+ @classmethod
139
+ def strptime(
140
+ # Note: strptime is actually disabled and raises NotImplementedError
141
+ cls,
142
+ date_string: str,
143
+ format: str,
144
+ ) -> Self: ...
145
+ def utcoffset(self) -> timedelta | None: ...
146
+ def tzname(self) -> str | None: ...
147
+ def dst(self) -> timedelta | None: ...
148
+ def __le__(self, other: datetime) -> bool: ... # type: ignore[override]
149
+ def __lt__(self, other: datetime) -> bool: ... # type: ignore[override]
150
+ def __ge__(self, other: datetime) -> bool: ... # type: ignore[override]
151
+ def __gt__(self, other: datetime) -> bool: ... # type: ignore[override]
152
+ # error: Signature of "__add__" incompatible with supertype "date"/"datetime"
153
+ @overload # type: ignore[override]
154
+ def __add__(self, other: np.ndarray) -> np.ndarray: ...
155
+ @overload
156
+ def __add__(self, other: timedelta | np.timedelta64 | Tick) -> Self: ...
157
+ def __radd__(self, other: timedelta) -> Self: ...
158
+ @overload # type: ignore[override]
159
+ def __sub__(self, other: datetime) -> Timedelta: ...
160
+ @overload
161
+ def __sub__(self, other: timedelta | np.timedelta64 | Tick) -> Self: ...
162
+ def __hash__(self) -> int: ...
163
+ def weekday(self) -> int: ...
164
+ def isoweekday(self) -> int: ...
165
+ # Return type "Tuple[int, int, int]" of "isocalendar" incompatible with return
166
+ # type "_IsoCalendarDate" in supertype "date"
167
+ def isocalendar(self) -> tuple[int, int, int]: ... # type: ignore[override]
168
+ @property
169
+ def is_leap_year(self) -> bool: ...
170
+ @property
171
+ def is_month_start(self) -> bool: ...
172
+ @property
173
+ def is_quarter_start(self) -> bool: ...
174
+ @property
175
+ def is_year_start(self) -> bool: ...
176
+ @property
177
+ def is_month_end(self) -> bool: ...
178
+ @property
179
+ def is_quarter_end(self) -> bool: ...
180
+ @property
181
+ def is_year_end(self) -> bool: ...
182
+ def to_pydatetime(self, warn: bool = ...) -> datetime: ...
183
+ def to_datetime64(self) -> np.datetime64: ...
184
+ def to_period(self, freq: BaseOffset | str | None = None) -> Period: ...
185
+ def to_julian_date(self) -> np.float64: ...
186
+ @property
187
+ def asm8(self) -> np.datetime64: ...
188
+ def tz_convert(self, tz: _TimeZones) -> Self: ...
189
+ # TODO: could return NaT?
190
+ def tz_localize(
191
+ self,
192
+ tz: _TimeZones,
193
+ ambiguous: bool | Literal["raise", "NaT"] = ...,
194
+ nonexistent: TimestampNonexistent = ...,
195
+ ) -> Self: ...
196
+ def normalize(self) -> Self: ...
197
+ # TODO: round/floor/ceil could return NaT?
198
+ def round(
199
+ self,
200
+ freq: str,
201
+ ambiguous: bool | Literal["raise", "NaT"] = ...,
202
+ nonexistent: TimestampNonexistent = ...,
203
+ ) -> Self: ...
204
+ def floor(
205
+ self,
206
+ freq: str,
207
+ ambiguous: bool | Literal["raise", "NaT"] = ...,
208
+ nonexistent: TimestampNonexistent = ...,
209
+ ) -> Self: ...
210
+ def ceil(
211
+ self,
212
+ freq: str,
213
+ ambiguous: bool | Literal["raise", "NaT"] = ...,
214
+ nonexistent: TimestampNonexistent = ...,
215
+ ) -> Self: ...
216
+ def day_name(self, locale: str | None = ...) -> str: ...
217
+ def month_name(self, locale: str | None = ...) -> str: ...
218
+ @property
219
+ def day_of_week(self) -> int: ...
220
+ @property
221
+ def dayofweek(self) -> int: ...
222
+ @property
223
+ def day_of_year(self) -> int: ...
224
+ @property
225
+ def dayofyear(self) -> int: ...
226
+ @property
227
+ def quarter(self) -> int: ...
228
+ @property
229
+ def week(self) -> int: ...
230
+ def to_numpy(
231
+ self, dtype: np.dtype | None = ..., copy: bool = ...
232
+ ) -> np.datetime64: ...
233
+ @property
234
+ def _date_repr(self) -> str: ...
235
+ @property
236
+ def days_in_month(self) -> int: ...
237
+ @property
238
+ def daysinmonth(self) -> int: ...
239
+ @property
240
+ def unit(self) -> str: ...
241
+ def as_unit(self, unit: str, round_ok: bool = ...) -> Timestamp: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/timezones.pyi ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ datetime,
3
+ tzinfo,
4
+ )
5
+ from typing import Callable
6
+
7
+ import numpy as np
8
+
9
+ # imported from dateutil.tz
10
+ dateutil_gettz: Callable[[str], tzinfo]
11
+
12
+ def tz_standardize(tz: tzinfo) -> tzinfo: ...
13
+ def tz_compare(start: tzinfo | None, end: tzinfo | None) -> bool: ...
14
+ def infer_tzinfo(
15
+ start: datetime | None,
16
+ end: datetime | None,
17
+ ) -> tzinfo | None: ...
18
+ def maybe_get_tz(tz: str | int | np.int64 | tzinfo | None) -> tzinfo | None: ...
19
+ def get_timezone(tz: tzinfo) -> tzinfo | str: ...
20
+ def is_utc(tz: tzinfo | None) -> bool: ...
21
+ def is_fixed_offset(tz: tzinfo) -> bool: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/vectorized.pyi ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ For cython types that cannot be represented precisely, closest-available
3
+ python equivalents are used, and the precise types kept as adjacent comments.
4
+ """
5
+ from datetime import tzinfo
6
+
7
+ import numpy as np
8
+
9
+ from pandas._libs.tslibs.dtypes import Resolution
10
+ from pandas._typing import npt
11
+
12
+ def dt64arr_to_periodarr(
13
+ stamps: npt.NDArray[np.int64],
14
+ freq: int,
15
+ tz: tzinfo | None,
16
+ reso: int = ..., # NPY_DATETIMEUNIT
17
+ ) -> npt.NDArray[np.int64]: ...
18
+ def is_date_array_normalized(
19
+ stamps: npt.NDArray[np.int64],
20
+ tz: tzinfo | None,
21
+ reso: int, # NPY_DATETIMEUNIT
22
+ ) -> bool: ...
23
+ def normalize_i8_timestamps(
24
+ stamps: npt.NDArray[np.int64],
25
+ tz: tzinfo | None,
26
+ reso: int, # NPY_DATETIMEUNIT
27
+ ) -> npt.NDArray[np.int64]: ...
28
+ def get_resolution(
29
+ stamps: npt.NDArray[np.int64],
30
+ tz: tzinfo | None = ...,
31
+ reso: int = ..., # NPY_DATETIMEUNIT
32
+ ) -> Resolution: ...
33
+ def ints_to_pydatetime(
34
+ stamps: npt.NDArray[np.int64],
35
+ tz: tzinfo | None = ...,
36
+ box: str = ...,
37
+ reso: int = ..., # NPY_DATETIMEUNIT
38
+ ) -> npt.NDArray[np.object_]: ...
39
+ def tz_convert_from_utc(
40
+ stamps: npt.NDArray[np.int64],
41
+ tz: tzinfo | None,
42
+ reso: int = ..., # NPY_DATETIMEUNIT
43
+ ) -> npt.NDArray[np.int64]: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/__init__.py ADDED
File without changes
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (177 Bytes). View file
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/aggregations.pyi ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import (
2
+ Any,
3
+ Callable,
4
+ Literal,
5
+ )
6
+
7
+ import numpy as np
8
+
9
+ from pandas._typing import (
10
+ WindowingRankType,
11
+ npt,
12
+ )
13
+
14
+ def roll_sum(
15
+ values: np.ndarray, # const float64_t[:]
16
+ start: np.ndarray, # np.ndarray[np.int64]
17
+ end: np.ndarray, # np.ndarray[np.int64]
18
+ minp: int, # int64_t
19
+ ) -> np.ndarray: ... # np.ndarray[float]
20
+ def roll_mean(
21
+ values: np.ndarray, # const float64_t[:]
22
+ start: np.ndarray, # np.ndarray[np.int64]
23
+ end: np.ndarray, # np.ndarray[np.int64]
24
+ minp: int, # int64_t
25
+ ) -> np.ndarray: ... # np.ndarray[float]
26
+ def roll_var(
27
+ values: np.ndarray, # const float64_t[:]
28
+ start: np.ndarray, # np.ndarray[np.int64]
29
+ end: np.ndarray, # np.ndarray[np.int64]
30
+ minp: int, # int64_t
31
+ ddof: int = ...,
32
+ ) -> np.ndarray: ... # np.ndarray[float]
33
+ def roll_skew(
34
+ values: np.ndarray, # np.ndarray[np.float64]
35
+ start: np.ndarray, # np.ndarray[np.int64]
36
+ end: np.ndarray, # np.ndarray[np.int64]
37
+ minp: int, # int64_t
38
+ ) -> np.ndarray: ... # np.ndarray[float]
39
+ def roll_kurt(
40
+ values: np.ndarray, # np.ndarray[np.float64]
41
+ start: np.ndarray, # np.ndarray[np.int64]
42
+ end: np.ndarray, # np.ndarray[np.int64]
43
+ minp: int, # int64_t
44
+ ) -> np.ndarray: ... # np.ndarray[float]
45
+ def roll_median_c(
46
+ values: np.ndarray, # np.ndarray[np.float64]
47
+ start: np.ndarray, # np.ndarray[np.int64]
48
+ end: np.ndarray, # np.ndarray[np.int64]
49
+ minp: int, # int64_t
50
+ ) -> np.ndarray: ... # np.ndarray[float]
51
+ def roll_max(
52
+ values: np.ndarray, # np.ndarray[np.float64]
53
+ start: np.ndarray, # np.ndarray[np.int64]
54
+ end: np.ndarray, # np.ndarray[np.int64]
55
+ minp: int, # int64_t
56
+ ) -> np.ndarray: ... # np.ndarray[float]
57
+ def roll_min(
58
+ values: np.ndarray, # np.ndarray[np.float64]
59
+ start: np.ndarray, # np.ndarray[np.int64]
60
+ end: np.ndarray, # np.ndarray[np.int64]
61
+ minp: int, # int64_t
62
+ ) -> np.ndarray: ... # np.ndarray[float]
63
+ def roll_quantile(
64
+ values: np.ndarray, # const float64_t[:]
65
+ start: np.ndarray, # np.ndarray[np.int64]
66
+ end: np.ndarray, # np.ndarray[np.int64]
67
+ minp: int, # int64_t
68
+ quantile: float, # float64_t
69
+ interpolation: Literal["linear", "lower", "higher", "nearest", "midpoint"],
70
+ ) -> np.ndarray: ... # np.ndarray[float]
71
+ def roll_rank(
72
+ values: np.ndarray,
73
+ start: np.ndarray,
74
+ end: np.ndarray,
75
+ minp: int,
76
+ percentile: bool,
77
+ method: WindowingRankType,
78
+ ascending: bool,
79
+ ) -> np.ndarray: ... # np.ndarray[float]
80
+ def roll_apply(
81
+ obj: object,
82
+ start: np.ndarray, # np.ndarray[np.int64]
83
+ end: np.ndarray, # np.ndarray[np.int64]
84
+ minp: int, # int64_t
85
+ function: Callable[..., Any],
86
+ raw: bool,
87
+ args: tuple[Any, ...],
88
+ kwargs: dict[str, Any],
89
+ ) -> npt.NDArray[np.float64]: ...
90
+ def roll_weighted_sum(
91
+ values: np.ndarray, # const float64_t[:]
92
+ weights: np.ndarray, # const float64_t[:]
93
+ minp: int,
94
+ ) -> np.ndarray: ... # np.ndarray[np.float64]
95
+ def roll_weighted_mean(
96
+ values: np.ndarray, # const float64_t[:]
97
+ weights: np.ndarray, # const float64_t[:]
98
+ minp: int,
99
+ ) -> np.ndarray: ... # np.ndarray[np.float64]
100
+ def roll_weighted_var(
101
+ values: np.ndarray, # const float64_t[:]
102
+ weights: np.ndarray, # const float64_t[:]
103
+ minp: int, # int64_t
104
+ ddof: int, # unsigned int
105
+ ) -> np.ndarray: ... # np.ndarray[np.float64]
106
+ def ewm(
107
+ vals: np.ndarray, # const float64_t[:]
108
+ start: np.ndarray, # const int64_t[:]
109
+ end: np.ndarray, # const int64_t[:]
110
+ minp: int,
111
+ com: float, # float64_t
112
+ adjust: bool,
113
+ ignore_na: bool,
114
+ deltas: np.ndarray | None = None, # const float64_t[:]
115
+ normalize: bool = True,
116
+ ) -> np.ndarray: ... # np.ndarray[np.float64]
117
+ def ewmcov(
118
+ input_x: np.ndarray, # const float64_t[:]
119
+ start: np.ndarray, # const int64_t[:]
120
+ end: np.ndarray, # const int64_t[:]
121
+ minp: int,
122
+ input_y: np.ndarray, # const float64_t[:]
123
+ com: float, # float64_t
124
+ adjust: bool,
125
+ ignore_na: bool,
126
+ bias: bool,
127
+ ) -> np.ndarray: ... # np.ndarray[np.float64]
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/window/indexers.pyi ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas._typing import npt
4
+
5
+ def calculate_variable_window_bounds(
6
+ num_values: int, # int64_t
7
+ window_size: int, # int64_t
8
+ min_periods,
9
+ center: bool,
10
+ closed: str | None,
11
+ index: np.ndarray, # const int64_t[:]
12
+ ) -> tuple[npt.NDArray[np.int64], npt.NDArray[np.int64]]: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/arrays/__init__.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ All of pandas' ExtensionArrays.
3
+
4
+ See :ref:`extending.extension-types` for more.
5
+ """
6
+ from pandas.core.arrays import (
7
+ ArrowExtensionArray,
8
+ ArrowStringArray,
9
+ BooleanArray,
10
+ Categorical,
11
+ DatetimeArray,
12
+ FloatingArray,
13
+ IntegerArray,
14
+ IntervalArray,
15
+ NumpyExtensionArray,
16
+ PeriodArray,
17
+ SparseArray,
18
+ StringArray,
19
+ TimedeltaArray,
20
+ )
21
+
22
+ __all__ = [
23
+ "ArrowExtensionArray",
24
+ "ArrowStringArray",
25
+ "BooleanArray",
26
+ "Categorical",
27
+ "DatetimeArray",
28
+ "FloatingArray",
29
+ "IntegerArray",
30
+ "IntervalArray",
31
+ "NumpyExtensionArray",
32
+ "PeriodArray",
33
+ "SparseArray",
34
+ "StringArray",
35
+ "TimedeltaArray",
36
+ ]
37
+
38
+
39
+ def __getattr__(name: str) -> type[NumpyExtensionArray]:
40
+ if name == "PandasArray":
41
+ # GH#53694
42
+ import warnings
43
+
44
+ from pandas.util._exceptions import find_stack_level
45
+
46
+ warnings.warn(
47
+ "PandasArray has been renamed NumpyExtensionArray. Use that "
48
+ "instead. This alias will be removed in a future version.",
49
+ FutureWarning,
50
+ stacklevel=find_stack_level(),
51
+ )
52
+ return NumpyExtensionArray
53
+ raise AttributeError(f"module 'pandas.arrays' has no attribute '{name}'")
infer_4_30_0/lib/python3.10/site-packages/pandas/arrays/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.17 kB). View file
 
infer_4_30_0/lib/python3.10/site-packages/pandas/errors/__init__.py ADDED
@@ -0,0 +1,850 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Expose public exceptions & warnings
3
+ """
4
+ from __future__ import annotations
5
+
6
+ import ctypes
7
+
8
+ from pandas._config.config import OptionError
9
+
10
+ from pandas._libs.tslibs import (
11
+ OutOfBoundsDatetime,
12
+ OutOfBoundsTimedelta,
13
+ )
14
+
15
+ from pandas.util.version import InvalidVersion
16
+
17
+
18
+ class IntCastingNaNError(ValueError):
19
+ """
20
+ Exception raised when converting (``astype``) an array with NaN to an integer type.
21
+
22
+ Examples
23
+ --------
24
+ >>> pd.DataFrame(np.array([[1, np.nan], [2, 3]]), dtype="i8")
25
+ Traceback (most recent call last):
26
+ IntCastingNaNError: Cannot convert non-finite values (NA or inf) to integer
27
+ """
28
+
29
+
30
+ class NullFrequencyError(ValueError):
31
+ """
32
+ Exception raised when a ``freq`` cannot be null.
33
+
34
+ Particularly ``DatetimeIndex.shift``, ``TimedeltaIndex.shift``,
35
+ ``PeriodIndex.shift``.
36
+
37
+ Examples
38
+ --------
39
+ >>> df = pd.DatetimeIndex(["2011-01-01 10:00", "2011-01-01"], freq=None)
40
+ >>> df.shift(2)
41
+ Traceback (most recent call last):
42
+ NullFrequencyError: Cannot shift with no freq
43
+ """
44
+
45
+
46
+ class PerformanceWarning(Warning):
47
+ """
48
+ Warning raised when there is a possible performance impact.
49
+
50
+ Examples
51
+ --------
52
+ >>> df = pd.DataFrame({"jim": [0, 0, 1, 1],
53
+ ... "joe": ["x", "x", "z", "y"],
54
+ ... "jolie": [1, 2, 3, 4]})
55
+ >>> df = df.set_index(["jim", "joe"])
56
+ >>> df
57
+ jolie
58
+ jim joe
59
+ 0 x 1
60
+ x 2
61
+ 1 z 3
62
+ y 4
63
+ >>> df.loc[(1, 'z')] # doctest: +SKIP
64
+ # PerformanceWarning: indexing past lexsort depth may impact performance.
65
+ df.loc[(1, 'z')]
66
+ jolie
67
+ jim joe
68
+ 1 z 3
69
+ """
70
+
71
+
72
+ class UnsupportedFunctionCall(ValueError):
73
+ """
74
+ Exception raised when attempting to call a unsupported numpy function.
75
+
76
+ For example, ``np.cumsum(groupby_object)``.
77
+
78
+ Examples
79
+ --------
80
+ >>> df = pd.DataFrame({"A": [0, 0, 1, 1],
81
+ ... "B": ["x", "x", "z", "y"],
82
+ ... "C": [1, 2, 3, 4]}
83
+ ... )
84
+ >>> np.cumsum(df.groupby(["A"]))
85
+ Traceback (most recent call last):
86
+ UnsupportedFunctionCall: numpy operations are not valid with groupby.
87
+ Use .groupby(...).cumsum() instead
88
+ """
89
+
90
+
91
+ class UnsortedIndexError(KeyError):
92
+ """
93
+ Error raised when slicing a MultiIndex which has not been lexsorted.
94
+
95
+ Subclass of `KeyError`.
96
+
97
+ Examples
98
+ --------
99
+ >>> df = pd.DataFrame({"cat": [0, 0, 1, 1],
100
+ ... "color": ["white", "white", "brown", "black"],
101
+ ... "lives": [4, 4, 3, 7]},
102
+ ... )
103
+ >>> df = df.set_index(["cat", "color"])
104
+ >>> df
105
+ lives
106
+ cat color
107
+ 0 white 4
108
+ white 4
109
+ 1 brown 3
110
+ black 7
111
+ >>> df.loc[(0, "black"):(1, "white")]
112
+ Traceback (most recent call last):
113
+ UnsortedIndexError: 'Key length (2) was greater
114
+ than MultiIndex lexsort depth (1)'
115
+ """
116
+
117
+
118
+ class ParserError(ValueError):
119
+ """
120
+ Exception that is raised by an error encountered in parsing file contents.
121
+
122
+ This is a generic error raised for errors encountered when functions like
123
+ `read_csv` or `read_html` are parsing contents of a file.
124
+
125
+ See Also
126
+ --------
127
+ read_csv : Read CSV (comma-separated) file into a DataFrame.
128
+ read_html : Read HTML table into a DataFrame.
129
+
130
+ Examples
131
+ --------
132
+ >>> data = '''a,b,c
133
+ ... cat,foo,bar
134
+ ... dog,foo,"baz'''
135
+ >>> from io import StringIO
136
+ >>> pd.read_csv(StringIO(data), skipfooter=1, engine='python')
137
+ Traceback (most recent call last):
138
+ ParserError: ',' expected after '"'. Error could possibly be due
139
+ to parsing errors in the skipped footer rows
140
+ """
141
+
142
+
143
+ class DtypeWarning(Warning):
144
+ """
145
+ Warning raised when reading different dtypes in a column from a file.
146
+
147
+ Raised for a dtype incompatibility. This can happen whenever `read_csv`
148
+ or `read_table` encounter non-uniform dtypes in a column(s) of a given
149
+ CSV file.
150
+
151
+ See Also
152
+ --------
153
+ read_csv : Read CSV (comma-separated) file into a DataFrame.
154
+ read_table : Read general delimited file into a DataFrame.
155
+
156
+ Notes
157
+ -----
158
+ This warning is issued when dealing with larger files because the dtype
159
+ checking happens per chunk read.
160
+
161
+ Despite the warning, the CSV file is read with mixed types in a single
162
+ column which will be an object type. See the examples below to better
163
+ understand this issue.
164
+
165
+ Examples
166
+ --------
167
+ This example creates and reads a large CSV file with a column that contains
168
+ `int` and `str`.
169
+
170
+ >>> df = pd.DataFrame({'a': (['1'] * 100000 + ['X'] * 100000 +
171
+ ... ['1'] * 100000),
172
+ ... 'b': ['b'] * 300000}) # doctest: +SKIP
173
+ >>> df.to_csv('test.csv', index=False) # doctest: +SKIP
174
+ >>> df2 = pd.read_csv('test.csv') # doctest: +SKIP
175
+ ... # DtypeWarning: Columns (0) have mixed types
176
+
177
+ Important to notice that ``df2`` will contain both `str` and `int` for the
178
+ same input, '1'.
179
+
180
+ >>> df2.iloc[262140, 0] # doctest: +SKIP
181
+ '1'
182
+ >>> type(df2.iloc[262140, 0]) # doctest: +SKIP
183
+ <class 'str'>
184
+ >>> df2.iloc[262150, 0] # doctest: +SKIP
185
+ 1
186
+ >>> type(df2.iloc[262150, 0]) # doctest: +SKIP
187
+ <class 'int'>
188
+
189
+ One way to solve this issue is using the `dtype` parameter in the
190
+ `read_csv` and `read_table` functions to explicit the conversion:
191
+
192
+ >>> df2 = pd.read_csv('test.csv', sep=',', dtype={'a': str}) # doctest: +SKIP
193
+
194
+ No warning was issued.
195
+ """
196
+
197
+
198
class EmptyDataError(ValueError):
    """
    Exception raised in ``pd.read_csv`` when empty data or header is encountered.

    Examples
    --------
    >>> from io import StringIO
    >>> pd.read_csv(StringIO(""))
    Traceback (most recent call last):
    EmptyDataError: No columns to parse from file
    """
210
+
211
+
212
class ParserWarning(Warning):
    """
    Warning raised when reading a file that doesn't use the default 'c' parser.

    Emitted by `pd.read_csv` and `pd.read_table` when a parser change is
    required, generally a fallback from the default 'c' engine to 'python'.
    The fallback happens when the 'c' engine lacks support for a requested
    feature of the CSV file.  Options currently unsupported by 'c' include:

    1. `sep` other than a single character (e.g. regex separators)
    2. `skipfooter` higher than 0
    3. `sep=None` with `delim_whitespace=False`

    Passing `engine='python'` explicitly to `pd.read_csv` or `pd.read_table`
    avoids the warning.

    See Also
    --------
    pd.read_csv : Read CSV (comma-separated) file into DataFrame.
    pd.read_table : Read general delimited file into DataFrame.

    Examples
    --------
    Using a `sep` in `pd.read_csv` other than a single character:

    >>> import io
    >>> csv = '''a;b;c
    ... 1;1,8
    ... 1;2,1'''
    >>> df = pd.read_csv(io.StringIO(csv), sep='[;,]')  # doctest: +SKIP
    ... # ParserWarning: Falling back to the 'python' engine...

    Adding `engine='python'` to `pd.read_csv` removes the Warning:

    >>> df = pd.read_csv(io.StringIO(csv), sep='[;,]', engine='python')
    """
251
+
252
+
253
class MergeError(ValueError):
    """
    Exception raised when merging data.

    Subclass of ``ValueError``.

    Examples
    --------
    >>> left = pd.DataFrame({"a": ["a", "b", "b", "d"],
    ...                      "b": ["cat", "dog", "weasel", "horse"]})
    >>> right = pd.DataFrame({"a": ["a", "b", "c", "d"],
    ...                       "c": ["meow", "bark", "chirp", "nay"]}).set_index("a")
    >>> left.join(right, on="a", validate="one_to_one")
    Traceback (most recent call last):
    MergeError: Merge keys are not unique in left dataset; not a one-to-one merge
    """
271
+
272
+
273
class AbstractMethodError(NotImplementedError):
    """
    Raise this error instead of NotImplementedError for abstract methods.

    Parameters
    ----------
    class_instance : object
        For ``methodtype="classmethod"`` this is the class itself; otherwise
        it is the instance on which the abstract method was invoked.  Its
        (type) name is interpolated into the error message.
    methodtype : str, default "method"
        One of ``{"method", "classmethod", "staticmethod", "property"}``.

    Raises
    ------
    ValueError
        If ``methodtype`` is not one of the recognised kinds.

    Examples
    --------
    >>> class Foo:
    ...     @classmethod
    ...     def classmethod(cls):
    ...         raise pd.errors.AbstractMethodError(cls, methodtype="classmethod")
    ...     def method(self):
    ...         raise pd.errors.AbstractMethodError(self)
    >>> test = Foo.classmethod()
    Traceback (most recent call last):
    AbstractMethodError: This classmethod must be defined in the concrete class Foo
    >>> test2 = Foo().method()
    Traceback (most recent call last):
    AbstractMethodError: This method must be defined in the concrete class Foo
    """

    def __init__(self, class_instance, methodtype: str = "method") -> None:
        types = {"method", "classmethod", "staticmethod", "property"}
        if methodtype not in types:
            # BUG FIX: the original message had the operands swapped,
            # reporting the bad value as the valid set and vice versa.
            raise ValueError(
                f"methodtype must be one of {types}, got {methodtype} instead."
            )
        self.methodtype = methodtype
        self.class_instance = class_instance

    def __str__(self) -> str:
        if self.methodtype == "classmethod":
            # classmethods are passed the class object itself
            name = self.class_instance.__name__
        else:
            name = type(self.class_instance).__name__
        return f"This {self.methodtype} must be defined in the concrete class {name}"
309
+
310
+
311
class NumbaUtilError(Exception):
    """
    Error raised for unsupported Numba engine routines.

    Examples
    --------
    >>> df = pd.DataFrame({"key": ["a", "a", "b", "b"], "data": [1, 2, 3, 4]})
    >>> def incorrect_function(x):
    ...     return sum(x) * 2.7
    >>> df.groupby("key").agg(incorrect_function, engine="numba")  # doctest: +SKIP
    ... # NumbaUtilError: The first 2 arguments to incorrect_function
    ... # must be ['values', 'index']
    """
326
+
327
+
328
class DuplicateLabelError(ValueError):
    """
    Error raised when an operation would introduce duplicate labels.

    Examples
    --------
    >>> s = pd.Series([0, 1, 2], index=['a', 'b', 'c']).set_flags(
    ...     allows_duplicate_labels=False)
    >>> s.reindex(['a', 'a', 'b'])  # doctest: +SKIP
    ... # DuplicateLabelError: Index has duplicates.
    ... #       positions
    ... # label
    ... # a        [0, 1]
    """
345
+
346
+
347
class InvalidIndexError(Exception):
    """
    Exception raised when attempting to use an invalid index key.

    Examples
    --------
    >>> idx = pd.MultiIndex.from_product([["x", "y"], [0, 1]])
    >>> df = pd.DataFrame([[1, 1, 2, 2], [3, 3, 4, 4]], columns=idx)
    >>> df[:, 0]  # doctest: +SKIP
    ... # InvalidIndexError: (slice(None, None, None), 0)
    """
365
+
366
+
367
class DataError(Exception):
    """
    Exception raised when performing an operation on non-numerical data.

    For example, calling ``ohlc`` on a non-numerical column or a function
    on a rolling window.

    Examples
    --------
    >>> ser = pd.Series(['a', 'b', 'c'])
    >>> ser.rolling(2).sum()
    Traceback (most recent call last):
    DataError: No numeric types to aggregate
    """
381
+
382
+
383
class SpecificationError(Exception):
    """
    Exception raised by ``agg`` when the functions are ill-specified.

    Two scenarios trigger it: calling ``agg`` on a DataFrame or Series with a
    nested renamer (dict-of-dict), and calling ``agg`` on a DataFrame with
    duplicated function names and no assigned column names.

    Examples
    --------
    >>> df = pd.DataFrame({'A': [1, 1, 1, 2, 2],
    ...                    'B': range(5),
    ...                    'C': range(5)})
    >>> df.groupby('A').B.agg({'foo': 'count'})  # doctest: +SKIP
    ... # SpecificationError: nested renamer is not supported

    >>> df.groupby('A').agg({'B': {'foo': ['sum', 'max']}})  # doctest: +SKIP
    ... # SpecificationError: nested renamer is not supported

    >>> df.groupby('A').agg(['min', 'min'])  # doctest: +SKIP
    ... # SpecificationError: nested renamer is not supported
    """
409
+
410
+
411
class SettingWithCopyError(ValueError):
    """
    Exception raised when trying to set on a copied slice from a ``DataFrame``.

    The ``mode.chained_assignment`` needs to be set to 'raise.' This can
    happen unintentionally when chained indexing.

    For more information on evaluation order,
    see :ref:`the user guide<indexing.evaluation_order>`.

    For more information on view vs. copy,
    see :ref:`the user guide<indexing.view_versus_copy>`.

    Examples
    --------
    >>> pd.options.mode.chained_assignment = 'raise'
    >>> df = pd.DataFrame({'A': [1, 1, 1, 2, 2]}, columns=['A'])
    >>> df.loc[0:3]['A'] = 'a'  # doctest: +SKIP
    ... # SettingWithCopyError: A value is trying to be set on a copy of a...
    """
431
+
432
+
433
class SettingWithCopyWarning(Warning):
    """
    Warning raised when trying to set on a copied slice from a ``DataFrame``.

    The ``mode.chained_assignment`` needs to be set to 'warn.'
    'Warn' is the default option. This can happen unintentionally when
    chained indexing.

    For more information on evaluation order,
    see :ref:`the user guide<indexing.evaluation_order>`.

    For more information on view vs. copy,
    see :ref:`the user guide<indexing.view_versus_copy>`.

    Examples
    --------
    >>> df = pd.DataFrame({'A': [1, 1, 1, 2, 2]}, columns=['A'])
    >>> df.loc[0:3]['A'] = 'a'  # doctest: +SKIP
    ... # SettingWithCopyWarning: A value is trying to be set on a copy of a...
    """
453
+
454
+
455
class ChainedAssignmentError(Warning):
    """
    Warning raised when trying to set using chained assignment.

    With the ``mode.copy_on_write`` option enabled, chained assignment can
    never work: the target is always a temporary object produced by an
    indexing operation (getitem), and under Copy-on-Write that intermediate
    always behaves as a copy, so assigning through the chain cannot update
    the original Series or DataFrame.

    For more information on view vs. copy,
    see :ref:`the user guide<indexing.view_versus_copy>`.

    Examples
    --------
    >>> pd.options.mode.copy_on_write = True
    >>> df = pd.DataFrame({'A': [1, 1, 1, 2, 2]}, columns=['A'])
    >>> df["A"][0:3] = 10  # doctest: +SKIP
    ... # ChainedAssignmentError: ...
    >>> pd.options.mode.copy_on_write = False
    """
476
+
477
+
478
# Message used with ChainedAssignmentError for chained *indexing* assignment
# (e.g. ``df["col"][idx] = value``) under Copy-on-Write.
_chained_assignment_msg = (
    "A value is trying to be set on a copy of a DataFrame or Series "
    "through chained assignment.\n"
    "When using the Copy-on-Write mode, such chained assignment never works "
    "to update the original DataFrame or Series, because the intermediate "
    "object on which we are setting values always behaves as a copy.\n\n"
    "Try using '.loc[row_indexer, col_indexer] = value' instead, to perform "
    "the assignment in a single step.\n\n"
    "See the caveats in the documentation: "
    "https://pandas.pydata.org/pandas-docs/stable/user_guide/"
    "indexing.html#returning-a-view-versus-a-copy"
)
490
+
491
+
492
# Message used with ChainedAssignmentError for chained assignment through an
# *inplace method* (e.g. ``df[col].fillna(0, inplace=True)``) under
# Copy-on-Write.
_chained_assignment_method_msg = (
    "A value is trying to be set on a copy of a DataFrame or Series "
    "through chained assignment using an inplace method.\n"
    "When using the Copy-on-Write mode, such inplace method never works "
    "to update the original DataFrame or Series, because the intermediate "
    "object on which we are setting values always behaves as a copy.\n\n"
    "For example, when doing 'df[col].method(value, inplace=True)', try "
    "using 'df.method({col: value}, inplace=True)' instead, to perform "
    "the operation inplace on the original object.\n\n"
)
502
+
503
+
504
# Deprecation-style message warning that chained indexing assignment will stop
# working once Copy-on-Write becomes the default in pandas 3.0.
_chained_assignment_warning_msg = (
    "ChainedAssignmentError: behaviour will change in pandas 3.0!\n"
    "You are setting values through chained assignment. Currently this works "
    "in certain cases, but when using Copy-on-Write (which will become the "
    "default behaviour in pandas 3.0) this will never work to update the "
    "original DataFrame or Series, because the intermediate object on which "
    "we are setting values will behave as a copy.\n"
    "A typical example is when you are setting values in a column of a "
    "DataFrame, like:\n\n"
    'df["col"][row_indexer] = value\n\n'
    'Use `df.loc[row_indexer, "col"] = values` instead, to perform the '
    "assignment in a single step and ensure this keeps updating the original `df`.\n\n"
    "See the caveats in the documentation: "
    "https://pandas.pydata.org/pandas-docs/stable/user_guide/"
    "indexing.html#returning-a-view-versus-a-copy\n"
)
520
+
521
+
522
# Deprecation-style message warning that chained *inplace-method* assignment
# will stop working once Copy-on-Write becomes the default in pandas 3.0.
_chained_assignment_warning_method_msg = (
    "A value is trying to be set on a copy of a DataFrame or Series "
    "through chained assignment using an inplace method.\n"
    "The behavior will change in pandas 3.0. This inplace method will "
    "never work because the intermediate object on which we are setting "
    "values always behaves as a copy.\n\n"
    "For example, when doing 'df[col].method(value, inplace=True)', try "
    "using 'df.method({col: value}, inplace=True)' or "
    "df[col] = df[col].method(value) instead, to perform "
    "the operation inplace on the original object.\n\n"
)
533
+
534
+
535
+ def _check_cacher(obj):
536
+ # This is a mess, selection paths that return a view set the _cacher attribute
537
+ # on the Series; most of them also set _item_cache which adds 1 to our relevant
538
+ # reference count, but iloc does not, so we have to check if we are actually
539
+ # in the item cache
540
+ if hasattr(obj, "_cacher"):
541
+ parent = obj._cacher[1]()
542
+ # parent could be dead
543
+ if parent is None:
544
+ return False
545
+ if hasattr(parent, "_item_cache"):
546
+ if obj._cacher[0] in parent._item_cache:
547
+ # Check if we are actually the item from item_cache, iloc creates a
548
+ # new object
549
+ return obj is parent._item_cache[obj._cacher[0]]
550
+ return False
551
+
552
+
553
class NumExprClobberingError(NameError):
    """
    Exception raised when trying to use a built-in numexpr name as a variable name.

    ``eval`` or ``query`` raise it when the engine is 'numexpr' (the default
    for these methods whenever the numexpr package is installed).

    Examples
    --------
    >>> df = pd.DataFrame({'abs': [1, 1, 1]})
    >>> df.query("abs > 2")  # doctest: +SKIP
    ... # NumExprClobberingError: Variables in expression "(abs) > (2)" overlap...
    >>> sin, a = 1, 2
    >>> pd.eval("sin + a", engine='numexpr')  # doctest: +SKIP
    ... # NumExprClobberingError: Variables in expression "(sin) + (a)" overlap...
    """
570
+
571
+
572
+ class UndefinedVariableError(NameError):
573
+ """
574
+ Exception raised by ``query`` or ``eval`` when using an undefined variable name.
575
+
576
+ It will also specify whether the undefined variable is local or not.
577
+
578
+ Examples
579
+ --------
580
+ >>> df = pd.DataFrame({'A': [1, 1, 1]})
581
+ >>> df.query("A > x") # doctest: +SKIP
582
+ ... # UndefinedVariableError: name 'x' is not defined
583
+ >>> df.query("A > @y") # doctest: +SKIP
584
+ ... # UndefinedVariableError: local variable 'y' is not defined
585
+ >>> pd.eval('x + 1') # doctest: +SKIP
586
+ ... # UndefinedVariableError: name 'x' is not defined
587
+ """
588
+
589
+ def __init__(self, name: str, is_local: bool | None = None) -> None:
590
+ base_msg = f"{repr(name)} is not defined"
591
+ if is_local:
592
+ msg = f"local variable {base_msg}"
593
+ else:
594
+ msg = f"name {base_msg}"
595
+ super().__init__(msg)
596
+
597
+
598
class IndexingError(Exception):
    """
    Exception raised when indexing with a dimension mismatch.

    Examples
    --------
    >>> df = pd.DataFrame({'A': [1, 1, 1]})
    >>> df.loc[..., ..., 'A']  # doctest: +SKIP
    ... # IndexingError: indexer may only contain one '...' entry
    >>> df.loc[1, ..., ...]  # doctest: +SKIP
    ... # IndexingError: Too many indexers
    >>> df[pd.Series([True], dtype=bool)]  # doctest: +SKIP
    ... # IndexingError: Unalignable boolean Series provided as indexer...
    >>> s = pd.Series(range(2),
    ...               index=pd.MultiIndex.from_product([["a", "b"], ["c"]]))
    >>> s.loc["a", "c", "d"]  # doctest: +SKIP
    ... # IndexingError: Too many indexers
    """
617
+
618
+
619
class PyperclipException(RuntimeError):
    """
    Exception raised when clipboard functionality is unsupported.

    Raised by ``to_clipboard()`` and ``read_clipboard()``.
    """
625
+
626
+
627
class PyperclipWindowsException(PyperclipException):
    """
    Exception raised when clipboard functionality is unsupported by Windows.

    Access to the clipboard handle would be denied because another
    window process is accessing it.
    """

    def __init__(self, message: str) -> None:
        # Append the most recent Windows error for diagnostics.
        # ctypes.WinError only exists on Windows, so typing fails on other platforms
        message += f" ({ctypes.WinError()})"  # type: ignore[attr-defined]
        super().__init__(message)
639
+
640
+
641
class CSSWarning(UserWarning):
    """
    Warning is raised when converting css styling fails.

    This can be due to the styling not having an equivalent value or because the
    styling isn't properly formatted.

    Examples
    --------
    >>> df = pd.DataFrame({'A': [1, 1, 1]})
    >>> df.style.applymap(
    ...     lambda x: 'background-color: blueGreenRed;'
    ... ).to_excel('styled.xlsx')  # doctest: +SKIP
    CSSWarning: Unhandled color format: 'blueGreenRed'
    >>> df.style.applymap(
    ...     lambda x: 'border: 1px solid red red;'
    ... ).to_excel('styled.xlsx')  # doctest: +SKIP
    CSSWarning: Too many tokens provided to "border" (expected 1-3)
    """
660
+
661
+
662
class PossibleDataLossError(Exception):
    """
    Exception raised when trying to open a HDFStore file when already opened.

    Examples
    --------
    >>> store = pd.HDFStore('my-store', 'a')  # doctest: +SKIP
    >>> store.open("w")  # doctest: +SKIP
    ... # PossibleDataLossError: Re-opening the file [my-store] with mode [a]...
    """
672
+
673
+
674
class ClosedFileError(Exception):
    """
    Exception raised when operating on a closed HDFStore file.

    Examples
    --------
    >>> store = pd.HDFStore('my-store', 'a')  # doctest: +SKIP
    >>> store.close()  # doctest: +SKIP
    >>> store.keys()  # doctest: +SKIP
    ... # ClosedFileError: my-store file is not open!
    """
685
+
686
+
687
class IncompatibilityWarning(Warning):
    """
    Warning raised when attempting where criteria on an incompatible HDF5 file.
    """
691
+
692
+
693
class AttributeConflictWarning(Warning):
    """
    Warning raised when index attributes conflict when using HDFStore.

    Occurs when appending an index whose name, or whose frequency, differs
    from that of the index already stored in the HDFStore.

    Examples
    --------
    >>> idx1 = pd.Index(['a', 'b'], name='name1')
    >>> df1 = pd.DataFrame([[1, 2], [3, 4]], index=idx1)
    >>> df1.to_hdf('file', 'data', 'w', append=True)  # doctest: +SKIP
    >>> idx2 = pd.Index(['c', 'd'], name='name2')
    >>> df2 = pd.DataFrame([[5, 6], [7, 8]], index=idx2)
    >>> df2.to_hdf('file', 'data', 'a', append=True)  # doctest: +SKIP
    AttributeConflictWarning: the [index_name] attribute of the existing index is
    [name1] which conflicts with the new [name2]...
    """
712
+
713
+
714
class DatabaseError(OSError):
    """
    Error raised when executing sql with bad syntax or sql that throws an error.

    Examples
    --------
    >>> from sqlite3 import connect
    >>> conn = connect(':memory:')
    >>> pd.read_sql('select * test', conn)  # doctest: +SKIP
    ... # DatabaseError: Execution failed on sql 'test': near "test": syntax error
    """
725
+
726
+
727
class PossiblePrecisionLoss(Warning):
    """
    Warning raised by to_stata on a column with a value outside or equal to int64.

    Such a column is converted to a float64 dtype on write, which may lose
    precision.

    Examples
    --------
    >>> df = pd.DataFrame({"s": pd.Series([1, 2**53], dtype=np.int64)})
    >>> df.to_stata('test')  # doctest: +SKIP
    ... # PossiblePrecisionLoss: Column converted from int64 to float64...
    """
740
+
741
+
742
class ValueLabelTypeMismatch(Warning):
    """
    Warning raised by to_stata on a category column that contains non-string values.

    Examples
    --------
    >>> df = pd.DataFrame({"categories": pd.Series(["a", 2], dtype="category")})
    >>> df.to_stata('test')  # doctest: +SKIP
    ... # ValueLabelTypeMismatch: Stata value labels (pandas categories) must be str...
    """
752
+
753
+
754
class InvalidColumnName(Warning):
    """
    Warning raised by to_stata when the column contains a non-valid stata name.

    Because the column name is an invalid Stata variable, the name needs to be
    converted.

    Examples
    --------
    >>> df = pd.DataFrame({"0categories": pd.Series([2, 2])})
    >>> df.to_stata('test')  # doctest: +SKIP
    ... # InvalidColumnName: Not all pandas column names were valid Stata variable...
    """
767
+
768
+
769
class CategoricalConversionWarning(Warning):
    """
    Warning raised when reading a partial labeled Stata file using an iterator.

    Examples
    --------
    >>> from pandas.io.stata import StataReader
    >>> with StataReader('dta_file', chunksize=2) as reader:  # doctest: +SKIP
    ...     for i, block in enumerate(reader):
    ...         print(i, block)
    ... # CategoricalConversionWarning: One or more series with value labels...
    """
781
+
782
+
783
class LossySetitemError(Exception):
    """
    Raised when a ``__setitem__`` on an np.ndarray would not be lossless.

    Notes
    -----
    This is an internal error.
    """
791
+
792
+
793
class NoBufferPresent(Exception):
    """
    Raised in ``_get_data_buffer`` to signal that there is no requested buffer.
    """
797
+
798
+
799
class InvalidComparison(Exception):
    """
    Raised by ``_validate_comparison_value`` to indicate an invalid comparison.

    Notes
    -----
    This is an internal error.
    """
807
+
808
+
809
# Public export list for ``pandas.errors``.
# NOTE(review): ``ChainedAssignmentError`` is defined above with a public-style
# docstring but is not listed here, and the ``_chained_assignment_*`` message
# constants and ``_check_cacher`` are private by convention — presumably
# intentional; confirm before relying on ``from pandas.errors import *``
# exposing them. Several names (e.g. ``IntCastingNaNError``, ``OptionError``)
# are re-exports defined elsewhere in this module/file.
__all__ = [
    "AbstractMethodError",
    "AttributeConflictWarning",
    "CategoricalConversionWarning",
    "ClosedFileError",
    "CSSWarning",
    "DatabaseError",
    "DataError",
    "DtypeWarning",
    "DuplicateLabelError",
    "EmptyDataError",
    "IncompatibilityWarning",
    "IntCastingNaNError",
    "InvalidColumnName",
    "InvalidComparison",
    "InvalidIndexError",
    "InvalidVersion",
    "IndexingError",
    "LossySetitemError",
    "MergeError",
    "NoBufferPresent",
    "NullFrequencyError",
    "NumbaUtilError",
    "NumExprClobberingError",
    "OptionError",
    "OutOfBoundsDatetime",
    "OutOfBoundsTimedelta",
    "ParserError",
    "ParserWarning",
    "PerformanceWarning",
    "PossibleDataLossError",
    "PossiblePrecisionLoss",
    "PyperclipException",
    "PyperclipWindowsException",
    "SettingWithCopyError",
    "SettingWithCopyWarning",
    "SpecificationError",
    "UndefinedVariableError",
    "UnsortedIndexError",
    "UnsupportedFunctionCall",
    "ValueLabelTypeMismatch",
]