ZTWHHH committed on
Commit 09b651f · verified · 1 parent: fdfe567

Add files using upload-large-folder tool

Files changed (50)
  1. infer_4_30_0/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc +0 -0
  2. infer_4_30_0/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc +0 -0
  3. infer_4_30_0/lib/python3.10/site-packages/_distutils_hack/override.py +1 -0
  4. infer_4_30_0/lib/python3.10/site-packages/pandas/__init__.py +367 -0
  5. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/__init__.py +27 -0
  6. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/algos.pyi +416 -0
  7. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/arrays.pyi +40 -0
  8. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/byteswap.cpython-310-x86_64-linux-gnu.so +0 -0
  9. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/byteswap.pyi +5 -0
  10. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/groupby.pyi +216 -0
  11. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/hashing.pyi +9 -0
  12. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/hashtable.pyi +252 -0
  13. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/index.pyi +100 -0
  14. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/indexing.cpython-310-x86_64-linux-gnu.so +0 -0
  15. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/indexing.pyi +17 -0
  16. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/internals.pyi +94 -0
  17. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/interval.pyi +174 -0
  18. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/join.pyi +79 -0
  19. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/json.cpython-310-x86_64-linux-gnu.so +0 -0
  20. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/json.pyi +23 -0
  21. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/lib.pyi +213 -0
  22. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/missing.pyi +16 -0
  23. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/ops.pyi +51 -0
  24. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/ops_dispatch.cpython-310-x86_64-linux-gnu.so +0 -0
  25. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/ops_dispatch.pyi +5 -0
  26. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/pandas_datetime.cpython-310-x86_64-linux-gnu.so +0 -0
  27. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/pandas_parser.cpython-310-x86_64-linux-gnu.so +0 -0
  28. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/parsers.pyi +77 -0
  29. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/properties.cpython-310-x86_64-linux-gnu.so +0 -0
  30. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/properties.pyi +27 -0
  31. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/reshape.pyi +16 -0
  32. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/sas.pyi +7 -0
  33. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/sparse.pyi +51 -0
  34. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/testing.pyi +12 -0
  35. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslib.pyi +37 -0
  36. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/tzconversion.pyi +21 -0
  37. infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/writers.pyi +20 -0
  38. infer_4_30_0/lib/python3.10/site-packages/pandas/_typing.py +525 -0
  39. infer_4_30_0/lib/python3.10/site-packages/pandas/_version.py +692 -0
  40. infer_4_30_0/lib/python3.10/site-packages/pandas/_version_meson.py +2 -0
  41. infer_4_30_0/lib/python3.10/site-packages/pandas/conftest.py +1980 -0
  42. infer_4_30_0/lib/python3.10/site-packages/pandas/pyproject.toml +811 -0
  43. infer_4_30_0/lib/python3.10/site-packages/pandas/testing.py +18 -0
  44. infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/INSTALLER +1 -0
  45. infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/LICENSE +29 -0
  46. infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/METADATA +146 -0
  47. infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/RECORD +385 -0
  48. infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/REQUESTED +0 -0
  49. infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/WHEEL +5 -0
  50. infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/top_level.txt +1 -0
infer_4_30_0/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (8.22 kB).
 
infer_4_30_0/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc ADDED
Binary file (224 Bytes).
 
infer_4_30_0/lib/python3.10/site-packages/_distutils_hack/override.py ADDED
@@ -0,0 +1 @@
+ __import__('_distutils_hack').do_override()
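For context: this one-liner is setuptools' standard _distutils_hack trigger, normally executed at interpreter startup via a distutils-precedence.pth file. A minimal sketch of the observable effect, assuming a recent setuptools with SETUPTOOLS_USE_DISTUTILS=local (its default):

# Sketch: do_override() installs an import finder so that a plain
# `import distutils` resolves to setuptools' vendored copy rather than
# the deprecated stdlib module. Assumes setuptools is installed.
import distutils

print(distutils.__file__)  # typically .../site-packages/setuptools/_distutils/__init__.py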
infer_4_30_0/lib/python3.10/site-packages/pandas/__init__.py ADDED
@@ -0,0 +1,367 @@
+ from __future__ import annotations
+
+ import os
+ import warnings
+
+ __docformat__ = "restructuredtext"
+
+ # Let users know if they're missing any of our hard dependencies
+ _hard_dependencies = ("numpy", "pytz", "dateutil")
+ _missing_dependencies = []
+
+ for _dependency in _hard_dependencies:
+     try:
+         __import__(_dependency)
+     except ImportError as _e:  # pragma: no cover
+         _missing_dependencies.append(f"{_dependency}: {_e}")
+
+ if _missing_dependencies:  # pragma: no cover
+     raise ImportError(
+         "Unable to import required dependencies:\n" + "\n".join(_missing_dependencies)
+     )
+ del _hard_dependencies, _dependency, _missing_dependencies
+
+ try:
+     # numpy compat
+     from pandas.compat import (
+         is_numpy_dev as _is_numpy_dev,  # pyright: ignore[reportUnusedImport] # noqa: F401
+     )
+ except ImportError as _err:  # pragma: no cover
+     _module = _err.name
+     raise ImportError(
+         f"C extension: {_module} not built. If you want to import "
+         "pandas from the source directory, you may need to run "
+         "'python setup.py build_ext' to build the C extensions first."
+     ) from _err
+
+ from pandas._config import (
+     get_option,
+     set_option,
+     reset_option,
+     describe_option,
+     option_context,
+     options,
+ )
+
+ # let init-time option registration happen
+ import pandas.core.config_init  # pyright: ignore[reportUnusedImport] # noqa: F401
+
+ from pandas.core.api import (
+     # dtype
+     ArrowDtype,
+     Int8Dtype,
+     Int16Dtype,
+     Int32Dtype,
+     Int64Dtype,
+     UInt8Dtype,
+     UInt16Dtype,
+     UInt32Dtype,
+     UInt64Dtype,
+     Float32Dtype,
+     Float64Dtype,
+     CategoricalDtype,
+     PeriodDtype,
+     IntervalDtype,
+     DatetimeTZDtype,
+     StringDtype,
+     BooleanDtype,
+     # missing
+     NA,
+     isna,
+     isnull,
+     notna,
+     notnull,
+     # indexes
+     Index,
+     CategoricalIndex,
+     RangeIndex,
+     MultiIndex,
+     IntervalIndex,
+     TimedeltaIndex,
+     DatetimeIndex,
+     PeriodIndex,
+     IndexSlice,
+     # tseries
+     NaT,
+     Period,
+     period_range,
+     Timedelta,
+     timedelta_range,
+     Timestamp,
+     date_range,
+     bdate_range,
+     Interval,
+     interval_range,
+     DateOffset,
+     # conversion
+     to_numeric,
+     to_datetime,
+     to_timedelta,
+     # misc
+     Flags,
+     Grouper,
+     factorize,
+     unique,
+     value_counts,
+     NamedAgg,
+     array,
+     Categorical,
+     set_eng_float_format,
+     Series,
+     DataFrame,
+ )
+
+ from pandas.core.dtypes.dtypes import SparseDtype
+
+ from pandas.tseries.api import infer_freq
+ from pandas.tseries import offsets
+
+ from pandas.core.computation.api import eval
+
+ from pandas.core.reshape.api import (
+     concat,
+     lreshape,
+     melt,
+     wide_to_long,
+     merge,
+     merge_asof,
+     merge_ordered,
+     crosstab,
+     pivot,
+     pivot_table,
+     get_dummies,
+     from_dummies,
+     cut,
+     qcut,
+ )
+
+ from pandas import api, arrays, errors, io, plotting, tseries
+ from pandas import testing
+ from pandas.util._print_versions import show_versions
+
+ from pandas.io.api import (
+     # excel
+     ExcelFile,
+     ExcelWriter,
+     read_excel,
+     # parsers
+     read_csv,
+     read_fwf,
+     read_table,
+     # pickle
+     read_pickle,
+     to_pickle,
+     # pytables
+     HDFStore,
+     read_hdf,
+     # sql
+     read_sql,
+     read_sql_query,
+     read_sql_table,
+     # misc
+     read_clipboard,
+     read_parquet,
+     read_orc,
+     read_feather,
+     read_gbq,
+     read_html,
+     read_xml,
+     read_json,
+     read_stata,
+     read_sas,
+     read_spss,
+ )
+
+ from pandas.io.json._normalize import json_normalize
+
+ from pandas.util._tester import test
+
+ # use the closest tagged version if possible
+ _built_with_meson = False
+ try:
+     from pandas._version_meson import (  # pyright: ignore [reportMissingImports]
+         __version__,
+         __git_version__,
+     )
+
+     _built_with_meson = True
+ except ImportError:
+     from pandas._version import get_versions
+
+     v = get_versions()
+     __version__ = v.get("closest-tag", v["version"])
+     __git_version__ = v.get("full-revisionid")
+     del get_versions, v
+
+ # GH#55043 - deprecation of the data_manager option
+ if "PANDAS_DATA_MANAGER" in os.environ:
+     warnings.warn(
+         "The env variable PANDAS_DATA_MANAGER is set. The data_manager option is "
+         "deprecated and will be removed in a future version. Only the BlockManager "
+         "will be available. Unset this environment variable to silence this warning.",
+         FutureWarning,
+         stacklevel=2,
+     )
+
+ del warnings, os
+
+ # module level doc-string
+ __doc__ = """
+ pandas - a powerful data analysis and manipulation library for Python
+ =====================================================================
+
+ **pandas** is a Python package providing fast, flexible, and expressive data
+ structures designed to make working with "relational" or "labeled" data both
+ easy and intuitive. It aims to be the fundamental high-level building block for
+ doing practical, **real world** data analysis in Python. Additionally, it has
+ the broader goal of becoming **the most powerful and flexible open source data
+ analysis / manipulation tool available in any language**. It is already well on
+ its way toward this goal.
+
+ Main Features
+ -------------
+ Here are just a few of the things that pandas does well:
+
+   - Easy handling of missing data in floating point as well as non-floating
+     point data.
+   - Size mutability: columns can be inserted and deleted from DataFrame and
+     higher dimensional objects
+   - Automatic and explicit data alignment: objects can be explicitly aligned
+     to a set of labels, or the user can simply ignore the labels and let
+     `Series`, `DataFrame`, etc. automatically align the data for you in
+     computations.
+   - Powerful, flexible group by functionality to perform split-apply-combine
+     operations on data sets, for both aggregating and transforming data.
+   - Make it easy to convert ragged, differently-indexed data in other Python
+     and NumPy data structures into DataFrame objects.
+   - Intelligent label-based slicing, fancy indexing, and subsetting of large
+     data sets.
+   - Intuitive merging and joining data sets.
+   - Flexible reshaping and pivoting of data sets.
+   - Hierarchical labeling of axes (possible to have multiple labels per tick).
+   - Robust IO tools for loading data from flat files (CSV and delimited),
+     Excel files, databases, and saving/loading data from the ultrafast HDF5
+     format.
+   - Time series-specific functionality: date range generation and frequency
+     conversion, moving window statistics, date shifting and lagging.
+ """
+
+ # Use __all__ to let type checkers know what is part of the public API.
+ # Pandas is not (yet) a py.typed library: the public API is determined
+ # based on the documentation.
+ __all__ = [
+     "ArrowDtype",
+     "BooleanDtype",
+     "Categorical",
+     "CategoricalDtype",
+     "CategoricalIndex",
+     "DataFrame",
+     "DateOffset",
+     "DatetimeIndex",
+     "DatetimeTZDtype",
+     "ExcelFile",
+     "ExcelWriter",
+     "Flags",
+     "Float32Dtype",
+     "Float64Dtype",
+     "Grouper",
+     "HDFStore",
+     "Index",
+     "IndexSlice",
+     "Int16Dtype",
+     "Int32Dtype",
+     "Int64Dtype",
+     "Int8Dtype",
+     "Interval",
+     "IntervalDtype",
+     "IntervalIndex",
+     "MultiIndex",
+     "NA",
+     "NaT",
+     "NamedAgg",
+     "Period",
+     "PeriodDtype",
+     "PeriodIndex",
+     "RangeIndex",
+     "Series",
+     "SparseDtype",
+     "StringDtype",
+     "Timedelta",
+     "TimedeltaIndex",
+     "Timestamp",
+     "UInt16Dtype",
+     "UInt32Dtype",
+     "UInt64Dtype",
+     "UInt8Dtype",
+     "api",
+     "array",
+     "arrays",
+     "bdate_range",
+     "concat",
+     "crosstab",
+     "cut",
+     "date_range",
+     "describe_option",
+     "errors",
+     "eval",
+     "factorize",
+     "get_dummies",
+     "from_dummies",
+     "get_option",
+     "infer_freq",
+     "interval_range",
+     "io",
+     "isna",
+     "isnull",
+     "json_normalize",
+     "lreshape",
+     "melt",
+     "merge",
+     "merge_asof",
+     "merge_ordered",
+     "notna",
+     "notnull",
+     "offsets",
+     "option_context",
+     "options",
+     "period_range",
+     "pivot",
+     "pivot_table",
+     "plotting",
+     "qcut",
+     "read_clipboard",
+     "read_csv",
+     "read_excel",
+     "read_feather",
+     "read_fwf",
+     "read_gbq",
+     "read_hdf",
+     "read_html",
+     "read_json",
+     "read_orc",
+     "read_parquet",
+     "read_pickle",
+     "read_sas",
+     "read_spss",
+     "read_sql",
+     "read_sql_query",
+     "read_sql_table",
+     "read_stata",
+     "read_table",
+     "read_xml",
+     "reset_option",
+     "set_eng_float_format",
+     "set_option",
+     "show_versions",
+     "test",
+     "testing",
+     "timedelta_range",
+     "to_datetime",
+     "to_numeric",
+     "to_pickle",
+     "to_timedelta",
+     "tseries",
+     "unique",
+     "value_counts",
+     "wide_to_long",
+ ]
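For orientation, the names re-exported above make up the public pandas namespace. A minimal usage sketch (assumes pandas and its hard dependencies numpy, pytz, and dateutil are installed):

# Exercises a few of the re-exported names: DataFrame, groupby, Timestamp math.
import pandas as pd

df = pd.DataFrame({"group": ["a", "a", "b"], "value": [1, 2, 3]})
print(pd.__version__)                      # resolved via _version_meson or _version above
print(df.groupby("group")["value"].sum())  # a -> 3, b -> 3
print(pd.to_datetime("2024-01-31") + pd.Timedelta(days=1))  # 2024-02-01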
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/__init__.py ADDED
@@ -0,0 +1,27 @@
+ __all__ = [
+     "NaT",
+     "NaTType",
+     "OutOfBoundsDatetime",
+     "Period",
+     "Timedelta",
+     "Timestamp",
+     "iNaT",
+     "Interval",
+ ]
+
+
+ # Below imports needs to happen first to ensure pandas top level
+ # module gets monkeypatched with the pandas_datetime_CAPI
+ # see pandas_datetime_exec in pd_datetime.c
+ import pandas._libs.pandas_parser  # isort: skip # type: ignore[reportUnusedImport]
+ import pandas._libs.pandas_datetime  # noqa: F401 # isort: skip # type: ignore[reportUnusedImport]
+ from pandas._libs.interval import Interval
+ from pandas._libs.tslibs import (
+     NaT,
+     NaTType,
+     OutOfBoundsDatetime,
+     Period,
+     Timedelta,
+     Timestamp,
+     iNaT,
+ )
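The comment in the module is the key constraint: pandas_parser and pandas_datetime must be imported first so the top-level module is monkeypatched with the pandas_datetime_CAPI before any tslibs code runs. A short sketch of the re-exported scalars (assumes a built pandas):

import pandas._libs as libs

ts = libs.Timestamp("2024-01-31")
print(ts + libs.Timedelta("1D"))          # Timestamp('2024-02-01 00:00:00')
print(libs.Timestamp("NaT") is libs.NaT)  # True: NaT is a singleton
print(libs.iNaT)                          # -9223372036854775808, the int64 sentinel for NaT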
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/algos.pyi ADDED
@@ -0,0 +1,416 @@
+ from typing import Any
+
+ import numpy as np
+
+ from pandas._typing import npt
+
+ class Infinity:
+     def __eq__(self, other) -> bool: ...
+     def __ne__(self, other) -> bool: ...
+     def __lt__(self, other) -> bool: ...
+     def __le__(self, other) -> bool: ...
+     def __gt__(self, other) -> bool: ...
+     def __ge__(self, other) -> bool: ...
+
+ class NegInfinity:
+     def __eq__(self, other) -> bool: ...
+     def __ne__(self, other) -> bool: ...
+     def __lt__(self, other) -> bool: ...
+     def __le__(self, other) -> bool: ...
+     def __gt__(self, other) -> bool: ...
+     def __ge__(self, other) -> bool: ...
+
+ def unique_deltas(
+     arr: np.ndarray,  # const int64_t[:]
+ ) -> np.ndarray: ...  # np.ndarray[np.int64, ndim=1]
+ def is_lexsorted(list_of_arrays: list[npt.NDArray[np.int64]]) -> bool: ...
+ def groupsort_indexer(
+     index: np.ndarray,  # const int64_t[:]
+     ngroups: int,
+ ) -> tuple[
+     np.ndarray,  # ndarray[int64_t, ndim=1]
+     np.ndarray,  # ndarray[int64_t, ndim=1]
+ ]: ...
+ def kth_smallest(
+     arr: np.ndarray,  # numeric[:]
+     k: int,
+ ) -> Any: ...  # numeric
+
+ # ----------------------------------------------------------------------
+ # Pairwise correlation/covariance
+
+ def nancorr(
+     mat: npt.NDArray[np.float64],  # const float64_t[:, :]
+     cov: bool = ...,
+     minp: int | None = ...,
+ ) -> npt.NDArray[np.float64]: ...  # ndarray[float64_t, ndim=2]
+ def nancorr_spearman(
+     mat: npt.NDArray[np.float64],  # ndarray[float64_t, ndim=2]
+     minp: int = ...,
+ ) -> npt.NDArray[np.float64]: ...  # ndarray[float64_t, ndim=2]
+
+ # ----------------------------------------------------------------------
+
+ def validate_limit(nobs: int | None, limit=...) -> int: ...
+ def get_fill_indexer(
+     mask: npt.NDArray[np.bool_],
+     limit: int | None = None,
+ ) -> npt.NDArray[np.intp]: ...
+ def pad(
+     old: np.ndarray,  # ndarray[numeric_object_t]
+     new: np.ndarray,  # ndarray[numeric_object_t]
+     limit=...,
+ ) -> npt.NDArray[np.intp]: ...  # np.ndarray[np.intp, ndim=1]
+ def pad_inplace(
+     values: np.ndarray,  # numeric_object_t[:]
+     mask: np.ndarray,  # uint8_t[:]
+     limit=...,
+ ) -> None: ...
+ def pad_2d_inplace(
+     values: np.ndarray,  # numeric_object_t[:, :]
+     mask: np.ndarray,  # const uint8_t[:, :]
+     limit=...,
+ ) -> None: ...
+ def backfill(
+     old: np.ndarray,  # ndarray[numeric_object_t]
+     new: np.ndarray,  # ndarray[numeric_object_t]
+     limit=...,
+ ) -> npt.NDArray[np.intp]: ...  # np.ndarray[np.intp, ndim=1]
+ def backfill_inplace(
+     values: np.ndarray,  # numeric_object_t[:]
+     mask: np.ndarray,  # uint8_t[:]
+     limit=...,
+ ) -> None: ...
+ def backfill_2d_inplace(
+     values: np.ndarray,  # numeric_object_t[:, :]
+     mask: np.ndarray,  # const uint8_t[:, :]
+     limit=...,
+ ) -> None: ...
+ def is_monotonic(
+     arr: np.ndarray,  # ndarray[numeric_object_t, ndim=1]
+     timelike: bool,
+ ) -> tuple[bool, bool, bool]: ...
+
+ # ----------------------------------------------------------------------
+ # rank_1d, rank_2d
+ # ----------------------------------------------------------------------
+
+ def rank_1d(
+     values: np.ndarray,  # ndarray[numeric_object_t, ndim=1]
+     labels: np.ndarray | None = ...,  # const int64_t[:]=None
+     is_datetimelike: bool = ...,
+     ties_method=...,
+     ascending: bool = ...,
+     pct: bool = ...,
+     na_option=...,
+     mask: npt.NDArray[np.bool_] | None = ...,
+ ) -> np.ndarray: ...  # np.ndarray[float64_t, ndim=1]
+ def rank_2d(
+     in_arr: np.ndarray,  # ndarray[numeric_object_t, ndim=2]
+     axis: int = ...,
+     is_datetimelike: bool = ...,
+     ties_method=...,
+     ascending: bool = ...,
+     na_option=...,
+     pct: bool = ...,
+ ) -> np.ndarray: ...  # np.ndarray[float64_t, ndim=1]
+ def diff_2d(
+     arr: np.ndarray,  # ndarray[diff_t, ndim=2]
+     out: np.ndarray,  # ndarray[out_t, ndim=2]
+     periods: int,
+     axis: int,
+     datetimelike: bool = ...,
+ ) -> None: ...
+ def ensure_platform_int(arr: object) -> npt.NDArray[np.intp]: ...
+ def ensure_object(arr: object) -> npt.NDArray[np.object_]: ...
+ def ensure_float64(arr: object) -> npt.NDArray[np.float64]: ...
+ def ensure_int8(arr: object) -> npt.NDArray[np.int8]: ...
+ def ensure_int16(arr: object) -> npt.NDArray[np.int16]: ...
+ def ensure_int32(arr: object) -> npt.NDArray[np.int32]: ...
+ def ensure_int64(arr: object) -> npt.NDArray[np.int64]: ...
+ def ensure_uint64(arr: object) -> npt.NDArray[np.uint64]: ...
+ def take_1d_int8_int8(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int8_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int8_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int8_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int16_int16(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int16_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int16_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int16_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int32_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int32_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int32_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int64_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_int64_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_float32_float32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_float32_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_float64_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_object_object(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_bool_bool(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_1d_bool_object(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int8_int8(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int8_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int8_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int8_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int16_int16(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int16_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int16_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int16_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int32_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int32_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int32_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int64_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_int64_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_float32_float32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_float32_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_float64_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_object_object(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_bool_bool(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis0_bool_object(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int8_int8(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int8_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int8_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int8_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int16_int16(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int16_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int16_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int16_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int32_int32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int32_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int32_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int64_int64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_int64_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_float32_float32(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_float32_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_float64_float64(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_object_object(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_bool_bool(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_axis1_bool_object(
+     values: np.ndarray, indexer: npt.NDArray[np.intp], out: np.ndarray, fill_value=...
+ ) -> None: ...
+ def take_2d_multi_int8_int8(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int8_int32(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int8_int64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int8_float64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int16_int16(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int16_int32(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int16_int64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int16_float64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int32_int32(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int32_int64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int32_float64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int64_float64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_float32_float32(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_float32_float64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_float64_float64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_object_object(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_bool_bool(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_bool_object(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
+ def take_2d_multi_int64_int64(
+     values: np.ndarray,
+     indexer: tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]],
+     out: np.ndarray,
+     fill_value=...,
+ ) -> None: ...
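These are internal Cython kernels rather than public API, so treat the following as a hedged sketch that simply exercises two of the declared signatures:

import numpy as np
from pandas._libs import algos

# is_lexsorted: are these int64 label arrays lexicographically sorted?
keys = [np.array([0, 0, 1, 1], dtype=np.int64),
        np.array([0, 1, 0, 1], dtype=np.int64)]
print(algos.is_lexsorted(keys))  # True

# ensure_int64: coerce an arbitrary array-like to an int64 ndarray.
print(algos.ensure_int64([1, 2, 3]).dtype)  # int64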
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/arrays.pyi ADDED
@@ -0,0 +1,40 @@
+ from typing import Sequence
+
+ import numpy as np
+
+ from pandas._typing import (
+     AxisInt,
+     DtypeObj,
+     Self,
+     Shape,
+ )
+
+ class NDArrayBacked:
+     _dtype: DtypeObj
+     _ndarray: np.ndarray
+     def __init__(self, values: np.ndarray, dtype: DtypeObj) -> None: ...
+     @classmethod
+     def _simple_new(cls, values: np.ndarray, dtype: DtypeObj): ...
+     def _from_backing_data(self, values: np.ndarray): ...
+     def __setstate__(self, state): ...
+     def __len__(self) -> int: ...
+     @property
+     def shape(self) -> Shape: ...
+     @property
+     def ndim(self) -> int: ...
+     @property
+     def size(self) -> int: ...
+     @property
+     def nbytes(self) -> int: ...
+     def copy(self, order=...): ...
+     def delete(self, loc, axis=...): ...
+     def swapaxes(self, axis1, axis2): ...
+     def repeat(self, repeats: int | Sequence[int], axis: int | None = ...): ...
+     def reshape(self, *args, **kwargs): ...
+     def ravel(self, order=...): ...
+     @property
+     def T(self): ...
+     @classmethod
+     def _concat_same_type(
+         cls, to_concat: Sequence[Self], axis: AxisInt = ...
+     ) -> Self: ...
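NDArrayBacked is the C base class for pandas' numpy-backed extension arrays (DatetimeArray, TimedeltaArray, PeriodArray, Categorical), which is why it exposes the ndarray-like surface declared above. A hedged check:

import pandas as pd
from pandas._libs.arrays import NDArrayBacked

arr = pd.array(pd.date_range("2024-01-01", periods=3))  # DatetimeArray
print(isinstance(arr, NDArrayBacked))   # True
print(arr.shape, arr.ndim, arr.nbytes)  # (3,) 1 24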
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/byteswap.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (61.7 kB).
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/byteswap.pyi ADDED
@@ -0,0 +1,5 @@
+ def read_float_with_byteswap(data: bytes, offset: int, byteswap: bool) -> float: ...
+ def read_double_with_byteswap(data: bytes, offset: int, byteswap: bool) -> float: ...
+ def read_uint16_with_byteswap(data: bytes, offset: int, byteswap: bool) -> int: ...
+ def read_uint32_with_byteswap(data: bytes, offset: int, byteswap: bool) -> int: ...
+ def read_uint64_with_byteswap(data: bytes, offset: int, byteswap: bool) -> int: ...
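These helpers decode fixed-width values from a byte buffer, optionally swapping endianness; inside pandas they serve the SAS reader. A hedged sketch of the apparent semantics (read in native order, then swap if requested):

import struct
from pandas._libs.byteswap import read_uint32_with_byteswap

buf = struct.pack(">I", 0xDEADBEEF)              # big-endian payload
value = read_uint32_with_byteswap(buf, 0, True)  # swap is needed on little-endian hosts
print(hex(value))  # 0xdeadbeef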
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/groupby.pyi ADDED
@@ -0,0 +1,216 @@
+ from typing import Literal
+
+ import numpy as np
+
+ from pandas._typing import npt
+
+ def group_median_float64(
+     out: np.ndarray,  # ndarray[float64_t, ndim=2]
+     counts: npt.NDArray[np.int64],
+     values: np.ndarray,  # ndarray[float64_t, ndim=2]
+     labels: npt.NDArray[np.int64],
+     min_count: int = ...,  # Py_ssize_t
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_cumprod(
+     out: np.ndarray,  # float64_t[:, ::1]
+     values: np.ndarray,  # const float64_t[:, :]
+     labels: np.ndarray,  # const int64_t[:]
+     ngroups: int,
+     is_datetimelike: bool,
+     skipna: bool = ...,
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_cumsum(
+     out: np.ndarray,  # int64float_t[:, ::1]
+     values: np.ndarray,  # ndarray[int64float_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     ngroups: int,
+     is_datetimelike: bool,
+     skipna: bool = ...,
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_shift_indexer(
+     out: np.ndarray,  # int64_t[::1]
+     labels: np.ndarray,  # const int64_t[:]
+     ngroups: int,
+     periods: int,
+ ) -> None: ...
+ def group_fillna_indexer(
+     out: np.ndarray,  # ndarray[intp_t]
+     labels: np.ndarray,  # ndarray[int64_t]
+     sorted_labels: npt.NDArray[np.intp],
+     mask: npt.NDArray[np.uint8],
+     limit: int,  # int64_t
+     dropna: bool,
+ ) -> None: ...
+ def group_any_all(
+     out: np.ndarray,  # uint8_t[::1]
+     values: np.ndarray,  # const uint8_t[::1]
+     labels: np.ndarray,  # const int64_t[:]
+     mask: np.ndarray,  # const uint8_t[::1]
+     val_test: Literal["any", "all"],
+     skipna: bool,
+     result_mask: np.ndarray | None,
+ ) -> None: ...
+ def group_sum(
+     out: np.ndarray,  # complexfloatingintuint_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[complexfloatingintuint_t, ndim=2]
+     labels: np.ndarray,  # const intp_t[:]
+     mask: np.ndarray | None,
+     result_mask: np.ndarray | None = ...,
+     min_count: int = ...,
+     is_datetimelike: bool = ...,
+ ) -> None: ...
+ def group_prod(
+     out: np.ndarray,  # int64float_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[int64float_t, ndim=2]
+     labels: np.ndarray,  # const intp_t[:]
+     mask: np.ndarray | None,
+     result_mask: np.ndarray | None = ...,
+     min_count: int = ...,
+ ) -> None: ...
+ def group_var(
+     out: np.ndarray,  # floating[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[floating, ndim=2]
+     labels: np.ndarray,  # const intp_t[:]
+     min_count: int = ...,  # Py_ssize_t
+     ddof: int = ...,  # int64_t
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+     is_datetimelike: bool = ...,
+     name: str = ...,
+ ) -> None: ...
+ def group_skew(
+     out: np.ndarray,  # float64_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[float64_T, ndim=2]
+     labels: np.ndarray,  # const intp_t[::1]
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+     skipna: bool = ...,
+ ) -> None: ...
+ def group_mean(
+     out: np.ndarray,  # floating[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[floating, ndim=2]
+     labels: np.ndarray,  # const intp_t[:]
+     min_count: int = ...,  # Py_ssize_t
+     is_datetimelike: bool = ...,  # bint
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_ohlc(
+     out: np.ndarray,  # floatingintuint_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[floatingintuint_t, ndim=2]
+     labels: np.ndarray,  # const intp_t[:]
+     min_count: int = ...,
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_quantile(
+     out: npt.NDArray[np.float64],
+     values: np.ndarray,  # ndarray[numeric, ndim=1]
+     labels: npt.NDArray[np.intp],
+     mask: npt.NDArray[np.uint8],
+     qs: npt.NDArray[np.float64],  # const
+     starts: npt.NDArray[np.int64],
+     ends: npt.NDArray[np.int64],
+     interpolation: Literal["linear", "lower", "higher", "nearest", "midpoint"],
+     result_mask: np.ndarray | None,
+     is_datetimelike: bool,
+ ) -> None: ...
+ def group_last(
+     out: np.ndarray,  # rank_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[rank_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     mask: npt.NDArray[np.bool_] | None,
+     result_mask: npt.NDArray[np.bool_] | None = ...,
+     min_count: int = ...,  # Py_ssize_t
+     is_datetimelike: bool = ...,
+     skipna: bool = ...,
+ ) -> None: ...
+ def group_nth(
+     out: np.ndarray,  # rank_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[rank_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     mask: npt.NDArray[np.bool_] | None,
+     result_mask: npt.NDArray[np.bool_] | None = ...,
+     min_count: int = ...,  # int64_t
+     rank: int = ...,  # int64_t
+     is_datetimelike: bool = ...,
+     skipna: bool = ...,
+ ) -> None: ...
+ def group_rank(
+     out: np.ndarray,  # float64_t[:, ::1]
+     values: np.ndarray,  # ndarray[rank_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     ngroups: int,
+     is_datetimelike: bool,
+     ties_method: Literal["average", "min", "max", "first", "dense"] = ...,
+     ascending: bool = ...,
+     pct: bool = ...,
+     na_option: Literal["keep", "top", "bottom"] = ...,
+     mask: npt.NDArray[np.bool_] | None = ...,
+ ) -> None: ...
+ def group_max(
+     out: np.ndarray,  # groupby_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[groupby_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     min_count: int = ...,
+     is_datetimelike: bool = ...,
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_min(
+     out: np.ndarray,  # groupby_t[:, ::1]
+     counts: np.ndarray,  # int64_t[::1]
+     values: np.ndarray,  # ndarray[groupby_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     min_count: int = ...,
+     is_datetimelike: bool = ...,
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_idxmin_idxmax(
+     out: npt.NDArray[np.intp],
+     counts: npt.NDArray[np.int64],
+     values: np.ndarray,  # ndarray[groupby_t, ndim=2]
+     labels: npt.NDArray[np.intp],
+     min_count: int = ...,
+     is_datetimelike: bool = ...,
+     mask: np.ndarray | None = ...,
+     name: str = ...,
+     skipna: bool = ...,
+     result_mask: np.ndarray | None = ...,
+ ) -> None: ...
+ def group_cummin(
+     out: np.ndarray,  # groupby_t[:, ::1]
+     values: np.ndarray,  # ndarray[groupby_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     ngroups: int,
+     is_datetimelike: bool,
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+     skipna: bool = ...,
+ ) -> None: ...
+ def group_cummax(
+     out: np.ndarray,  # groupby_t[:, ::1]
+     values: np.ndarray,  # ndarray[groupby_t, ndim=2]
+     labels: np.ndarray,  # const int64_t[:]
+     ngroups: int,
+     is_datetimelike: bool,
+     mask: np.ndarray | None = ...,
+     result_mask: np.ndarray | None = ...,
+     skipna: bool = ...,
+ ) -> None: ...
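These kernels share the out/counts/values/labels calling convention and are what public groupby reductions dispatch to (e.g. sum -> group_sum, cumsum -> group_cumsum). Exercising them through the public API:

import pandas as pd

df = pd.DataFrame({"key": ["a", "b", "a"], "x": [1.0, 2.0, 3.0]})
print(df.groupby("key")["x"].sum())     # a -> 4.0, b -> 2.0 (group_sum)
print(df.groupby("key")["x"].cumsum())  # [1.0, 2.0, 4.0]    (group_cumsum)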
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/hashing.pyi ADDED
@@ -0,0 +1,9 @@
+ import numpy as np
+
+ from pandas._typing import npt
+
+ def hash_object_array(
+     arr: npt.NDArray[np.object_],
+     key: str,
+     encoding: str = ...,
+ ) -> npt.NDArray[np.uint64]: ...
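hash_object_array is the low-level kernel behind pandas' stable object hashing (pd.util.hash_pandas_object / hash_array). A hedged sketch; the 16-character key shown is pandas' default hash key:

import numpy as np
from pandas._libs.hashing import hash_object_array

values = np.array(["a", "b", "a"], dtype=object)
hashes = hash_object_array(values, "0123456789123456")  # key must be 16 characters
print(hashes.dtype)            # uint64
print(hashes[0] == hashes[2])  # True: hashing is deterministic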
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/hashtable.pyi ADDED
@@ -0,0 +1,252 @@
+ from typing import (
+     Any,
+     Hashable,
+     Literal,
+ )
+
+ import numpy as np
+
+ from pandas._typing import npt
+
+ def unique_label_indices(
+     labels: np.ndarray,  # const int64_t[:]
+ ) -> np.ndarray: ...
+
+ class Factorizer:
+     count: int
+     uniques: Any
+     def __init__(self, size_hint: int) -> None: ...
+     def get_count(self) -> int: ...
+     def factorize(
+         self,
+         values: np.ndarray,
+         na_sentinel=...,
+         na_value=...,
+         mask=...,
+     ) -> npt.NDArray[np.intp]: ...
+
+ class ObjectFactorizer(Factorizer):
+     table: PyObjectHashTable
+     uniques: ObjectVector
+
+ class Int64Factorizer(Factorizer):
+     table: Int64HashTable
+     uniques: Int64Vector
+
+ class UInt64Factorizer(Factorizer):
+     table: UInt64HashTable
+     uniques: UInt64Vector
+
+ class Int32Factorizer(Factorizer):
+     table: Int32HashTable
+     uniques: Int32Vector
+
+ class UInt32Factorizer(Factorizer):
+     table: UInt32HashTable
+     uniques: UInt32Vector
+
+ class Int16Factorizer(Factorizer):
+     table: Int16HashTable
+     uniques: Int16Vector
+
+ class UInt16Factorizer(Factorizer):
+     table: UInt16HashTable
+     uniques: UInt16Vector
+
+ class Int8Factorizer(Factorizer):
+     table: Int8HashTable
+     uniques: Int8Vector
+
+ class UInt8Factorizer(Factorizer):
+     table: UInt8HashTable
+     uniques: UInt8Vector
+
+ class Float64Factorizer(Factorizer):
+     table: Float64HashTable
+     uniques: Float64Vector
+
+ class Float32Factorizer(Factorizer):
+     table: Float32HashTable
+     uniques: Float32Vector
+
+ class Complex64Factorizer(Factorizer):
+     table: Complex64HashTable
+     uniques: Complex64Vector
+
+ class Complex128Factorizer(Factorizer):
+     table: Complex128HashTable
+     uniques: Complex128Vector
+
+ class Int64Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.int64]: ...
+
+ class Int32Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.int32]: ...
+
+ class Int16Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.int16]: ...
+
+ class Int8Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.int8]: ...
+
+ class UInt64Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.uint64]: ...
+
+ class UInt32Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.uint32]: ...
+
+ class UInt16Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.uint16]: ...
+
+ class UInt8Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.uint8]: ...
+
+ class Float64Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.float64]: ...
+
+ class Float32Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.float32]: ...
+
+ class Complex128Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.complex128]: ...
+
+ class Complex64Vector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.complex64]: ...
+
+ class StringVector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.object_]: ...
+
+ class ObjectVector:
+     def __init__(self, *args) -> None: ...
+     def __len__(self) -> int: ...
+     def to_array(self) -> npt.NDArray[np.object_]: ...
+
+ class HashTable:
+     # NB: The base HashTable class does _not_ actually have these methods;
+     # we are putting them here for the sake of mypy to avoid
+     # reproducing them in each subclass below.
+     def __init__(self, size_hint: int = ..., uses_mask: bool = ...) -> None: ...
+     def __len__(self) -> int: ...
+     def __contains__(self, key: Hashable) -> bool: ...
+     def sizeof(self, deep: bool = ...) -> int: ...
+     def get_state(self) -> dict[str, int]: ...
+     # TODO: `val/key` type is subclass-specific
+     def get_item(self, val): ...  # TODO: return type?
+     def set_item(self, key, val) -> None: ...
+     def get_na(self): ...  # TODO: return type?
+     def set_na(self, val) -> None: ...
+     def map_locations(
+         self,
+         values: np.ndarray,  # np.ndarray[subclass-specific]
+         mask: npt.NDArray[np.bool_] | None = ...,
+     ) -> None: ...
+     def lookup(
+         self,
+         values: np.ndarray,  # np.ndarray[subclass-specific]
+         mask: npt.NDArray[np.bool_] | None = ...,
+     ) -> npt.NDArray[np.intp]: ...
+     def get_labels(
+         self,
+         values: np.ndarray,  # np.ndarray[subclass-specific]
+         uniques,  # SubclassTypeVector
+         count_prior: int = ...,
+         na_sentinel: int = ...,
+         na_value: object = ...,
+         mask=...,
+     ) -> npt.NDArray[np.intp]: ...
+     def unique(
+         self,
+         values: np.ndarray,  # np.ndarray[subclass-specific]
+         return_inverse: bool = ...,
+         mask=...,
+     ) -> (
+         tuple[
+             np.ndarray,  # np.ndarray[subclass-specific]
+             npt.NDArray[np.intp],
+         ]
+         | np.ndarray
+     ): ...  # np.ndarray[subclass-specific]
+     def factorize(
+         self,
+         values: np.ndarray,  # np.ndarray[subclass-specific]
+         na_sentinel: int = ...,
+         na_value: object = ...,
+         mask=...,
+         ignore_na: bool = True,
+     ) -> tuple[np.ndarray, npt.NDArray[np.intp]]: ...  # np.ndarray[subclass-specific]
+
+ class Complex128HashTable(HashTable): ...
+ class Complex64HashTable(HashTable): ...
+ class Float64HashTable(HashTable): ...
+ class Float32HashTable(HashTable): ...
+
+ class Int64HashTable(HashTable):
+     # Only Int64HashTable has get_labels_groupby, map_keys_to_values
+     def get_labels_groupby(
+         self,
+         values: npt.NDArray[np.int64],  # const int64_t[:]
+     ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.int64]]: ...
+     def map_keys_to_values(
+         self,
+         keys: npt.NDArray[np.int64],
+         values: npt.NDArray[np.int64],  # const int64_t[:]
+     ) -> None: ...
+
+ class Int32HashTable(HashTable): ...
+ class Int16HashTable(HashTable): ...
+ class Int8HashTable(HashTable): ...
+ class UInt64HashTable(HashTable): ...
+ class UInt32HashTable(HashTable): ...
+ class UInt16HashTable(HashTable): ...
+ class UInt8HashTable(HashTable): ...
+ class StringHashTable(HashTable): ...
+ class PyObjectHashTable(HashTable): ...
+ class IntpHashTable(HashTable): ...
+
+ def duplicated(
+     values: np.ndarray,
+     keep: Literal["last", "first", False] = ...,
+     mask: npt.NDArray[np.bool_] | None = ...,
+ ) -> npt.NDArray[np.bool_]: ...
+ def mode(
+     values: np.ndarray, dropna: bool, mask: npt.NDArray[np.bool_] | None = ...
+ ) -> np.ndarray: ...
+ def value_count(
+     values: np.ndarray,
+     dropna: bool,
+     mask: npt.NDArray[np.bool_] | None = ...,
+ ) -> tuple[np.ndarray, npt.NDArray[np.int64], int]: ...  # np.ndarray[same-as-values]
+
+ # arr and values should have same dtype
+ def ismember(
+     arr: np.ndarray,
+     values: np.ndarray,
+ ) -> npt.NDArray[np.bool_]: ...
+ def object_hash(obj) -> int: ...
+ def objects_are_equal(a, b) -> bool: ...
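These tables back pd.unique and pd.factorize; the stubbed unique/factorize methods can also be exercised directly. A hedged sketch against the declared signatures:

import numpy as np
from pandas._libs import hashtable as ht

table = ht.Int64HashTable()
values = np.array([3, 1, 3, 2, 1], dtype=np.int64)
print(table.unique(values))  # [3 1 2], uniques in first-seen order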
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/index.pyi ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas._typing import npt
4
+
5
+ from pandas import MultiIndex
6
+ from pandas.core.arrays import ExtensionArray
+
+ multiindex_nulls_shift: int
+
+ class IndexEngine:
+     over_size_threshold: bool
+     def __init__(self, values: np.ndarray) -> None: ...
+     def __contains__(self, val: object) -> bool: ...
+
+     # -> int | slice | np.ndarray[bool]
+     def get_loc(self, val: object) -> int | slice | np.ndarray: ...
+     def sizeof(self, deep: bool = ...) -> int: ...
+     def __sizeof__(self) -> int: ...
+     @property
+     def is_unique(self) -> bool: ...
+     @property
+     def is_monotonic_increasing(self) -> bool: ...
+     @property
+     def is_monotonic_decreasing(self) -> bool: ...
+     @property
+     def is_mapping_populated(self) -> bool: ...
+     def clear_mapping(self): ...
+     def get_indexer(self, values: np.ndarray) -> npt.NDArray[np.intp]: ...
+     def get_indexer_non_unique(
+         self,
+         targets: np.ndarray,
+     ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+
+ class MaskedIndexEngine(IndexEngine):
+     def __init__(self, values: object) -> None: ...
+     def get_indexer_non_unique(
+         self, targets: object
+     ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+
+ class Float64Engine(IndexEngine): ...
+ class Float32Engine(IndexEngine): ...
+ class Complex128Engine(IndexEngine): ...
+ class Complex64Engine(IndexEngine): ...
+ class Int64Engine(IndexEngine): ...
+ class Int32Engine(IndexEngine): ...
+ class Int16Engine(IndexEngine): ...
+ class Int8Engine(IndexEngine): ...
+ class UInt64Engine(IndexEngine): ...
+ class UInt32Engine(IndexEngine): ...
+ class UInt16Engine(IndexEngine): ...
+ class UInt8Engine(IndexEngine): ...
+ class ObjectEngine(IndexEngine): ...
+ class DatetimeEngine(Int64Engine): ...
+ class TimedeltaEngine(DatetimeEngine): ...
+ class PeriodEngine(Int64Engine): ...
+ class BoolEngine(UInt8Engine): ...
+ class MaskedFloat64Engine(MaskedIndexEngine): ...
+ class MaskedFloat32Engine(MaskedIndexEngine): ...
+ class MaskedComplex128Engine(MaskedIndexEngine): ...
+ class MaskedComplex64Engine(MaskedIndexEngine): ...
+ class MaskedInt64Engine(MaskedIndexEngine): ...
+ class MaskedInt32Engine(MaskedIndexEngine): ...
+ class MaskedInt16Engine(MaskedIndexEngine): ...
+ class MaskedInt8Engine(MaskedIndexEngine): ...
+ class MaskedUInt64Engine(MaskedIndexEngine): ...
+ class MaskedUInt32Engine(MaskedIndexEngine): ...
+ class MaskedUInt16Engine(MaskedIndexEngine): ...
+ class MaskedUInt8Engine(MaskedIndexEngine): ...
+ class MaskedBoolEngine(MaskedUInt8Engine): ...
+
+ class BaseMultiIndexCodesEngine:
+     levels: list[np.ndarray]
+     offsets: np.ndarray  # ndarray[uint64_t, ndim=1]
+
+     def __init__(
+         self,
+         levels: list[np.ndarray],  # all entries hashable
+         labels: list[np.ndarray],  # all entries integer-dtyped
+         offsets: np.ndarray,  # np.ndarray[np.uint64, ndim=1]
+     ) -> None: ...
+     def get_indexer(self, target: npt.NDArray[np.object_]) -> npt.NDArray[np.intp]: ...
+     def _extract_level_codes(self, target: MultiIndex) -> np.ndarray: ...
+
+ class ExtensionEngine:
+     def __init__(self, values: ExtensionArray) -> None: ...
+     def __contains__(self, val: object) -> bool: ...
+     def get_loc(self, val: object) -> int | slice | np.ndarray: ...
+     def get_indexer(self, values: np.ndarray) -> npt.NDArray[np.intp]: ...
+     def get_indexer_non_unique(
+         self,
+         targets: np.ndarray,
+     ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+     @property
+     def is_unique(self) -> bool: ...
+     @property
+     def is_monotonic_increasing(self) -> bool: ...
+     @property
+     def is_monotonic_decreasing(self) -> bool: ...
+     def sizeof(self, deep: bool = ...) -> int: ...
+     def clear_mapping(self): ...
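
These engine classes are internal (pandas builds one lazily per Index via `Index._engine`), but a minimal sketch of the contract typed above, constructing an `Int64Engine` by hand purely for illustration:

import numpy as np
from pandas._libs import index as libindex

values = np.array([10, 20, 20, 30], dtype=np.int64)
engine = libindex.Int64Engine(values)
engine.is_unique                 # False: 20 appears twice
engine.get_loc(10)               # 0 -- an int when the label is unique
engine.get_loc(20)               # a slice or boolean mask for duplicates
engine.get_indexer(np.array([30, 10], dtype=np.int64))  # array([3, 0])

What `get_loc` returns for duplicated labels depends on whether the matching positions are contiguous, which is why the stub's return type is the union `int | slice | np.ndarray`.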
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/indexing.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (66.6 kB).
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/indexing.pyi ADDED
@@ -0,0 +1,17 @@
+ from typing import (
+     Generic,
+     TypeVar,
+ )
+
+ from pandas.core.indexing import IndexingMixin
+
+ _IndexingMixinT = TypeVar("_IndexingMixinT", bound=IndexingMixin)
+
+ class NDFrameIndexerBase(Generic[_IndexingMixinT]):
+     name: str
+     # in practice obj is either a DataFrame or a Series
+     obj: _IndexingMixinT
+
+     def __init__(self, name: str, obj: _IndexingMixinT) -> None: ...
+     @property
+     def ndim(self) -> int: ...
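
`NDFrameIndexerBase` is the C base class of the `.loc`/`.iloc` accessor objects, so its typed attributes can be observed through public pandas (a sketch; the accessor class itself is internal):

import pandas as pd

df = pd.DataFrame({"a": [1, 2]})
indexer = df.loc     # a subclass of NDFrameIndexerBase
indexer.name         # "loc"
indexer.obj is df    # True -- `obj` is the owning DataFrame/Series
indexer.ndim         # 2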
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/internals.pyi ADDED
@@ -0,0 +1,94 @@
+ from typing import (
+     Iterator,
+     Sequence,
+     final,
+     overload,
+ )
+ import weakref
+
+ import numpy as np
+
+ from pandas._typing import (
+     ArrayLike,
+     Self,
+     npt,
+ )
+
+ from pandas import Index
+ from pandas.core.internals.blocks import Block as B
+
+ def slice_len(slc: slice, objlen: int = ...) -> int: ...
+ def get_concat_blkno_indexers(
+     blknos_list: list[npt.NDArray[np.intp]],
+ ) -> list[tuple[npt.NDArray[np.intp], BlockPlacement]]: ...
+ def get_blkno_indexers(
+     blknos: np.ndarray,  # int64_t[:]
+     group: bool = ...,
+ ) -> list[tuple[int, slice | np.ndarray]]: ...
+ def get_blkno_placements(
+     blknos: np.ndarray,
+     group: bool = ...,
+ ) -> Iterator[tuple[int, BlockPlacement]]: ...
+ def update_blklocs_and_blknos(
+     blklocs: npt.NDArray[np.intp],
+     blknos: npt.NDArray[np.intp],
+     loc: int,
+     nblocks: int,
+ ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+ @final
+ class BlockPlacement:
+     def __init__(self, val: int | slice | np.ndarray) -> None: ...
+     @property
+     def indexer(self) -> np.ndarray | slice: ...
+     @property
+     def as_array(self) -> np.ndarray: ...
+     @property
+     def as_slice(self) -> slice: ...
+     @property
+     def is_slice_like(self) -> bool: ...
+     @overload
+     def __getitem__(
+         self, loc: slice | Sequence[int] | npt.NDArray[np.intp]
+     ) -> BlockPlacement: ...
+     @overload
+     def __getitem__(self, loc: int) -> int: ...
+     def __iter__(self) -> Iterator[int]: ...
+     def __len__(self) -> int: ...
+     def delete(self, loc) -> BlockPlacement: ...
+     def add(self, other) -> BlockPlacement: ...
+     def append(self, others: list[BlockPlacement]) -> BlockPlacement: ...
+     def tile_for_unstack(self, factor: int) -> npt.NDArray[np.intp]: ...
+
+ class Block:
+     _mgr_locs: BlockPlacement
+     ndim: int
+     values: ArrayLike
+     refs: BlockValuesRefs
+     def __init__(
+         self,
+         values: ArrayLike,
+         placement: BlockPlacement,
+         ndim: int,
+         refs: BlockValuesRefs | None = ...,
+     ) -> None: ...
+     def slice_block_rows(self, slicer: slice) -> Self: ...
+
+ class BlockManager:
+     blocks: tuple[B, ...]
+     axes: list[Index]
+     _known_consolidated: bool
+     _is_consolidated: bool
+     _blknos: np.ndarray
+     _blklocs: np.ndarray
+     def __init__(
+         self, blocks: tuple[B, ...], axes: list[Index], verify_integrity=...
+     ) -> None: ...
+     def get_slice(self, slobj: slice, axis: int = ...) -> Self: ...
+     def _rebuild_blknos_and_blklocs(self) -> None: ...
+
+ class BlockValuesRefs:
+     referenced_blocks: list[weakref.ref]
+     def __init__(self, blk: Block | None = ...) -> None: ...
+     def add_reference(self, blk: Block) -> None: ...
+     def add_index_reference(self, index: Index) -> None: ...
+     def has_reference(self) -> bool: ...
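
A sketch of `BlockPlacement`, which records the manager rows a block occupies; it is constructed directly here only to show the conversions typed above:

import numpy as np
from pandas._libs.internals import BlockPlacement

bp = BlockPlacement(slice(0, 6, 2))   # rows 0, 2, 4
bp.is_slice_like    # True
bp.as_array         # array([0, 2, 4])
len(bp)             # 3
bp[1]               # 2 -- scalar indexing returns an int
bp[[0, 2]]          # a new BlockPlacement for rows 0 and 4

`BlockValuesRefs` is the weakref bookkeeping behind Copy-on-Write: each block sharing the same values registers itself, and `has_reference()` tells a write whether it must copy first.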
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/interval.pyi ADDED
@@ -0,0 +1,174 @@
+ from typing import (
+     Any,
+     Generic,
+     TypeVar,
+     overload,
+ )
+
+ import numpy as np
+ import numpy.typing as npt
+
+ from pandas._typing import (
+     IntervalClosedType,
+     Timedelta,
+     Timestamp,
+ )
+
+ VALID_CLOSED: frozenset[str]
+
+ _OrderableScalarT = TypeVar("_OrderableScalarT", int, float)
+ _OrderableTimesT = TypeVar("_OrderableTimesT", Timestamp, Timedelta)
+ _OrderableT = TypeVar("_OrderableT", int, float, Timestamp, Timedelta)
+
+ class _LengthDescriptor:
+     @overload
+     def __get__(
+         self, instance: Interval[_OrderableScalarT], owner: Any
+     ) -> _OrderableScalarT: ...
+     @overload
+     def __get__(
+         self, instance: Interval[_OrderableTimesT], owner: Any
+     ) -> Timedelta: ...
+
+ class _MidDescriptor:
+     @overload
+     def __get__(self, instance: Interval[_OrderableScalarT], owner: Any) -> float: ...
+     @overload
+     def __get__(
+         self, instance: Interval[_OrderableTimesT], owner: Any
+     ) -> _OrderableTimesT: ...
+
+ class IntervalMixin:
+     @property
+     def closed_left(self) -> bool: ...
+     @property
+     def closed_right(self) -> bool: ...
+     @property
+     def open_left(self) -> bool: ...
+     @property
+     def open_right(self) -> bool: ...
+     @property
+     def is_empty(self) -> bool: ...
+     def _check_closed_matches(self, other: IntervalMixin, name: str = ...) -> None: ...
+
+ class Interval(IntervalMixin, Generic[_OrderableT]):
+     @property
+     def left(self: Interval[_OrderableT]) -> _OrderableT: ...
+     @property
+     def right(self: Interval[_OrderableT]) -> _OrderableT: ...
+     @property
+     def closed(self) -> IntervalClosedType: ...
+     mid: _MidDescriptor
+     length: _LengthDescriptor
+     def __init__(
+         self,
+         left: _OrderableT,
+         right: _OrderableT,
+         closed: IntervalClosedType = ...,
+     ) -> None: ...
+     def __hash__(self) -> int: ...
+     @overload
+     def __contains__(
+         self: Interval[Timedelta], key: Timedelta | Interval[Timedelta]
+     ) -> bool: ...
+     @overload
+     def __contains__(
+         self: Interval[Timestamp], key: Timestamp | Interval[Timestamp]
+     ) -> bool: ...
+     @overload
+     def __contains__(
+         self: Interval[_OrderableScalarT],
+         key: _OrderableScalarT | Interval[_OrderableScalarT],
+     ) -> bool: ...
+     @overload
+     def __add__(
+         self: Interval[_OrderableTimesT], y: Timedelta
+     ) -> Interval[_OrderableTimesT]: ...
+     @overload
+     def __add__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __add__(self: Interval[float], y: float) -> Interval[float]: ...
+     @overload
+     def __radd__(
+         self: Interval[_OrderableTimesT], y: Timedelta
+     ) -> Interval[_OrderableTimesT]: ...
+     @overload
+     def __radd__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __radd__(self: Interval[float], y: float) -> Interval[float]: ...
+     @overload
+     def __sub__(
+         self: Interval[_OrderableTimesT], y: Timedelta
+     ) -> Interval[_OrderableTimesT]: ...
+     @overload
+     def __sub__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __sub__(self: Interval[float], y: float) -> Interval[float]: ...
+     @overload
+     def __rsub__(
+         self: Interval[_OrderableTimesT], y: Timedelta
+     ) -> Interval[_OrderableTimesT]: ...
+     @overload
+     def __rsub__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __rsub__(self: Interval[float], y: float) -> Interval[float]: ...
+     @overload
+     def __mul__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __mul__(self: Interval[float], y: float) -> Interval[float]: ...
+     @overload
+     def __rmul__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __rmul__(self: Interval[float], y: float) -> Interval[float]: ...
+     @overload
+     def __truediv__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __truediv__(self: Interval[float], y: float) -> Interval[float]: ...
+     @overload
+     def __floordiv__(
+         self: Interval[int], y: _OrderableScalarT
+     ) -> Interval[_OrderableScalarT]: ...
+     @overload
+     def __floordiv__(self: Interval[float], y: float) -> Interval[float]: ...
+     def overlaps(self: Interval[_OrderableT], other: Interval[_OrderableT]) -> bool: ...
+
+ def intervals_to_interval_bounds(
+     intervals: np.ndarray, validate_closed: bool = ...
+ ) -> tuple[np.ndarray, np.ndarray, IntervalClosedType]: ...
+
+ class IntervalTree(IntervalMixin):
+     def __init__(
+         self,
+         left: np.ndarray,
+         right: np.ndarray,
+         closed: IntervalClosedType = ...,
+         leaf_size: int = ...,
+     ) -> None: ...
+     @property
+     def mid(self) -> np.ndarray: ...
+     @property
+     def length(self) -> np.ndarray: ...
+     def get_indexer(self, target) -> npt.NDArray[np.intp]: ...
+     def get_indexer_non_unique(
+         self, target
+     ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+     _na_count: int
+     @property
+     def is_overlapping(self) -> bool: ...
+     @property
+     def is_monotonic_increasing(self) -> bool: ...
+     def clear_mapping(self) -> None: ...
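
These stubs type the public `pd.Interval`. The overloads encode, for example, that adding a scalar shifts both endpoints and that membership respects closedness:

import pandas as pd

iv = pd.Interval(0, 5, closed="right")   # (0, 5]
0 in iv          # False: the left endpoint is open
5 in iv          # True: the right endpoint is closed
iv.length        # 5
(iv + 2).left    # 2: addition shifts both bounds
iv.overlaps(pd.Interval(4, 6))   # True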
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/join.pyi ADDED
@@ -0,0 +1,79 @@
+ import numpy as np
+
+ from pandas._typing import npt
+
+ def inner_join(
+     left: np.ndarray,  # const intp_t[:]
+     right: np.ndarray,  # const intp_t[:]
+     max_groups: int,
+     sort: bool = ...,
+ ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+ def left_outer_join(
+     left: np.ndarray,  # const intp_t[:]
+     right: np.ndarray,  # const intp_t[:]
+     max_groups: int,
+     sort: bool = ...,
+ ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+ def full_outer_join(
+     left: np.ndarray,  # const intp_t[:]
+     right: np.ndarray,  # const intp_t[:]
+     max_groups: int,
+ ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+ def ffill_indexer(
+     indexer: np.ndarray,  # const intp_t[:]
+ ) -> npt.NDArray[np.intp]: ...
+ def left_join_indexer_unique(
+     left: np.ndarray,  # ndarray[join_t]
+     right: np.ndarray,  # ndarray[join_t]
+ ) -> npt.NDArray[np.intp]: ...
+ def left_join_indexer(
+     left: np.ndarray,  # ndarray[join_t]
+     right: np.ndarray,  # ndarray[join_t]
+ ) -> tuple[
+     np.ndarray,  # np.ndarray[join_t]
+     npt.NDArray[np.intp],
+     npt.NDArray[np.intp],
+ ]: ...
+ def inner_join_indexer(
+     left: np.ndarray,  # ndarray[join_t]
+     right: np.ndarray,  # ndarray[join_t]
+ ) -> tuple[
+     np.ndarray,  # np.ndarray[join_t]
+     npt.NDArray[np.intp],
+     npt.NDArray[np.intp],
+ ]: ...
+ def outer_join_indexer(
+     left: np.ndarray,  # ndarray[join_t]
+     right: np.ndarray,  # ndarray[join_t]
+ ) -> tuple[
+     np.ndarray,  # np.ndarray[join_t]
+     npt.NDArray[np.intp],
+     npt.NDArray[np.intp],
+ ]: ...
+ def asof_join_backward_on_X_by_Y(
+     left_values: np.ndarray,  # ndarray[numeric_t]
+     right_values: np.ndarray,  # ndarray[numeric_t]
+     left_by_values: np.ndarray,  # const int64_t[:]
+     right_by_values: np.ndarray,  # const int64_t[:]
+     allow_exact_matches: bool = ...,
+     tolerance: np.number | float | None = ...,
+     use_hashtable: bool = ...,
+ ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+ def asof_join_forward_on_X_by_Y(
+     left_values: np.ndarray,  # ndarray[numeric_t]
+     right_values: np.ndarray,  # ndarray[numeric_t]
+     left_by_values: np.ndarray,  # const int64_t[:]
+     right_by_values: np.ndarray,  # const int64_t[:]
+     allow_exact_matches: bool = ...,
+     tolerance: np.number | float | None = ...,
+     use_hashtable: bool = ...,
+ ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
+ def asof_join_nearest_on_X_by_Y(
+     left_values: np.ndarray,  # ndarray[numeric_t]
+     right_values: np.ndarray,  # ndarray[numeric_t]
+     left_by_values: np.ndarray,  # const int64_t[:]
+     right_by_values: np.ndarray,  # const int64_t[:]
+     allow_exact_matches: bool = ...,
+     tolerance: np.number | float | None = ...,
+     use_hashtable: bool = ...,
+ ) -> tuple[npt.NDArray[np.intp], npt.NDArray[np.intp]]: ...
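
These join primitives operate on factorized group codes, not labels. A sketch (the codes are hand-built here; in practice pandas obtains them from `pd.factorize`, and these functions are internal):

import numpy as np
from pandas._libs.join import left_outer_join

left = np.array([0, 1, 1], dtype=np.intp)    # codes into 3 groups
right = np.array([1, 2], dtype=np.intp)

left_idx, right_idx = left_outer_join(left, right, 3)
# right_idx holds -1 where a left code (group 0 here) has no match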
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/json.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (64.3 kB).
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/json.pyi ADDED
@@ -0,0 +1,23 @@
+ from typing import (
+     Any,
+     Callable,
+ )
+
+ def ujson_dumps(
+     obj: Any,
+     ensure_ascii: bool = ...,
+     double_precision: int = ...,
+     indent: int = ...,
+     orient: str = ...,
+     date_unit: str = ...,
+     iso_dates: bool = ...,
+     default_handler: None
+     | Callable[[Any], str | float | bool | list | dict | None] = ...,
+ ) -> str: ...
+ def ujson_loads(
+     s: str,
+     precise_float: bool = ...,
+     numpy: bool = ...,
+     dtype: None = ...,
+     labelled: bool = ...,
+ ) -> Any: ...
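
These are the vendored ujson entry points that back `read_json`/`to_json`; a round trip, assuming direct access to the private module (the public pandas API is the supported route):

from pandas._libs.json import ujson_dumps, ujson_loads

payload = ujson_dumps({"a": [1, 2.5], "b": None}, double_precision=10)
ujson_loads(payload)   # {'a': [1, 2.5], 'b': None}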
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/lib.pyi ADDED
@@ -0,0 +1,213 @@
+ # TODO(npdtypes): Many types specified here can be made more specific/accurate;
+ # the more specific versions are specified in comments
+ from decimal import Decimal
+ from typing import (
+     Any,
+     Callable,
+     Final,
+     Generator,
+     Hashable,
+     Literal,
+     TypeAlias,
+     overload,
+ )
+
+ import numpy as np
+
+ from pandas._libs.interval import Interval
+ from pandas._libs.tslibs import Period
+ from pandas._typing import (
+     ArrayLike,
+     DtypeObj,
+     TypeGuard,
+     npt,
+ )
+
+ # placeholder until we can specify np.ndarray[object, ndim=2]
+ ndarray_obj_2d = np.ndarray
+
+ from enum import Enum
+
+ class _NoDefault(Enum):
+     no_default = ...
+
+ no_default: Final = _NoDefault.no_default
+ NoDefault: TypeAlias = Literal[_NoDefault.no_default]
+
+ i8max: int
+ u8max: int
+
+ def is_np_dtype(dtype: object, kinds: str | None = ...) -> TypeGuard[np.dtype]: ...
+ def item_from_zerodim(val: object) -> object: ...
+ def infer_dtype(value: object, skipna: bool = ...) -> str: ...
+ def is_iterator(obj: object) -> bool: ...
+ def is_scalar(val: object) -> bool: ...
+ def is_list_like(obj: object, allow_sets: bool = ...) -> bool: ...
+ def is_pyarrow_array(obj: object) -> bool: ...
+ def is_period(val: object) -> TypeGuard[Period]: ...
+ def is_interval(obj: object) -> TypeGuard[Interval]: ...
+ def is_decimal(obj: object) -> TypeGuard[Decimal]: ...
+ def is_complex(obj: object) -> TypeGuard[complex]: ...
+ def is_bool(obj: object) -> TypeGuard[bool | np.bool_]: ...
+ def is_integer(obj: object) -> TypeGuard[int | np.integer]: ...
+ def is_int_or_none(obj) -> bool: ...
+ def is_float(obj: object) -> TypeGuard[float]: ...
+ def is_interval_array(values: np.ndarray) -> bool: ...
+ def is_datetime64_array(values: np.ndarray, skipna: bool = True) -> bool: ...
+ def is_timedelta_or_timedelta64_array(
+     values: np.ndarray, skipna: bool = True
+ ) -> bool: ...
+ def is_datetime_with_singletz_array(values: np.ndarray) -> bool: ...
+ def is_time_array(values: np.ndarray, skipna: bool = ...): ...
+ def is_date_array(values: np.ndarray, skipna: bool = ...): ...
+ def is_datetime_array(values: np.ndarray, skipna: bool = ...): ...
+ def is_string_array(values: np.ndarray, skipna: bool = ...): ...
+ def is_float_array(values: np.ndarray): ...
+ def is_integer_array(values: np.ndarray, skipna: bool = ...): ...
+ def is_bool_array(values: np.ndarray, skipna: bool = ...): ...
+ def fast_multiget(
+     mapping: dict,
+     keys: np.ndarray,  # object[:]
+     default=...,
+ ) -> np.ndarray: ...
+ def fast_unique_multiple_list_gen(gen: Generator, sort: bool = ...) -> list: ...
+ def fast_unique_multiple_list(lists: list, sort: bool | None = ...) -> list: ...
+ def map_infer(
+     arr: np.ndarray,
+     f: Callable[[Any], Any],
+     convert: bool = ...,
+     ignore_na: bool = ...,
+ ) -> np.ndarray: ...
+ @overload
+ def maybe_convert_objects(
+     objects: npt.NDArray[np.object_],
+     *,
+     try_float: bool = ...,
+     safe: bool = ...,
+     convert_numeric: bool = ...,
+     convert_non_numeric: Literal[False] = ...,
+     convert_to_nullable_dtype: Literal[False] = ...,
+     dtype_if_all_nat: DtypeObj | None = ...,
+ ) -> npt.NDArray[np.object_ | np.number]: ...
+ @overload
+ def maybe_convert_objects(
+     objects: npt.NDArray[np.object_],
+     *,
+     try_float: bool = ...,
+     safe: bool = ...,
+     convert_numeric: bool = ...,
+     convert_non_numeric: bool = ...,
+     convert_to_nullable_dtype: Literal[True] = ...,
+     dtype_if_all_nat: DtypeObj | None = ...,
+ ) -> ArrayLike: ...
+ @overload
+ def maybe_convert_objects(
+     objects: npt.NDArray[np.object_],
+     *,
+     try_float: bool = ...,
+     safe: bool = ...,
+     convert_numeric: bool = ...,
+     convert_non_numeric: bool = ...,
+     convert_to_nullable_dtype: bool = ...,
+     dtype_if_all_nat: DtypeObj | None = ...,
+ ) -> ArrayLike: ...
+ @overload
+ def maybe_convert_numeric(
+     values: npt.NDArray[np.object_],
+     na_values: set,
+     convert_empty: bool = ...,
+     coerce_numeric: bool = ...,
+     convert_to_masked_nullable: Literal[False] = ...,
+ ) -> tuple[np.ndarray, None]: ...
+ @overload
+ def maybe_convert_numeric(
+     values: npt.NDArray[np.object_],
+     na_values: set,
+     convert_empty: bool = ...,
+     coerce_numeric: bool = ...,
+     *,
+     convert_to_masked_nullable: Literal[True],
+ ) -> tuple[np.ndarray, np.ndarray]: ...
+
+ # TODO: restrict `arr`?
+ def ensure_string_array(
+     arr,
+     na_value: object = ...,
+     convert_na_value: bool = ...,
+     copy: bool = ...,
+     skipna: bool = ...,
+ ) -> npt.NDArray[np.object_]: ...
+ def convert_nans_to_NA(
+     arr: npt.NDArray[np.object_],
+ ) -> npt.NDArray[np.object_]: ...
+ def fast_zip(ndarrays: list) -> npt.NDArray[np.object_]: ...
+
+ # TODO: can we be more specific about rows?
+ def to_object_array_tuples(rows: object) -> ndarray_obj_2d: ...
+ def tuples_to_object_array(
+     tuples: npt.NDArray[np.object_],
+ ) -> ndarray_obj_2d: ...
+
+ # TODO: can we be more specific about rows?
+ def to_object_array(rows: object, min_width: int = ...) -> ndarray_obj_2d: ...
+ def dicts_to_array(dicts: list, columns: list) -> ndarray_obj_2d: ...
+ def maybe_booleans_to_slice(
+     mask: npt.NDArray[np.uint8],
+ ) -> slice | npt.NDArray[np.uint8]: ...
+ def maybe_indices_to_slice(
+     indices: npt.NDArray[np.intp],
+     max_len: int,
+ ) -> slice | npt.NDArray[np.intp]: ...
+ def is_all_arraylike(obj: list) -> bool: ...
+
+ # -----------------------------------------------------------------
+ # Functions which in reality take memoryviews
+
+ def memory_usage_of_objects(arr: np.ndarray) -> int: ...  # object[:]  # np.int64
+ def map_infer_mask(
+     arr: np.ndarray,
+     f: Callable[[Any], Any],
+     mask: np.ndarray,  # const uint8_t[:]
+     convert: bool = ...,
+     na_value: Any = ...,
+     dtype: np.dtype = ...,
+ ) -> np.ndarray: ...
+ def indices_fast(
+     index: npt.NDArray[np.intp],
+     labels: np.ndarray,  # const int64_t[:]
+     keys: list,
+     sorted_labels: list[npt.NDArray[np.int64]],
+ ) -> dict[Hashable, npt.NDArray[np.intp]]: ...
+ def generate_slices(
+     labels: np.ndarray, ngroups: int  # const intp_t[:]
+ ) -> tuple[npt.NDArray[np.int64], npt.NDArray[np.int64]]: ...
+ def count_level_2d(
+     mask: np.ndarray,  # ndarray[uint8_t, ndim=2, cast=True],
+     labels: np.ndarray,  # const intp_t[:]
+     max_bin: int,
+ ) -> np.ndarray: ...  # np.ndarray[np.int64, ndim=2]
+ def get_level_sorter(
+     codes: np.ndarray,  # const int64_t[:]
+     starts: np.ndarray,  # const intp_t[:]
+ ) -> np.ndarray: ...  # np.ndarray[np.intp, ndim=1]
+ def generate_bins_dt64(
+     values: npt.NDArray[np.int64],
+     binner: np.ndarray,  # const int64_t[:]
+     closed: object = ...,
+     hasnans: bool = ...,
+ ) -> np.ndarray: ...  # np.ndarray[np.int64, ndim=1]
+ def array_equivalent_object(
+     left: npt.NDArray[np.object_],
+     right: npt.NDArray[np.object_],
+ ) -> bool: ...
+ def has_infs(arr: np.ndarray) -> bool: ...  # const floating[:]
+ def has_only_ints_or_nan(arr: np.ndarray) -> bool: ...  # const floating[:]
+ def get_reverse_indexer(
+     indexer: np.ndarray,  # const intp_t[:]
+     length: int,
+ ) -> npt.NDArray[np.intp]: ...
+ def is_bool_list(obj: list) -> bool: ...
+ def dtypes_all_equal(types: list[DtypeObj]) -> bool: ...
+ def is_range_indexer(
+     left: np.ndarray, n: int  # np.ndarray[np.int64, ndim=1]
+ ) -> bool: ...
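
Many of these inference helpers are re-exported through `pandas.api.types`, which is the stable way to exercise them:

import pandas.api.types as pdt

pdt.infer_dtype(["a", "b"])   # 'string'
pdt.infer_dtype([1, 2, 3])    # 'integer'
pdt.is_scalar(5)              # True
pdt.is_list_like((1, 2))      # True
pdt.is_list_like("abc")       # False: strings are deliberately excluded

The `no_default` sentinel defined above is how pandas distinguishes "argument omitted" from an explicit `None` in keyword defaults.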
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/missing.pyi ADDED
@@ -0,0 +1,16 @@
+ import numpy as np
+ from numpy import typing as npt
+
+ class NAType:
+     def __new__(cls, *args, **kwargs): ...
+
+ NA: NAType
+
+ def is_matching_na(
+     left: object, right: object, nan_matches_none: bool = ...
+ ) -> bool: ...
+ def isposinf_scalar(val: object) -> bool: ...
+ def isneginf_scalar(val: object) -> bool: ...
+ def checknull(val: object, inf_as_na: bool = ...) -> bool: ...
+ def isnaobj(arr: np.ndarray, inf_as_na: bool = ...) -> npt.NDArray[np.bool_]: ...
+ def is_numeric_na(values: np.ndarray) -> npt.NDArray[np.bool_]: ...
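
`NA` here is the public `pd.NA` singleton, and `checknull` is the scalar kernel underneath `pd.isna`; a sketch mixing the internal and public spellings:

import numpy as np
import pandas as pd
from pandas._libs.missing import checknull

pd.NA is type(pd.NA)()   # True: NAType.__new__ returns the singleton
checknull(np.nan)        # True
checknull(None)          # True
checknull(0.0)           # False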
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/ops.pyi ADDED
@@ -0,0 +1,51 @@
+ from typing import (
+     Any,
+     Callable,
+     Iterable,
+     Literal,
+     TypeAlias,
+     overload,
+ )
+
+ import numpy as np
+
+ from pandas._typing import npt
+
+ _BinOp: TypeAlias = Callable[[Any, Any], Any]
+ _BoolOp: TypeAlias = Callable[[Any, Any], bool]
+
+ def scalar_compare(
+     values: np.ndarray,  # object[:]
+     val: object,
+     op: _BoolOp,  # {operator.eq, operator.ne, ...}
+ ) -> npt.NDArray[np.bool_]: ...
+ def vec_compare(
+     left: npt.NDArray[np.object_],
+     right: npt.NDArray[np.object_],
+     op: _BoolOp,  # {operator.eq, operator.ne, ...}
+ ) -> npt.NDArray[np.bool_]: ...
+ def scalar_binop(
+     values: np.ndarray,  # object[:]
+     val: object,
+     op: _BinOp,  # binary operator
+ ) -> np.ndarray: ...
+ def vec_binop(
+     left: np.ndarray,  # object[:]
+     right: np.ndarray,  # object[:]
+     op: _BinOp,  # binary operator
+ ) -> np.ndarray: ...
+ @overload
+ def maybe_convert_bool(
+     arr: npt.NDArray[np.object_],
+     true_values: Iterable | None = None,
+     false_values: Iterable | None = None,
+     convert_to_masked_nullable: Literal[False] = ...,
+ ) -> tuple[np.ndarray, None]: ...
+ @overload
+ def maybe_convert_bool(
+     arr: npt.NDArray[np.object_],
+     true_values: Iterable = ...,
+     false_values: Iterable = ...,
+     *,
+     convert_to_masked_nullable: Literal[True],
+ ) -> tuple[np.ndarray, np.ndarray]: ...
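
A sketch of `scalar_compare`, which vectorizes one scalar comparison over an object array (object dtype is required; this is what object-dtype Series comparisons bottom out in):

import operator
import numpy as np
from pandas._libs.ops import scalar_compare

values = np.array(["a", "b", None], dtype=object)
scalar_compare(values, "a", operator.eq)   # array([ True, False, False])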
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/ops_dispatch.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (61.7 kB).
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/ops_dispatch.pyi ADDED
@@ -0,0 +1,5 @@
+ import numpy as np
+
+ def maybe_dispatch_ufunc_to_dunder_op(
+     self, ufunc: np.ufunc, method: str, *inputs, **kwargs
+ ): ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/pandas_datetime.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (39.3 kB).
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/pandas_parser.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (43.4 kB).
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/parsers.pyi ADDED
@@ -0,0 +1,77 @@
+ from typing import (
+     Hashable,
+     Literal,
+ )
+
+ import numpy as np
+
+ from pandas._typing import (
+     ArrayLike,
+     Dtype,
+     npt,
+ )
+
+ STR_NA_VALUES: set[str]
+ DEFAULT_BUFFER_HEURISTIC: int
+
+ def sanitize_objects(
+     values: npt.NDArray[np.object_],
+     na_values: set,
+ ) -> int: ...
+
+ class TextReader:
+     unnamed_cols: set[str]
+     table_width: int  # int64_t
+     leading_cols: int  # int64_t
+     header: list[list[int]]  # non-negative integers
+     def __init__(
+         self,
+         source,
+         delimiter: bytes | str = ...,  # single-character only
+         header=...,
+         header_start: int = ...,  # int64_t
+         header_end: int = ...,  # uint64_t
+         index_col=...,
+         names=...,
+         tokenize_chunksize: int = ...,  # int64_t
+         delim_whitespace: bool = ...,
+         converters=...,
+         skipinitialspace: bool = ...,
+         escapechar: bytes | str | None = ...,  # single-character only
+         doublequote: bool = ...,
+         quotechar: str | bytes | None = ...,  # at most 1 character
+         quoting: int = ...,
+         lineterminator: bytes | str | None = ...,  # at most 1 character
+         comment=...,
+         decimal: bytes | str = ...,  # single-character only
+         thousands: bytes | str | None = ...,  # single-character only
+         dtype: Dtype | dict[Hashable, Dtype] = ...,
+         usecols=...,
+         error_bad_lines: bool = ...,
+         warn_bad_lines: bool = ...,
+         na_filter: bool = ...,
+         na_values=...,
+         na_fvalues=...,
+         keep_default_na: bool = ...,
+         true_values=...,
+         false_values=...,
+         allow_leading_cols: bool = ...,
+         skiprows=...,
+         skipfooter: int = ...,  # int64_t
+         verbose: bool = ...,
+         float_precision: Literal["round_trip", "legacy", "high"] | None = ...,
+         skip_blank_lines: bool = ...,
+         encoding_errors: bytes | str = ...,
+     ) -> None: ...
+     def set_noconvert(self, i: int) -> None: ...
+     def remove_noconvert(self, i: int) -> None: ...
+     def close(self) -> None: ...
+     def read(self, rows: int | None = ...) -> dict[int, ArrayLike]: ...
+     def read_low_memory(self, rows: int | None) -> list[dict[int, ArrayLike]]: ...
+
+ # _maybe_upcast, na_values are only exposed for testing
+ na_values: dict
+
+ def _maybe_upcast(
+     arr, use_dtype_backend: bool = ..., dtype_backend: str = ...
+ ) -> np.ndarray: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/properties.cpython-310-x86_64-linux-gnu.so ADDED
Binary file (91.9 kB).
 
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/properties.pyi ADDED
@@ -0,0 +1,27 @@
+ from typing import (
+     Sequence,
+     overload,
+ )
+
+ from pandas._typing import (
+     AnyArrayLike,
+     DataFrame,
+     Index,
+     Series,
+ )
+
+ # note: this is a lie to make type checkers happy (they special
+ # case property). cache_readonly uses attribute names similar to
+ # property (fget) but it does not provide fset and fdel.
+ cache_readonly = property
+
+ class AxisProperty:
+     axis: int
+     def __init__(self, axis: int = ..., doc: str = ...) -> None: ...
+     @overload
+     def __get__(self, obj: DataFrame | Series, type) -> Index: ...
+     @overload
+     def __get__(self, obj: None, type) -> AxisProperty: ...
+     def __set__(
+         self, obj: DataFrame | Series, value: AnyArrayLike | Sequence
+     ) -> None: ...
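
As the note says, `cache_readonly = property` is a typing fiction; at runtime it is a compute-once descriptor that caches the result on the instance. A sketch of the intended pattern (the class here is hypothetical, for illustration only):

from pandas._libs.properties import cache_readonly

class Thing:
    @cache_readonly
    def expensive(self) -> int:
        print("computing...")
        return 42

t = Thing()
t.expensive   # prints "computing...", returns 42
t.expensive   # returns the cached 42; no recompute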
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/reshape.pyi ADDED
@@ -0,0 +1,16 @@
+ import numpy as np
+
+ from pandas._typing import npt
+
+ def unstack(
+     values: np.ndarray,  # reshape_t[:, :]
+     mask: np.ndarray,  # const uint8_t[:]
+     stride: int,
+     length: int,
+     width: int,
+     new_values: np.ndarray,  # reshape_t[:, :]
+     new_mask: np.ndarray,  # uint8_t[:, :]
+ ) -> None: ...
+ def explode(
+     values: npt.NDArray[np.object_],
+ ) -> tuple[npt.NDArray[np.object_], npt.NDArray[np.int64]]: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/sas.pyi ADDED
@@ -0,0 +1,7 @@
+ from pandas.io.sas.sas7bdat import SAS7BDATReader
+
+ class Parser:
+     def __init__(self, parser: SAS7BDATReader) -> None: ...
+     def read(self, nrows: int) -> None: ...
+
+ def get_subheader_index(signature: bytes) -> int: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/sparse.pyi ADDED
@@ -0,0 +1,51 @@
+ from typing import Sequence
+
+ import numpy as np
+
+ from pandas._typing import (
+     Self,
+     npt,
+ )
+
+ class SparseIndex:
+     length: int
+     npoints: int
+     def __init__(self) -> None: ...
+     @property
+     def ngaps(self) -> int: ...
+     @property
+     def nbytes(self) -> int: ...
+     @property
+     def indices(self) -> npt.NDArray[np.int32]: ...
+     def equals(self, other) -> bool: ...
+     def lookup(self, index: int) -> np.int32: ...
+     def lookup_array(self, indexer: npt.NDArray[np.int32]) -> npt.NDArray[np.int32]: ...
+     def to_int_index(self) -> IntIndex: ...
+     def to_block_index(self) -> BlockIndex: ...
+     def intersect(self, y_: SparseIndex) -> Self: ...
+     def make_union(self, y_: SparseIndex) -> Self: ...
+
+ class IntIndex(SparseIndex):
+     indices: npt.NDArray[np.int32]
+     def __init__(
+         self, length: int, indices: Sequence[int], check_integrity: bool = ...
+     ) -> None: ...
+
+ class BlockIndex(SparseIndex):
+     nblocks: int
+     blocs: np.ndarray
+     blengths: np.ndarray
+     def __init__(
+         self, length: int, blocs: np.ndarray, blengths: np.ndarray
+     ) -> None: ...
+
+     # Override to have correct parameters
+     def intersect(self, other: SparseIndex) -> Self: ...
+     def make_union(self, y: SparseIndex) -> Self: ...
+
+ def make_mask_object_ndarray(
+     arr: npt.NDArray[np.object_], fill_value
+ ) -> npt.NDArray[np.bool_]: ...
+ def get_blocks(
+     indices: npt.NDArray[np.int32],
+ ) -> tuple[npt.NDArray[np.int32], npt.NDArray[np.int32]]: ...
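
A sketch of the two layouts: `IntIndex` stores explicit positions of the non-fill points, `BlockIndex` stores run starts and lengths, and as the stubs above show, the two are interconvertible:

from pandas._libs.sparse import IntIndex

idx = IntIndex(10, [2, 3, 4, 8])   # length 10, four stored points
idx.npoints               # 4
idx.ngaps                 # 6
blk = idx.to_block_index()
blk.nblocks               # 2 -- the runs [2, 3, 4] and [8]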
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/testing.pyi ADDED
@@ -0,0 +1,12 @@
+ def assert_dict_equal(a, b, compare_keys: bool = ...): ...
+ def assert_almost_equal(
+     a,
+     b,
+     rtol: float = ...,
+     atol: float = ...,
+     check_dtype: bool = ...,
+     obj=...,
+     lobj=...,
+     robj=...,
+     index_values=...,
+ ): ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslib.pyi ADDED
@@ -0,0 +1,37 @@
+ from datetime import tzinfo
+
+ import numpy as np
+
+ from pandas._typing import npt
+
+ def format_array_from_datetime(
+     values: npt.NDArray[np.int64],
+     tz: tzinfo | None = ...,
+     format: str | None = ...,
+     na_rep: str | float = ...,
+     reso: int = ...,  # NPY_DATETIMEUNIT
+ ) -> npt.NDArray[np.object_]: ...
+ def array_with_unit_to_datetime(
+     values: npt.NDArray[np.object_],
+     unit: str,
+     errors: str = ...,
+ ) -> tuple[np.ndarray, tzinfo | None]: ...
+ def first_non_null(values: np.ndarray) -> int: ...
+ def array_to_datetime(
+     values: npt.NDArray[np.object_],
+     errors: str = ...,
+     dayfirst: bool = ...,
+     yearfirst: bool = ...,
+     utc: bool = ...,
+     creso: int = ...,
+ ) -> tuple[np.ndarray, tzinfo | None]: ...
+
+ # returned ndarray may be object dtype or datetime64[ns]
+
+ def array_to_datetime_with_tz(
+     values: npt.NDArray[np.object_],
+     tz: tzinfo,
+     dayfirst: bool,
+     yearfirst: bool,
+     creso: int,
+ ) -> npt.NDArray[np.int64]: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/tslibs/tzconversion.pyi ADDED
@@ -0,0 +1,21 @@
+ from datetime import (
+     timedelta,
+     tzinfo,
+ )
+ from typing import Iterable
+
+ import numpy as np
+
+ from pandas._typing import npt
+
+ # tz_convert_from_utc_single exposed for testing
+ def tz_convert_from_utc_single(
+     utc_val: np.int64, tz: tzinfo, creso: int = ...
+ ) -> np.int64: ...
+ def tz_localize_to_utc(
+     vals: npt.NDArray[np.int64],
+     tz: tzinfo | None,
+     ambiguous: str | bool | Iterable[bool] | None = ...,
+     nonexistent: str | timedelta | np.timedelta64 | None = ...,
+     creso: int = ...,  # NPY_DATETIMEUNIT
+ ) -> npt.NDArray[np.int64]: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_libs/writers.pyi ADDED
@@ -0,0 +1,20 @@
+ import numpy as np
+
+ from pandas._typing import ArrayLike
+
+ def write_csv_rows(
+     data: list[ArrayLike],
+     data_index: np.ndarray,
+     nlevels: int,
+     cols: np.ndarray,
+     writer: object,  # _csv.writer
+ ) -> None: ...
+ def convert_json_to_lines(arr: str) -> str: ...
+ def max_len_string_array(
+     arr: np.ndarray,  # pandas_string[:]
+ ) -> int: ...
+ def word_len(val: object) -> int: ...
+ def string_array_replace_from_nan_rep(
+     arr: np.ndarray,  # np.ndarray[object, ndim=1]
+     nan_rep: object,
+ ) -> None: ...
infer_4_30_0/lib/python3.10/site-packages/pandas/_typing.py ADDED
@@ -0,0 +1,525 @@
+ from __future__ import annotations
+
+ from collections.abc import (
+     Hashable,
+     Iterator,
+     Mapping,
+     MutableMapping,
+     Sequence,
+ )
+ from datetime import (
+     date,
+     datetime,
+     timedelta,
+     tzinfo,
+ )
+ from os import PathLike
+ import sys
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     Callable,
+     Literal,
+     Optional,
+     Protocol,
+     Type as type_t,
+     TypeVar,
+     Union,
+     overload,
+ )
+
+ import numpy as np
+
+ # To prevent import cycles place any internal imports in the branch below
+ # and use a string literal forward reference to it in subsequent types
+ # https://mypy.readthedocs.io/en/latest/common_issues.html#import-cycles
+ if TYPE_CHECKING:
+     import numpy.typing as npt
+
+     from pandas._libs import (
+         NaTType,
+         Period,
+         Timedelta,
+         Timestamp,
+     )
+     from pandas._libs.tslibs import BaseOffset
+
+     from pandas.core.dtypes.dtypes import ExtensionDtype
+
+     from pandas import Interval
+     from pandas.arrays import (
+         DatetimeArray,
+         TimedeltaArray,
+     )
+     from pandas.core.arrays.base import ExtensionArray
+     from pandas.core.frame import DataFrame
+     from pandas.core.generic import NDFrame
+     from pandas.core.groupby.generic import (
+         DataFrameGroupBy,
+         GroupBy,
+         SeriesGroupBy,
+     )
+     from pandas.core.indexes.base import Index
+     from pandas.core.internals import (
+         ArrayManager,
+         BlockManager,
+         SingleArrayManager,
+         SingleBlockManager,
+     )
+     from pandas.core.resample import Resampler
+     from pandas.core.series import Series
+     from pandas.core.window.rolling import BaseWindow
+
+     from pandas.io.formats.format import EngFormatter
+     from pandas.tseries.holiday import AbstractHolidayCalendar
+
+     ScalarLike_co = Union[
+         int,
+         float,
+         complex,
+         str,
+         bytes,
+         np.generic,
+     ]
+
+     # numpy compatible types
+     NumpyValueArrayLike = Union[ScalarLike_co, npt.ArrayLike]
+     # Name "npt._ArrayLikeInt_co" is not defined [name-defined]
+     NumpySorter = Optional[npt._ArrayLikeInt_co]  # type: ignore[name-defined]
+
+     from typing import SupportsIndex
+
+     if sys.version_info >= (3, 10):
+         from typing import TypeGuard  # pyright: ignore[reportUnusedImport]
+     else:
+         from typing_extensions import TypeGuard  # pyright: ignore[reportUnusedImport]
+
+     if sys.version_info >= (3, 11):
+         from typing import Self  # pyright: ignore[reportUnusedImport]
+     else:
+         from typing_extensions import Self  # pyright: ignore[reportUnusedImport]
+ else:
+     npt: Any = None
+     Self: Any = None
+     TypeGuard: Any = None
+
+ HashableT = TypeVar("HashableT", bound=Hashable)
+ MutableMappingT = TypeVar("MutableMappingT", bound=MutableMapping)
+
+ # array-like
+
+ ArrayLike = Union["ExtensionArray", np.ndarray]
+ AnyArrayLike = Union[ArrayLike, "Index", "Series"]
+ TimeArrayLike = Union["DatetimeArray", "TimedeltaArray"]
+
+ # list-like
+
+ # from https://github.com/hauntsaninja/useful_types
+ # includes Sequence-like objects but excludes str and bytes
+ _T_co = TypeVar("_T_co", covariant=True)
+
+
+ class SequenceNotStr(Protocol[_T_co]):
+     @overload
+     def __getitem__(self, index: SupportsIndex, /) -> _T_co:
+         ...
+
+     @overload
+     def __getitem__(self, index: slice, /) -> Sequence[_T_co]:
+         ...
+
+     def __contains__(self, value: object, /) -> bool:
+         ...
+
+     def __len__(self) -> int:
+         ...
+
+     def __iter__(self) -> Iterator[_T_co]:
+         ...
+
+     def index(self, value: Any, /, start: int = 0, stop: int = ...) -> int:
+         ...
+
+     def count(self, value: Any, /) -> int:
+         ...
+
+     def __reversed__(self) -> Iterator[_T_co]:
+         ...
+
+
+ ListLike = Union[AnyArrayLike, SequenceNotStr, range]
+
+ # scalars
+
+ PythonScalar = Union[str, float, bool]
+ DatetimeLikeScalar = Union["Period", "Timestamp", "Timedelta"]
+ PandasScalar = Union["Period", "Timestamp", "Timedelta", "Interval"]
+ Scalar = Union[PythonScalar, PandasScalar, np.datetime64, np.timedelta64, date]
+ IntStrT = TypeVar("IntStrT", bound=Union[int, str])
+
+
+ # timestamp and timedelta convertible types
+
+ TimestampConvertibleTypes = Union[
+     "Timestamp", date, np.datetime64, np.int64, float, str
+ ]
+ TimestampNonexistent = Union[
+     Literal["shift_forward", "shift_backward", "NaT", "raise"], timedelta
+ ]
+ TimedeltaConvertibleTypes = Union[
+     "Timedelta", timedelta, np.timedelta64, np.int64, float, str
+ ]
+ Timezone = Union[str, tzinfo]
+
+ ToTimestampHow = Literal["s", "e", "start", "end"]
+
+ # NDFrameT is stricter and ensures that the same subclass of NDFrame always is
+ # used. E.g. `def func(a: NDFrameT) -> NDFrameT: ...` means that if a
+ # Series is passed into a function, a Series is always returned and if a DataFrame is
+ # passed in, a DataFrame is always returned.
+ NDFrameT = TypeVar("NDFrameT", bound="NDFrame")
+
+ NumpyIndexT = TypeVar("NumpyIndexT", np.ndarray, "Index")
+
+ AxisInt = int
+ Axis = Union[AxisInt, Literal["index", "columns", "rows"]]
+ IndexLabel = Union[Hashable, Sequence[Hashable]]
+ Level = Hashable
+ Shape = tuple[int, ...]
+ Suffixes = tuple[Optional[str], Optional[str]]
+ Ordered = Optional[bool]
+ JSONSerializable = Optional[Union[PythonScalar, list, dict]]
+ Frequency = Union[str, "BaseOffset"]
+ Axes = ListLike
+
+ RandomState = Union[
+     int,
+     np.ndarray,
+     np.random.Generator,
+     np.random.BitGenerator,
+     np.random.RandomState,
+ ]
+
+ # dtypes
+ NpDtype = Union[str, np.dtype, type_t[Union[str, complex, bool, object]]]
+ Dtype = Union["ExtensionDtype", NpDtype]
+ AstypeArg = Union["ExtensionDtype", "npt.DTypeLike"]
+ # DtypeArg specifies all allowable dtypes in a function's dtype argument
+ DtypeArg = Union[Dtype, dict[Hashable, Dtype]]
+ DtypeObj = Union[np.dtype, "ExtensionDtype"]
+
+ # converters
+ ConvertersArg = dict[Hashable, Callable[[Dtype], Dtype]]
+
+ # parse_dates
+ ParseDatesArg = Union[
+     bool, list[Hashable], list[list[Hashable]], dict[Hashable, list[Hashable]]
+ ]
+
+ # For functions like rename that convert one label to another
+ Renamer = Union[Mapping[Any, Hashable], Callable[[Any], Hashable]]
+
+ # to maintain type information across generic functions and parametrization
+ T = TypeVar("T")
+
+ # used in decorators to preserve the signature of the function it decorates
+ # see https://mypy.readthedocs.io/en/stable/generics.html#declaring-decorators
+ FuncType = Callable[..., Any]
+ F = TypeVar("F", bound=FuncType)
+
+ # types of vectorized key functions for DataFrame::sort_values and
+ # DataFrame::sort_index, among others
+ ValueKeyFunc = Optional[Callable[["Series"], Union["Series", AnyArrayLike]]]
+ IndexKeyFunc = Optional[Callable[["Index"], Union["Index", AnyArrayLike]]]
+
+ # types of `func` kwarg for DataFrame.aggregate and Series.aggregate
+ AggFuncTypeBase = Union[Callable, str]
+ AggFuncTypeDict = MutableMapping[
+     Hashable, Union[AggFuncTypeBase, list[AggFuncTypeBase]]
+ ]
+ AggFuncType = Union[
+     AggFuncTypeBase,
+     list[AggFuncTypeBase],
+     AggFuncTypeDict,
+ ]
+ AggObjType = Union[
+     "Series",
+     "DataFrame",
+     "GroupBy",
+     "SeriesGroupBy",
+     "DataFrameGroupBy",
+     "BaseWindow",
+     "Resampler",
+ ]
+
+ PythonFuncType = Callable[[Any], Any]
+
+ # filenames and file-like-objects
+ AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True)
+ AnyStr_contra = TypeVar("AnyStr_contra", str, bytes, contravariant=True)
+
+
+ class BaseBuffer(Protocol):
+     @property
+     def mode(self) -> str:
+         # for _get_filepath_or_buffer
+         ...
+
+     def seek(self, __offset: int, __whence: int = ...) -> int:
+         # with one argument: gzip.GzipFile, bz2.BZ2File
+         # with two arguments: zip.ZipFile, read_sas
+         ...
+
+     def seekable(self) -> bool:
+         # for bz2.BZ2File
+         ...
+
+     def tell(self) -> int:
+         # for zip.ZipFile, read_stata, to_stata
+         ...
+
+
+ class ReadBuffer(BaseBuffer, Protocol[AnyStr_co]):
+     def read(self, __n: int = ...) -> AnyStr_co:
+         # for BytesIOWrapper, gzip.GzipFile, bz2.BZ2File
+         ...
+
+
+ class WriteBuffer(BaseBuffer, Protocol[AnyStr_contra]):
+     def write(self, __b: AnyStr_contra) -> Any:
+         # for gzip.GzipFile, bz2.BZ2File
+         ...
+
+     def flush(self) -> Any:
+         # for gzip.GzipFile, bz2.BZ2File
+         ...
+
+
+ class ReadPickleBuffer(ReadBuffer[bytes], Protocol):
+     def readline(self) -> bytes:
+         ...
+
+
+ class WriteExcelBuffer(WriteBuffer[bytes], Protocol):
+     def truncate(self, size: int | None = ...) -> int:
+         ...
+
+
+ class ReadCsvBuffer(ReadBuffer[AnyStr_co], Protocol):
+     def __iter__(self) -> Iterator[AnyStr_co]:
+         # for engine=python
+         ...
+
+     def fileno(self) -> int:
+         # for _MMapWrapper
+         ...
+
+     def readline(self) -> AnyStr_co:
+         # for engine=python
+         ...
+
+     @property
+     def closed(self) -> bool:
+         # for engine=pyarrow
+         ...
+
+
+ FilePath = Union[str, "PathLike[str]"]
+
+ # for arbitrary kwargs passed during reading/writing files
+ StorageOptions = Optional[dict[str, Any]]
+
+
+ # compression keywords and compression
+ CompressionDict = dict[str, Any]
+ CompressionOptions = Optional[
+     Union[Literal["infer", "gzip", "bz2", "zip", "xz", "zstd", "tar"], CompressionDict]
+ ]
+
+ # types in DataFrameFormatter
+ FormattersType = Union[
+     list[Callable], tuple[Callable, ...], Mapping[Union[str, int], Callable]
+ ]
+ ColspaceType = Mapping[Hashable, Union[str, int]]
+ FloatFormatType = Union[str, Callable, "EngFormatter"]
+ ColspaceArgType = Union[
+     str, int, Sequence[Union[str, int]], Mapping[Hashable, Union[str, int]]
+ ]
+
+ # Arguments for fillna()
+ FillnaOptions = Literal["backfill", "bfill", "ffill", "pad"]
+ InterpolateOptions = Literal[
+     "linear",
+     "time",
+     "index",
+     "values",
+     "nearest",
+     "zero",
+     "slinear",
+     "quadratic",
+     "cubic",
+     "barycentric",
+     "polynomial",
+     "krogh",
+     "piecewise_polynomial",
+     "spline",
+     "pchip",
+     "akima",
+     "cubicspline",
+     "from_derivatives",
+ ]
+
+ # internals
+ Manager = Union[
+     "ArrayManager", "SingleArrayManager", "BlockManager", "SingleBlockManager"
+ ]
+ SingleManager = Union["SingleArrayManager", "SingleBlockManager"]
+ Manager2D = Union["ArrayManager", "BlockManager"]
+
+ # indexing
+ # PositionalIndexer -> valid 1D positional indexer, e.g. can pass
+ # to ndarray.__getitem__
+ # ScalarIndexer is for a single value as the index
+ # SequenceIndexer is for list like or slices (but not tuples)
+ # PositionalIndexerTuple extends the PositionalIndexer for 2D arrays
+ # These are used in various __getitem__ overloads
+ # TODO(typing#684): add Ellipsis, see
+ # https://github.com/python/typing/issues/684#issuecomment-548203158
+ # https://bugs.python.org/issue41810
+ # Using List[int] here rather than Sequence[int] to disallow tuples.
+ ScalarIndexer = Union[int, np.integer]
+ SequenceIndexer = Union[slice, list[int], np.ndarray]
+ PositionalIndexer = Union[ScalarIndexer, SequenceIndexer]
+ PositionalIndexerTuple = tuple[PositionalIndexer, PositionalIndexer]
+ PositionalIndexer2D = Union[PositionalIndexer, PositionalIndexerTuple]
+ if TYPE_CHECKING:
+     TakeIndexer = Union[Sequence[int], Sequence[np.integer], npt.NDArray[np.integer]]
+ else:
+     TakeIndexer = Any
+
+ # Shared by functions such as drop and astype
+ IgnoreRaise = Literal["ignore", "raise"]
+
+ # Windowing rank methods
+ WindowingRankType = Literal["average", "min", "max"]
+
+ # read_csv engines
+ CSVEngine = Literal["c", "python", "pyarrow", "python-fwf"]
+
+ # read_json engines
+ JSONEngine = Literal["ujson", "pyarrow"]
+
+ # read_xml parsers
+ XMLParsers = Literal["lxml", "etree"]
+
+ # read_html flavors
+ HTMLFlavors = Literal["lxml", "html5lib", "bs4"]
+
+ # Interval closed type
+ IntervalLeftRight = Literal["left", "right"]
+ IntervalClosedType = Union[IntervalLeftRight, Literal["both", "neither"]]
+
+ # datetime and NaTType
+ DatetimeNaTType = Union[datetime, "NaTType"]
+ DateTimeErrorChoices = Union[IgnoreRaise, Literal["coerce"]]
+
+ # sort_index
+ SortKind = Literal["quicksort", "mergesort", "heapsort", "stable"]
+ NaPosition = Literal["first", "last"]
+
+ # Arguments for nsmallest and nlargest
+ NsmallestNlargestKeep = Literal["first", "last", "all"]
+
+ # quantile interpolation
+ QuantileInterpolation = Literal["linear", "lower", "higher", "midpoint", "nearest"]
+
+ # plotting
+ PlottingOrientation = Literal["horizontal", "vertical"]
+
+ # dropna
+ AnyAll = Literal["any", "all"]
+
+ # merge
+ MergeHow = Literal["left", "right", "inner", "outer", "cross"]
+ MergeValidate = Literal[
+     "one_to_one",
+     "1:1",
+     "one_to_many",
+     "1:m",
+     "many_to_one",
+     "m:1",
+     "many_to_many",
+     "m:m",
+ ]
+
+ # join
+ JoinHow = Literal["left", "right", "inner", "outer"]
+ JoinValidate = Literal[
+     "one_to_one",
+     "1:1",
+     "one_to_many",
+     "1:m",
+     "many_to_one",
+     "m:1",
+     "many_to_many",
+     "m:m",
+ ]
+
+ # reindex
+ ReindexMethod = Union[FillnaOptions, Literal["nearest"]]
+
+ MatplotlibColor = Union[str, Sequence[float]]
+ TimeGrouperOrigin = Union[
+     "Timestamp", Literal["epoch", "start", "start_day", "end", "end_day"]
+ ]
+ TimeAmbiguous = Union[Literal["infer", "NaT", "raise"], "npt.NDArray[np.bool_]"]
+ TimeNonexistent = Union[
+     Literal["shift_forward", "shift_backward", "NaT", "raise"], timedelta
+ ]
+ DropKeep = Literal["first", "last", False]
+ CorrelationMethod = Union[
+     Literal["pearson", "kendall", "spearman"], Callable[[np.ndarray, np.ndarray], float]
+ ]
+ AlignJoin = Literal["outer", "inner", "left", "right"]
+ DtypeBackend = Literal["pyarrow", "numpy_nullable"]
+
+ TimeUnit = Literal["s", "ms", "us", "ns"]
+ OpenFileErrors = Literal[
+     "strict",
+     "ignore",
+     "replace",
+     "surrogateescape",
+     "xmlcharrefreplace",
+     "backslashreplace",
+     "namereplace",
+ ]
+
+ # update
+ UpdateJoin = Literal["left"]
+
+ # applymap
+ NaAction = Literal["ignore"]
+
+ # from_dict
+ FromDictOrient = Literal["columns", "index", "tight"]
+
+ # to_gbq
+ ToGbqIfexist = Literal["fail", "replace", "append"]
+
+ # to_stata
+ ToStataByteorder = Literal[">", "<", "little", "big"]
+
+ # ExcelWriter
+ ExcelWriterIfSheetExists = Literal["error", "new", "replace", "overlay"]
+
+ # Offsets
+ OffsetCalendar = Union[np.busdaycalendar, "AbstractHolidayCalendar"]
+
+ # read_csv: usecols
+ UsecolsArgType = Union[
+     SequenceNotStr[Hashable],
+     range,
+     AnyArrayLike,
+     Callable[[HashableT], bool],
+     None,
+ ]
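
The `NDFrameT` comment above describes the key pattern in this module: a bound `TypeVar` lets a function promise to return the same pandas class it was given. A minimal sketch using public types:

from typing import TypeVar
import pandas as pd
from pandas.core.generic import NDFrame

NDFrameT = TypeVar("NDFrameT", bound=NDFrame)

def head2(obj: NDFrameT) -> NDFrameT:
    # Series in -> Series out; DataFrame in -> DataFrame out
    return obj.head(2)

s: pd.Series = head2(pd.Series([1, 2, 3]))          # type-checks
df: pd.DataFrame = head2(pd.DataFrame({"a": [1]}))  # type-checks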
infer_4_30_0/lib/python3.10/site-packages/pandas/_version.py ADDED
@@ -0,0 +1,692 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
+ # This file helps to compute a version number in source trees obtained from
+ # git-archive tarball (such as those provided by GitHub's download-from-tag
+ # feature). Distribution tarballs (built by setup.py sdist) and build
+ # directories (produced by setup.py build) will contain a much shorter file
+ # that just contains the computed version number.
+
+ # This file is released into the public domain.
+ # Generated by versioneer-0.28
+ # https://github.com/python-versioneer/python-versioneer
+
+ """Git implementation of _version.py."""
+
+ import errno
+ import functools
+ import os
+ import re
+ import subprocess
+ import sys
+ from typing import Callable
+
+
+ def get_keywords():
+     """Get the keywords needed to look up the version information."""
+     # these strings will be replaced by git during git-archive.
+     # setup.py/versioneer.py will grep for the variable names, so they must
+     # each be defined on a line of their own. _version.py will just call
+     # get_keywords().
+     git_refnames = "$Format:%d$"
+     git_full = "$Format:%H$"
+     git_date = "$Format:%ci$"
+     keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
+     return keywords
+
+
+ class VersioneerConfig:
+     """Container for Versioneer configuration parameters."""
+
+
+ def get_config():
+     """Create, populate and return the VersioneerConfig() object."""
+     # these strings are filled in when 'setup.py versioneer' creates
+     # _version.py
+     cfg = VersioneerConfig()
+     cfg.VCS = "git"
+     cfg.style = "pep440"
+     cfg.tag_prefix = "v"
+     cfg.parentdir_prefix = "pandas-"
+     cfg.versionfile_source = "pandas/_version.py"
+     cfg.verbose = False
+     return cfg
+
+
+ class NotThisMethod(Exception):
+     """Exception raised if a method is not valid for the current scenario."""
+
+
+ LONG_VERSION_PY: dict[str, str] = {}
+ HANDLERS: dict[str, dict[str, Callable]] = {}
+
+
+ def register_vcs_handler(vcs, method):  # decorator
+     """Create decorator to mark a method as the handler of a VCS."""
+
+     def decorate(f):
+         """Store f in HANDLERS[vcs][method]."""
+         if vcs not in HANDLERS:
+             HANDLERS[vcs] = {}
+         HANDLERS[vcs][method] = f
+         return f
+
+     return decorate
+
+
+ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
+     """Call the given command(s)."""
+     assert isinstance(commands, list)
+     process = None
+
+     popen_kwargs = {}
+     if sys.platform == "win32":
+         # This hides the console window if pythonw.exe is used
+         startupinfo = subprocess.STARTUPINFO()
+         startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+         popen_kwargs["startupinfo"] = startupinfo
+
+     for command in commands:
+         dispcmd = str([command] + args)
+         try:
+             # remember shell=False, so use git.cmd on windows, not just git
+             process = subprocess.Popen(
+                 [command] + args,
+                 cwd=cwd,
+                 env=env,
+                 stdout=subprocess.PIPE,
+                 stderr=(subprocess.PIPE if hide_stderr else None),
+                 **popen_kwargs,
+             )
+             break
+         except OSError:
+             e = sys.exc_info()[1]
+             if e.errno == errno.ENOENT:
+                 continue
+             if verbose:
+                 print(f"unable to run {dispcmd}")
+                 print(e)
+             return None, None
+     else:
+         if verbose:
+             print(f"unable to find command, tried {commands}")
+         return None, None
+     stdout = process.communicate()[0].strip().decode()
+     if process.returncode != 0:
+         if verbose:
+             print(f"unable to run {dispcmd} (error)")
+             print(f"stdout was {stdout}")
+         return None, process.returncode
+     return stdout, process.returncode
+
+
+ def versions_from_parentdir(parentdir_prefix, root, verbose):
+     """Try to determine the version from the parent directory name.
+
+     Source tarballs conventionally unpack into a directory that includes both
+     the project name and a version string. We will also support searching up
+     two directory levels for an appropriately named parent directory
+     """
+     rootdirs = []
+
+     for _ in range(3):
+         dirname = os.path.basename(root)
+         if dirname.startswith(parentdir_prefix):
+             return {
+                 "version": dirname[len(parentdir_prefix) :],
+                 "full-revisionid": None,
+                 "dirty": False,
+                 "error": None,
+                 "date": None,
+             }
+         rootdirs.append(root)
+         root = os.path.dirname(root)  # up a level
+
+     if verbose:
+         print(
+             f"Tried directories {str(rootdirs)} \
+ but none started with prefix {parentdir_prefix}"
+         )
+     raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
+
+
+ @register_vcs_handler("git", "get_keywords")
+ def git_get_keywords(versionfile_abs):
+     """Extract version information from the given file."""
+     # the code embedded in _version.py can just fetch the value of these
+     # keywords. When used from setup.py, we don't want to import _version.py,
+     # so we do it with a regexp instead. This function is not used from
+     # _version.py.
+     keywords = {}
+     try:
+         with open(versionfile_abs, encoding="utf-8") as fobj:
+             for line in fobj:
+                 if line.strip().startswith("git_refnames ="):
+                     mo = re.search(r'=\s*"(.*)"', line)
+                     if mo:
+                         keywords["refnames"] = mo.group(1)
+                 if line.strip().startswith("git_full ="):
+                     mo = re.search(r'=\s*"(.*)"', line)
+                     if mo:
+                         keywords["full"] = mo.group(1)
+                 if line.strip().startswith("git_date ="):
+                     mo = re.search(r'=\s*"(.*)"', line)
+                     if mo:
+                         keywords["date"] = mo.group(1)
+     except OSError:
+         pass
+     return keywords
+
+
+ @register_vcs_handler("git", "keywords")
+ def git_versions_from_keywords(keywords, tag_prefix, verbose):
+     """Get version information from git keywords."""
+     if "refnames" not in keywords:
+         raise NotThisMethod("Short version file found")
+     date = keywords.get("date")
+     if date is not None:
+         # Use only the last line. Previous lines may contain GPG signature
+         # information.
+         date = date.splitlines()[-1]
+
+         # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
+         # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
+         # -like" string, which we must then edit to make compliant), because
+         # it's been around since git-1.5.3, and it's too difficult to
+         # discover which version we're using, or to work around using an
+         # older one.
+         date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+     refnames = keywords["refnames"].strip()
+     if refnames.startswith("$Format"):
+         if verbose:
+             print("keywords are unexpanded, not using")
+         raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
+     refs = {r.strip() for r in refnames.strip("()").split(",")}
+     # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+     # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+     TAG = "tag: "
+     tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
+     if not tags:
+         # Either we're using git < 1.8.3, or there really are no tags. We use
+         # a heuristic: assume all version tags have a digit. The old git %d
+         # expansion behaves like git log --decorate=short and strips out the
+         # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+         # between branches and tags. By ignoring refnames without digits, we
+         # filter out many common branch names like "release" and
+         # "stabilization", as well as "HEAD" and "master".
+         tags = {r for r in refs if re.search(r"\d", r)}
+         if verbose:
+             print(f"discarding '{','.join(refs - tags)}', no digits")
+     if verbose:
+         print(f"likely tags: {','.join(sorted(tags))}")
+     for ref in sorted(tags):
+         # sorting will prefer e.g. "2.0" over "2.0rc1"
+         if ref.startswith(tag_prefix):
+             r = ref[len(tag_prefix) :]
+             # Filter out refs that exactly match prefix or that don't start
+             # with a number once the prefix is stripped (mostly a concern
+             # when prefix is '')
+             if not re.match(r"\d", r):
+                 continue
+             if verbose:
+                 print(f"picking {r}")
+             return {
+                 "version": r,
+                 "full-revisionid": keywords["full"].strip(),
+                 "dirty": False,
+                 "error": None,
+                 "date": date,
+             }
+     # no suitable tags, so version is "0+unknown", but full hex is still there
+     if verbose:
+         print("no suitable tags, using unknown + full revision id")
+     return {
+         "version": "0+unknown",
+         "full-revisionid": keywords["full"].strip(),
+         "dirty": False,
+         "error": "no suitable tags",
+         "date": None,
+     }
+
+
+ @register_vcs_handler("git", "pieces_from_vcs")
+ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
+     """Get version from 'git describe' in the root of the source tree.
+
+     This only gets called if the git-archive 'subst' keywords were *not*
+     expanded, and _version.py hasn't already been rewritten with a short
+     version string, meaning we're inside a checked out source tree.
+     """
+     GITS = ["git"]
+     if sys.platform == "win32":
+         GITS = ["git.cmd", "git.exe"]
+
+     # GIT_DIR can interfere with correct operation of Versioneer.
+     # It may be intended to be passed to the Versioneer-versioned project,
+     # but that should not change where we get our version from.
+     env = os.environ.copy()
+     env.pop("GIT_DIR", None)
+     runner = functools.partial(runner, env=env)
+
+     _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose)
+     if rc != 0:
+         if verbose:
+             print(f"Directory {root} not under git control")
+         raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+     # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+     # if there isn't one, this yields HEX[-dirty] (no NUM)
+     describe_out, rc = runner(
+         GITS,
+         [
+             "describe",
+             "--tags",
+             "--dirty",
+             "--always",
+             "--long",
+             "--match",
+             f"{tag_prefix}[[:digit:]]*",
+         ],
+         cwd=root,
+     )
+     # --long was added in git-1.5.5
+     if describe_out is None:
+         raise NotThisMethod("'git describe' failed")
+     describe_out = describe_out.strip()
+     full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
+     if full_out is None:
+         raise NotThisMethod("'git rev-parse' failed")
+     full_out = full_out.strip()
+
+     pieces = {}
+     pieces["long"] = full_out
+     pieces["short"] = full_out[:7]  # maybe improved later
+     pieces["error"] = None
+
+     branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
+     # --abbrev-ref was added in git-1.6.3
+     if rc != 0 or branch_name is None:
+         raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+     branch_name = branch_name.strip()
+
+     if branch_name == "HEAD":
+         # If we aren't exactly on a branch, pick a branch which represents
+         # the current commit. If all else fails, we are on a branchless
+         # commit.
+         branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+         # --contains was added in git-1.5.4
+         if rc != 0 or branches is None:
+             raise NotThisMethod("'git branch --contains' returned error")
+         branches = branches.split("\n")
+
+         # Remove the first line if we're running detached
+         if "(" in branches[0]:
+             branches.pop(0)
+
+         # Strip off the leading "* " from the list of branches.
+         branches = [branch[2:] for branch in branches]
+         if "master" in branches:
+             branch_name = "master"
+         elif not branches:
+             branch_name = None
+         else:
+             # Pick the first branch that is returned. Good or bad.
+             branch_name = branches[0]
+
+     pieces["branch"] = branch_name
+
+     # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+     # TAG might have hyphens.
+     git_describe = describe_out
+
+     # look for -dirty suffix
+     dirty = git_describe.endswith("-dirty")
+     pieces["dirty"] = dirty
+     if dirty:
+         git_describe = git_describe[: git_describe.rindex("-dirty")]
+
+     # now we have TAG-NUM-gHEX or HEX
+
+     if "-" in git_describe:
+         # TAG-NUM-gHEX
+         mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
+         if not mo:
+             # unparsable. Maybe git-describe is misbehaving?
+             pieces["error"] = f"unable to parse git-describe output: '{describe_out}'"
+             return pieces
+
+         # tag
+         full_tag = mo.group(1)
+         if not full_tag.startswith(tag_prefix):
+             if verbose:
+                 fmt = "tag '%s' doesn't start with prefix '%s'"
+                 print(fmt % (full_tag, tag_prefix))
+             pieces[
+                 "error"
+             ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
+             return pieces
+         pieces["closest-tag"] = full_tag[len(tag_prefix) :]
+
+         # distance: number of commits since tag
+         pieces["distance"] = int(mo.group(2))
+
+         # commit: short hex revision ID
+         pieces["short"] = mo.group(3)
+
+     else:
+         # HEX: no tags
+         pieces["closest-tag"] = None
+         out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+         pieces["distance"] = len(out.split())  # total number of commits
+
+     # commit date: see ISO-8601 comment in git_versions_from_keywords()
+     date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+     # Use only the last line. Previous lines may contain GPG signature
+     # information.
+     date = date.splitlines()[-1]
+     pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+     return pieces
+
+
+ def plus_or_dot(pieces) -> str:
+     """Return a + if we don't already have one, else return a ."""
+     if "+" in pieces.get("closest-tag", ""):
+         return "."
+     return "+"
+
+
+ def render_pep440(pieces):
+     """Build up version string, with post-release "local version identifier".
+
+     Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+     get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+     Exceptions:
+     1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+     """
+     if pieces["closest-tag"]:
+         rendered = pieces["closest-tag"]
+         if pieces["distance"] or pieces["dirty"]:
+             rendered += plus_or_dot(pieces)
+             rendered += f"{pieces['distance']}.g{pieces['short']}"
+             if pieces["dirty"]:
+                 rendered += ".dirty"
+     else:
+         # exception #1
+         rendered = f"0+untagged.{pieces['distance']}.g{pieces['short']}"
+         if pieces["dirty"]:
+             rendered += ".dirty"
+     return rendered
+
+
+ def render_pep440_branch(pieces):
+     """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+     The ".dev0" means not master branch. Note that .dev0 sorts backwards
+     (a feature branch will appear "older" than the master branch).
+
+     Exceptions:
+     1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
+     """
+     if pieces["closest-tag"]:
+         rendered = pieces["closest-tag"]
+         if pieces["distance"] or pieces["dirty"]:
+             if pieces["branch"] != "master":
+                 rendered += ".dev0"
+             rendered += plus_or_dot(pieces)
+             rendered += f"{pieces['distance']}.g{pieces['short']}"
+             if pieces["dirty"]:
+                 rendered += ".dirty"
+     else:
+         # exception #1
+         rendered = "0"
+         if pieces["branch"] != "master":
+             rendered += ".dev0"
+         rendered += f"+untagged.{pieces['distance']}.g{pieces['short']}"
+         if pieces["dirty"]:
+             rendered += ".dirty"
+     return rendered
+
+
+ def pep440_split_post(ver):
+     """Split pep440 version string at the post-release segment.
+
+     Returns the release segments before the post-release and the
+     post-release version number (or None if no post-release segment is present).
+     """
+     vc = str.split(ver, ".post")
+     return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+ def render_pep440_pre(pieces):
+     """TAG[.postN.devDISTANCE] -- No -dirty.
+
+     Exceptions:
+     1: no tags. 0.post0.devDISTANCE
+     """
+     if pieces["closest-tag"]:
+         if pieces["distance"]:
+             # update the post release segment
+             tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+             rendered = tag_version
+             if post_version is not None:
+                 rendered += f".post{post_version + 1}.dev{pieces['distance']}"
+             else:
+                 rendered += f".post0.dev{pieces['distance']}"
+         else:
+             # no commits, use the tag as the version
+             rendered = pieces["closest-tag"]
+     else:
+         # exception #1
+         rendered = f"0.post0.dev{pieces['distance']}"
+     return rendered
+
+
+ def render_pep440_post(pieces):
+     """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+     The ".dev0" means dirty. Note that .dev0 sorts backwards
+     (a dirty tree will appear "older" than the corresponding clean one),
+     but you shouldn't be releasing software with -dirty anyways.
+
+     Exceptions:
+     1: no tags. 0.postDISTANCE[.dev0]
+     """
+     if pieces["closest-tag"]:
+         rendered = pieces["closest-tag"]
+         if pieces["distance"] or pieces["dirty"]:
+             rendered += f".post{pieces['distance']}"
+             if pieces["dirty"]:
+                 rendered += ".dev0"
+             rendered += plus_or_dot(pieces)
+             rendered += f"g{pieces['short']}"
+     else:
+         # exception #1
+         rendered = f"0.post{pieces['distance']}"
+         if pieces["dirty"]:
+             rendered += ".dev0"
+         rendered += f"+g{pieces['short']}"
+     return rendered
+
+
+ def render_pep440_post_branch(pieces):
+     """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+     The ".dev0" means not master branch.
+
+     Exceptions:
+     1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+     """
+     if pieces["closest-tag"]:
+         rendered = pieces["closest-tag"]
+         if pieces["distance"] or pieces["dirty"]:
+             rendered += f".post{pieces['distance']}"
+             if pieces["branch"] != "master":
+                 rendered += ".dev0"
+             rendered += plus_or_dot(pieces)
+             rendered += f"g{pieces['short']}"
+             if pieces["dirty"]:
+                 rendered += ".dirty"
+     else:
+         # exception #1
+         rendered = f"0.post{pieces['distance']}"
+         if pieces["branch"] != "master":
+             rendered += ".dev0"
+         rendered += f"+g{pieces['short']}"
+         if pieces["dirty"]:
+             rendered += ".dirty"
+     return rendered
+
+
+ def render_pep440_old(pieces):
+     """TAG[.postDISTANCE[.dev0]] .
+
+     The ".dev0" means dirty.
+
+     Exceptions:
+     1: no tags. 0.postDISTANCE[.dev0]
+     """
+     if pieces["closest-tag"]:
+         rendered = pieces["closest-tag"]
+         if pieces["distance"] or pieces["dirty"]:
+             rendered += f"0.post{pieces['distance']}"
+             if pieces["dirty"]:
+                 rendered += ".dev0"
+     else:
+         # exception #1
+         rendered = f"0.post{pieces['distance']}"
+         if pieces["dirty"]:
+             rendered += ".dev0"
+     return rendered
+
+
+ def render_git_describe(pieces):
+     """TAG[-DISTANCE-gHEX][-dirty].
+
+     Like 'git describe --tags --dirty --always'.
+
+     Exceptions:
+     1: no tags. HEX[-dirty] (note: no 'g' prefix)
+     """
+     if pieces["closest-tag"]:
+         rendered = pieces["closest-tag"]
+         if pieces["distance"]:
+             rendered += f"-{pieces['distance']}-g{pieces['short']}"
+     else:
+         # exception #1
+         rendered = pieces["short"]
+     if pieces["dirty"]:
+         rendered += "-dirty"
+     return rendered
+
+
+ def render_git_describe_long(pieces):
+     """TAG-DISTANCE-gHEX[-dirty].
+
+     Like 'git describe --tags --dirty --always -long'.
+     The distance/hash is unconditional.
+
+     Exceptions:
+     1: no tags. HEX[-dirty] (note: no 'g' prefix)
+     """
+     if pieces["closest-tag"]:
+         rendered = pieces["closest-tag"]
+         rendered += f"-{pieces['distance']}-g{pieces['short']}"
+     else:
+         # exception #1
+         rendered = pieces["short"]
+     if pieces["dirty"]:
+         rendered += "-dirty"
+     return rendered
+
+
+ def render(pieces, style):
+     """Render the given version pieces into the requested style."""
+     if pieces["error"]:
+         return {
+             "version": "unknown",
+             "full-revisionid": pieces.get("long"),
+             "dirty": None,
+             "error": pieces["error"],
+             "date": None,
+         }
+
+     if not style or style == "default":
+         style = "pep440"  # the default
+
+     if style == "pep440":
+         rendered = render_pep440(pieces)
+     elif style == "pep440-branch":
+         rendered = render_pep440_branch(pieces)
+     elif style == "pep440-pre":
+         rendered = render_pep440_pre(pieces)
+     elif style == "pep440-post":
+         rendered = render_pep440_post(pieces)
+     elif style == "pep440-post-branch":
+         rendered = render_pep440_post_branch(pieces)
+     elif style == "pep440-old":
+         rendered = render_pep440_old(pieces)
+     elif style == "git-describe":
+         rendered = render_git_describe(pieces)
+     elif style == "git-describe-long":
+         rendered = render_git_describe_long(pieces)
+     else:
+         raise ValueError(f"unknown style '{style}'")
+
+     return {
+         "version": rendered,
+         "full-revisionid": pieces["long"],
+         "dirty": pieces["dirty"],
+         "error": None,
+         "date": pieces.get("date"),
+     }
+
+
+ def get_versions():
+     """Get version information or return default if unable to do so."""
+     # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+     # __file__, we can work backwards from there to the root. Some
+     # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+     # case we can only use expanded keywords.
+
+     cfg = get_config()
+     verbose = cfg.verbose
+
+     try:
+         return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
+     except NotThisMethod:
+         pass
+
+     try:
+         root = os.path.realpath(__file__)
+         # versionfile_source is the relative path from the top of the source
+         # tree (where the .git directory might live) to this file. Invert
+         # this to find the root from __file__.
+         for _ in cfg.versionfile_source.split("/"):
+             root = os.path.dirname(root)
+     except NameError:
+         return {
+             "version": "0+unknown",
+             "full-revisionid": None,
+             "dirty": None,
+             "error": "unable to find root of source tree",
+             "date": None,
+         }
+
+     try:
+         pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
+         return render(pieces, cfg.style)
+     except NotThisMethod:
+         pass
+
+     try:
+         if cfg.parentdir_prefix:
+             return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+     except NotThisMethod:
+         pass
+
+     return {
+         "version": "0+unknown",
+         "full-revisionid": None,
+         "dirty": None,
+         "error": "unable to compute version",
+         "date": None,
+     }
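
As a quick illustration of the rendering pipeline above, here is a minimal sketch; the pieces dict is hypothetical, shaped like what git_pieces_from_vcs() would return for a dirty checkout 7 commits past a v2.2.3 tag:

    # Hypothetical "pieces" as produced by git_pieces_from_vcs().
    pieces = {
        "long": "0691c5cf90477d3503834d983f69350f250a6ff7",
        "short": "0691c5c",
        "closest-tag": "2.2.3",
        "distance": 7,
        "dirty": True,
        "branch": "main",
        "error": None,
        "date": "2024-09-20T13:08:42+0200",
    }
    # render() dispatches on cfg.style; pandas configures "pep440",
    # so this prints "2.2.3+7.g0691c5c.dirty".
    print(render(pieces, "pep440")["version"])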
infer_4_30_0/lib/python3.10/site-packages/pandas/_version_meson.py ADDED
@@ -0,0 +1,2 @@
+ __version__="2.2.3"
+ __git_version__="0691c5cf90477d3503834d983f69350f250a6ff7"
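
_version_meson.py is the short, static counterpart that meson-based builds write out in place of the git probing above; a minimal usage sketch:

    # Reading the pinned version from an installed, meson-built pandas.
    from pandas._version_meson import __git_version__, __version__

    print(__version__)      # "2.2.3"
    print(__git_version__)  # full commit hash of the build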
infer_4_30_0/lib/python3.10/site-packages/pandas/conftest.py ADDED
@@ -0,0 +1,1980 @@
+ """
+ This file is very long and growing, but it was decided to not split it yet, as
+ it's still manageable (2020-03-17, ~1.1k LoC). See gh-31989
+
+ Instead of splitting it was decided to define sections here:
+ - Configuration / Settings
+ - Autouse fixtures
+ - Common arguments
+ - Missing values & co.
+ - Classes
+ - Indices
+ - Series'
+ - DataFrames
+ - Operators & Operations
+ - Data sets/files
+ - Time zones
+ - Dtypes
+ - Misc
+ """
+ from __future__ import annotations
+
+ from collections import abc
+ from datetime import (
+     date,
+     datetime,
+     time,
+     timedelta,
+     timezone,
+ )
+ from decimal import Decimal
+ import operator
+ import os
+ from typing import (
+     TYPE_CHECKING,
+     Callable,
+ )
+
+ from dateutil.tz import (
+     tzlocal,
+     tzutc,
+ )
+ import hypothesis
+ from hypothesis import strategies as st
+ import numpy as np
+ import pytest
+ from pytz import (
+     FixedOffset,
+     utc,
+ )
+
+ from pandas._config.config import _get_option
+
+ import pandas.util._test_decorators as td
+
+ from pandas.core.dtypes.dtypes import (
+     DatetimeTZDtype,
+     IntervalDtype,
+ )
+
+ import pandas as pd
+ from pandas import (
+     CategoricalIndex,
+     DataFrame,
+     Interval,
+     IntervalIndex,
+     Period,
+     RangeIndex,
+     Series,
+     Timedelta,
+     Timestamp,
+     date_range,
+     period_range,
+     timedelta_range,
+ )
+ import pandas._testing as tm
+ from pandas.core import ops
+ from pandas.core.indexes.api import (
+     Index,
+     MultiIndex,
+ )
+ from pandas.util.version import Version
+
+ if TYPE_CHECKING:
+     from collections.abc import (
+         Hashable,
+         Iterator,
+     )
+
+ try:
+     import pyarrow as pa
+ except ImportError:
+     has_pyarrow = False
+ else:
+     del pa
+     has_pyarrow = True
+
+ import zoneinfo
+
+ try:
+     zoneinfo.ZoneInfo("UTC")
+ except zoneinfo.ZoneInfoNotFoundError:
+     zoneinfo = None  # type: ignore[assignment]
+
+
+ # ----------------------------------------------------------------
+ # Configuration / Settings
+ # ----------------------------------------------------------------
+ # pytest
+
+
+ def pytest_addoption(parser) -> None:
+     parser.addoption(
+         "--no-strict-data-files",
+         action="store_false",
+         help="Don't fail if a test is skipped for a missing data file.",
+     )
+
+
+ def ignore_doctest_warning(item: pytest.Item, path: str, message: str) -> None:
+     """Ignore doctest warning.
+
+     Parameters
+     ----------
+     item : pytest.Item
+         pytest test item.
+     path : str
+         Module path to Python object, e.g. "pandas.core.frame.DataFrame.append". A
+         warning will be filtered when item.name ends with the given path. So it is
+         sufficient to specify e.g. "DataFrame.append".
+     message : str
+         Message to be filtered.
+     """
+     if item.name.endswith(path):
+         item.add_marker(pytest.mark.filterwarnings(f"ignore:{message}"))
+
+
+ def pytest_collection_modifyitems(items, config) -> None:
+     is_doctest = config.getoption("--doctest-modules") or config.getoption(
+         "--doctest-cython", default=False
+     )
+
+     # Warnings from doctests that can be ignored; place reason in comment above.
+     # Each entry specifies (path, message) - see the ignore_doctest_warning function
+     ignored_doctest_warnings = [
+         ("is_int64_dtype", "is_int64_dtype is deprecated"),
+         ("is_interval_dtype", "is_interval_dtype is deprecated"),
+         ("is_period_dtype", "is_period_dtype is deprecated"),
+         ("is_datetime64tz_dtype", "is_datetime64tz_dtype is deprecated"),
+         ("is_categorical_dtype", "is_categorical_dtype is deprecated"),
+         ("is_sparse", "is_sparse is deprecated"),
+         ("DataFrameGroupBy.fillna", "DataFrameGroupBy.fillna is deprecated"),
+         ("NDFrame.replace", "The 'method' keyword"),
+         ("NDFrame.replace", "Series.replace without 'value'"),
+         ("NDFrame.clip", "Downcasting behavior in Series and DataFrame methods"),
+         ("Series.idxmin", "The behavior of Series.idxmin"),
+         ("Series.idxmax", "The behavior of Series.idxmax"),
+         ("SeriesGroupBy.fillna", "SeriesGroupBy.fillna is deprecated"),
+         ("SeriesGroupBy.idxmin", "The behavior of Series.idxmin"),
+         ("SeriesGroupBy.idxmax", "The behavior of Series.idxmax"),
+         # Docstring divides by zero to show behavior difference
+         ("missing.mask_zero_div_zero", "divide by zero encountered"),
+         (
+             "to_pydatetime",
+             "The behavior of DatetimeProperties.to_pydatetime is deprecated",
+         ),
+         (
+             "pandas.core.generic.NDFrame.bool",
+             "(Series|DataFrame).bool is now deprecated and will be removed "
+             "in future version of pandas",
+         ),
+         (
+             "pandas.core.generic.NDFrame.first",
+             "first is deprecated and will be removed in a future version. "
+             "Please create a mask and filter using `.loc` instead",
+         ),
+         (
+             "Resampler.fillna",
+             "DatetimeIndexResampler.fillna is deprecated",
+         ),
+         (
+             "DataFrameGroupBy.fillna",
+             "DataFrameGroupBy.fillna with 'method' is deprecated",
+         ),
+         (
+             "DataFrameGroupBy.fillna",
+             "DataFrame.fillna with 'method' is deprecated",
+         ),
+         ("read_parquet", "Passing a BlockManager to DataFrame is deprecated"),
+     ]
+
+     if is_doctest:
+         for item in items:
+             for path, message in ignored_doctest_warnings:
+                 ignore_doctest_warning(item, path, message)
+
+
+ hypothesis_health_checks = [hypothesis.HealthCheck.too_slow]
+ if Version(hypothesis.__version__) >= Version("6.83.2"):
+     hypothesis_health_checks.append(hypothesis.HealthCheck.differing_executors)
+
+ # Hypothesis
+ hypothesis.settings.register_profile(
+     "ci",
+     # Hypothesis timing checks are tuned for scalars by default, so we bump
+     # them from 200ms to 500ms per test case as the global default. If this
+     # is too short for a specific test, (a) try to make it faster, and (b)
+     # if it really is slow add `@settings(deadline=...)` with a working value,
+     # or `deadline=None` to entirely disable timeouts for that test.
+     # 2022-02-09: Changed deadline from 500 -> None. Deadline leads to
+     # non-actionable, flaky CI failures (# GH 24641, 44969, 45118, 44969)
+     deadline=None,
+     suppress_health_check=tuple(hypothesis_health_checks),
+ )
+ hypothesis.settings.load_profile("ci")
+
+ # Registering these strategies makes them globally available via st.from_type,
+ # which is used for offsets in tests/tseries/offsets/test_offsets_properties.py
+ for name in "MonthBegin MonthEnd BMonthBegin BMonthEnd".split():
+     cls = getattr(pd.tseries.offsets, name)
+     st.register_type_strategy(
+         cls, st.builds(cls, n=st.integers(-99, 99), normalize=st.booleans())
+     )
+
+ for name in "YearBegin YearEnd BYearBegin BYearEnd".split():
+     cls = getattr(pd.tseries.offsets, name)
+     st.register_type_strategy(
+         cls,
+         st.builds(
+             cls,
+             n=st.integers(-5, 5),
+             normalize=st.booleans(),
+             month=st.integers(min_value=1, max_value=12),
+         ),
+     )
+
+ for name in "QuarterBegin QuarterEnd BQuarterBegin BQuarterEnd".split():
+     cls = getattr(pd.tseries.offsets, name)
+     st.register_type_strategy(
+         cls,
+         st.builds(
+             cls,
+             n=st.integers(-24, 24),
+             normalize=st.booleans(),
+             startingMonth=st.integers(min_value=1, max_value=12),
+         ),
+     )
+
+
+ # ----------------------------------------------------------------
250
+ # Autouse fixtures
251
+ # ----------------------------------------------------------------
252
+
253
+
254
+ # https://github.com/pytest-dev/pytest/issues/11873
255
+ # Would like to avoid autouse=True, but cannot as of pytest 8.0.0
256
+ @pytest.fixture(autouse=True)
257
+ def add_doctest_imports(doctest_namespace) -> None:
258
+ """
259
+ Make `np` and `pd` names available for doctests.
260
+ """
261
+ doctest_namespace["np"] = np
262
+ doctest_namespace["pd"] = pd
263
+
264
+
265
+ @pytest.fixture(autouse=True)
266
+ def configure_tests() -> None:
267
+ """
268
+ Configure settings for all tests and test modules.
269
+ """
270
+ pd.set_option("chained_assignment", "raise")
271
+
272
+
273
+ # ----------------------------------------------------------------
274
+ # Common arguments
275
+ # ----------------------------------------------------------------
276
+ @pytest.fixture(params=[0, 1, "index", "columns"], ids=lambda x: f"axis={repr(x)}")
277
+ def axis(request):
278
+ """
279
+ Fixture for returning the axis numbers of a DataFrame.
280
+ """
281
+ return request.param
282
+
283
+
284
+ axis_frame = axis
285
+
286
+
287
+ @pytest.fixture(params=[1, "columns"], ids=lambda x: f"axis={repr(x)}")
288
+ def axis_1(request):
289
+ """
290
+ Fixture for returning aliases of axis 1 of a DataFrame.
291
+ """
292
+ return request.param
293
+
294
+
295
+ @pytest.fixture(params=[True, False, None])
296
+ def observed(request):
297
+ """
298
+ Pass in the observed keyword to groupby for [True, False]
299
+ This indicates whether categoricals should return values for
300
+ values which are not in the grouper [False / None], or only values which
301
+ appear in the grouper [True]. [None] is supported for future compatibility
302
+ if we decide to change the default (and would need to warn if this
303
+ parameter is not passed).
304
+ """
305
+ return request.param
306
+
307
+
308
+ @pytest.fixture(params=[True, False, None])
309
+ def ordered(request):
310
+ """
311
+ Boolean 'ordered' parameter for Categorical.
312
+ """
313
+ return request.param
314
+
315
+
316
+ @pytest.fixture(params=[True, False])
317
+ def skipna(request):
318
+ """
319
+ Boolean 'skipna' parameter.
320
+ """
321
+ return request.param
322
+
323
+
324
+ @pytest.fixture(params=["first", "last", False])
325
+ def keep(request):
326
+ """
327
+ Valid values for the 'keep' parameter used in
328
+ .duplicated or .drop_duplicates
329
+ """
330
+ return request.param
331
+
332
+
333
+ @pytest.fixture(params=["both", "neither", "left", "right"])
334
+ def inclusive_endpoints_fixture(request):
335
+ """
336
+ Fixture for trying all interval 'inclusive' parameters.
337
+ """
338
+ return request.param
339
+
340
+
341
+ @pytest.fixture(params=["left", "right", "both", "neither"])
342
+ def closed(request):
343
+ """
344
+ Fixture for trying all interval closed parameters.
345
+ """
346
+ return request.param
347
+
348
+
349
+ @pytest.fixture(params=["left", "right", "both", "neither"])
350
+ def other_closed(request):
351
+ """
352
+ Secondary closed fixture to allow parametrizing over all pairs of closed.
353
+ """
354
+ return request.param
355
+
356
+
357
+ @pytest.fixture(
358
+ params=[
359
+ None,
360
+ "gzip",
361
+ "bz2",
362
+ "zip",
363
+ "xz",
364
+ "tar",
365
+ pytest.param("zstd", marks=td.skip_if_no("zstandard")),
366
+ ]
367
+ )
368
+ def compression(request):
369
+ """
370
+ Fixture for trying common compression types in compression tests.
371
+ """
372
+ return request.param
373
+
374
+
375
+ @pytest.fixture(
376
+ params=[
377
+ "gzip",
378
+ "bz2",
379
+ "zip",
380
+ "xz",
381
+ "tar",
382
+ pytest.param("zstd", marks=td.skip_if_no("zstandard")),
383
+ ]
384
+ )
385
+ def compression_only(request):
386
+ """
387
+ Fixture for trying common compression types in compression tests excluding
388
+ uncompressed case.
389
+ """
390
+ return request.param
391
+
392
+
393
+ @pytest.fixture(params=[True, False])
394
+ def writable(request):
395
+ """
396
+ Fixture that an array is writable.
397
+ """
398
+ return request.param
399
+
400
+
401
+ @pytest.fixture(params=["inner", "outer", "left", "right"])
402
+ def join_type(request):
403
+ """
404
+ Fixture for trying all types of join operations.
405
+ """
406
+ return request.param
407
+
408
+
409
+ @pytest.fixture(params=["nlargest", "nsmallest"])
410
+ def nselect_method(request):
411
+ """
412
+ Fixture for trying all nselect methods.
413
+ """
414
+ return request.param
415
+
416
+
417
+ # ----------------------------------------------------------------
418
+ # Missing values & co.
419
+ # ----------------------------------------------------------------
420
+ @pytest.fixture(params=tm.NULL_OBJECTS, ids=lambda x: type(x).__name__)
421
+ def nulls_fixture(request):
422
+ """
423
+ Fixture for each null type in pandas.
424
+ """
425
+ return request.param
426
+
427
+
428
+ nulls_fixture2 = nulls_fixture # Generate cartesian product of nulls_fixture
429
+
430
+
431
+ @pytest.fixture(params=[None, np.nan, pd.NaT])
432
+ def unique_nulls_fixture(request):
433
+ """
434
+ Fixture for each null type in pandas, each null type exactly once.
435
+ """
436
+ return request.param
437
+
438
+
439
+ # Generate cartesian product of unique_nulls_fixture:
440
+ unique_nulls_fixture2 = unique_nulls_fixture
441
+
442
+
443
+ @pytest.fixture(params=tm.NP_NAT_OBJECTS, ids=lambda x: type(x).__name__)
444
+ def np_nat_fixture(request):
445
+ """
446
+ Fixture for each NaT type in numpy.
447
+ """
448
+ return request.param
449
+
450
+
451
+ # Generate cartesian product of np_nat_fixture:
452
+ np_nat_fixture2 = np_nat_fixture
453
+
454
+
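The `fixture2 = fixture` aliasing used above is how the suite gets a cartesian product: requesting both names parametrizes a test over all ordered pairs. A minimal sketch:

    # 3 x 3 = 9 invocations, one per ordered pair of null values.
    def test_distinct_nulls_compare_unequal(unique_nulls_fixture, unique_nulls_fixture2):
        left, right = unique_nulls_fixture, unique_nulls_fixture2
        if left is not right:
            # distinct null objects never compare equal
            assert left != right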
+ # ----------------------------------------------------------------
+ # Classes
+ # ----------------------------------------------------------------
+
+
+ @pytest.fixture(params=[DataFrame, Series])
+ def frame_or_series(request):
+     """
+     Fixture to parametrize over DataFrame and Series.
+     """
+     return request.param
+
+
+ @pytest.fixture(params=[Index, Series], ids=["index", "series"])
+ def index_or_series(request):
+     """
+     Fixture to parametrize over Index and Series, made necessary by a mypy
+     bug, giving an error:
+
+     List item 0 has incompatible type "Type[Series]"; expected "Type[PandasObject]"
+
+     See GH#29725
+     """
+     return request.param
+
+
+ # Generate cartesian product of index_or_series fixture:
+ index_or_series2 = index_or_series
+
+
+ @pytest.fixture(params=[Index, Series, pd.array], ids=["index", "series", "array"])
+ def index_or_series_or_array(request):
+     """
+     Fixture to parametrize over Index, Series, and ExtensionArray
+     """
+     return request.param
+
+
+ @pytest.fixture(params=[Index, Series, DataFrame, pd.array], ids=lambda x: x.__name__)
+ def box_with_array(request):
+     """
+     Fixture to test behavior for Index, Series, DataFrame, and pandas Array
+     classes
+     """
+     return request.param
+
+
+ box_with_array2 = box_with_array
+
+
+ @pytest.fixture
+ def dict_subclass() -> type[dict]:
+     """
+     Fixture for a dictionary subclass.
+     """
+
+     class TestSubDict(dict):
+         def __init__(self, *args, **kwargs) -> None:
+             dict.__init__(self, *args, **kwargs)
+
+     return TestSubDict
+
+
+ @pytest.fixture
+ def non_dict_mapping_subclass() -> type[abc.Mapping]:
+     """
+     Fixture for a non-mapping dictionary subclass.
+     """
+
+     class TestNonDictMapping(abc.Mapping):
+         def __init__(self, underlying_dict) -> None:
+             self._data = underlying_dict
+
+         def __getitem__(self, key):
+             return self._data.__getitem__(key)
+
+         def __iter__(self) -> Iterator:
+             return self._data.__iter__()
+
+         def __len__(self) -> int:
+             return self._data.__len__()
+
+     return TestNonDictMapping
+
+
+ # ----------------------------------------------------------------
+ # Indices
+ # ----------------------------------------------------------------
+ @pytest.fixture
+ def multiindex_year_month_day_dataframe_random_data():
+     """
+     DataFrame with 3 level MultiIndex (year, month, day) covering
+     first 100 business days from 2000-01-01 with random data
+     """
+     tdf = DataFrame(
+         np.random.default_rng(2).standard_normal((100, 4)),
+         columns=Index(list("ABCD"), dtype=object),
+         index=date_range("2000-01-01", periods=100, freq="B"),
+     )
+     ymd = tdf.groupby([lambda x: x.year, lambda x: x.month, lambda x: x.day]).sum()
+     # use int64 Index, to make sure things work
+     ymd.index = ymd.index.set_levels([lev.astype("i8") for lev in ymd.index.levels])
+     ymd.index.set_names(["year", "month", "day"], inplace=True)
+     return ymd
+
+
+ @pytest.fixture
+ def lexsorted_two_level_string_multiindex() -> MultiIndex:
+     """
+     2-level MultiIndex, lexsorted, with string names.
+     """
+     return MultiIndex(
+         levels=[["foo", "bar", "baz", "qux"], ["one", "two", "three"]],
+         codes=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3], [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
+         names=["first", "second"],
+     )
+
+
+ @pytest.fixture
+ def multiindex_dataframe_random_data(
+     lexsorted_two_level_string_multiindex,
+ ) -> DataFrame:
+     """DataFrame with 2 level MultiIndex with random data"""
+     index = lexsorted_two_level_string_multiindex
+     return DataFrame(
+         np.random.default_rng(2).standard_normal((10, 3)),
+         index=index,
+         columns=Index(["A", "B", "C"], name="exp"),
+     )
+
+
+ def _create_multiindex():
+     """
+     MultiIndex used to test the general functionality of this object
+     """
+
+     # See Also: tests.multi.conftest.idx
+     major_axis = Index(["foo", "bar", "baz", "qux"])
+     minor_axis = Index(["one", "two"])
+
+     major_codes = np.array([0, 0, 1, 2, 3, 3])
+     minor_codes = np.array([0, 1, 0, 1, 0, 1])
+     index_names = ["first", "second"]
+     return MultiIndex(
+         levels=[major_axis, minor_axis],
+         codes=[major_codes, minor_codes],
+         names=index_names,
+         verify_integrity=False,
+     )
+
+
+ def _create_mi_with_dt64tz_level():
+     """
+     MultiIndex with a level that is a tzaware DatetimeIndex.
+     """
+     # GH#8367 round trip with pickle
+     return MultiIndex.from_product(
+         [[1, 2], ["a", "b"], date_range("20130101", periods=3, tz="US/Eastern")],
+         names=["one", "two", "three"],
+     )
+
+
+ indices_dict = {
+     "string": Index([f"pandas_{i}" for i in range(100)]),
+     "datetime": date_range("2020-01-01", periods=100),
+     "datetime-tz": date_range("2020-01-01", periods=100, tz="US/Pacific"),
+     "period": period_range("2020-01-01", periods=100, freq="D"),
+     "timedelta": timedelta_range(start="1 day", periods=100, freq="D"),
+     "range": RangeIndex(100),
+     "int8": Index(np.arange(100), dtype="int8"),
+     "int16": Index(np.arange(100), dtype="int16"),
+     "int32": Index(np.arange(100), dtype="int32"),
+     "int64": Index(np.arange(100), dtype="int64"),
+     "uint8": Index(np.arange(100), dtype="uint8"),
+     "uint16": Index(np.arange(100), dtype="uint16"),
+     "uint32": Index(np.arange(100), dtype="uint32"),
+     "uint64": Index(np.arange(100), dtype="uint64"),
+     "float32": Index(np.arange(100), dtype="float32"),
+     "float64": Index(np.arange(100), dtype="float64"),
+     "bool-object": Index([True, False] * 5, dtype=object),
+     "bool-dtype": Index([True, False] * 5, dtype=bool),
+     "complex64": Index(
+         np.arange(100, dtype="complex64") + 1.0j * np.arange(100, dtype="complex64")
+     ),
+     "complex128": Index(
+         np.arange(100, dtype="complex128") + 1.0j * np.arange(100, dtype="complex128")
+     ),
+     "categorical": CategoricalIndex(list("abcd") * 25),
+     "interval": IntervalIndex.from_breaks(np.linspace(0, 100, num=101)),
+     "empty": Index([]),
+     "tuples": MultiIndex.from_tuples(zip(["foo", "bar", "baz"], [1, 2, 3])),
+     "mi-with-dt64tz-level": _create_mi_with_dt64tz_level(),
+     "multi": _create_multiindex(),
+     "repeats": Index([0, 0, 1, 1, 2, 2]),
+     "nullable_int": Index(np.arange(100), dtype="Int64"),
+     "nullable_uint": Index(np.arange(100), dtype="UInt16"),
+     "nullable_float": Index(np.arange(100), dtype="Float32"),
+     "nullable_bool": Index(np.arange(100).astype(bool), dtype="boolean"),
+     "string-python": Index(
+         pd.array([f"pandas_{i}" for i in range(100)], dtype="string[python]")
+     ),
+ }
+ if has_pyarrow:
+     idx = Index(pd.array([f"pandas_{i}" for i in range(100)], dtype="string[pyarrow]"))
+     indices_dict["string-pyarrow"] = idx
+
+
+ @pytest.fixture(params=indices_dict.keys())
+ def index(request):
+     """
+     Fixture for many "simple" kinds of indices.
+
+     These indices are unlikely to cover corner cases, e.g.
+     - no names
+     - no NaTs/NaNs
+     - no values near implementation bounds
+     - ...
+     """
+     # copy to avoid mutation, e.g. setting .name
+     return indices_dict[request.param].copy()
+
+
+ # Needed to generate cartesian product of indices
+ index_fixture2 = index
+
+
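A minimal sketch of consuming the `index` fixture; each parametrization receives a fresh copy of one indices_dict entry, so mutating it in a test is safe:

    import numpy as np
    import pandas as pd
    import pandas._testing as tm

    def test_index_roundtrips_through_series(index):
        # runs once per key in indices_dict
        ser = pd.Series(np.arange(len(index)), index=index)
        tm.assert_index_equal(ser.index, index)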
+ @pytest.fixture(
+     params=[
+         key for key, value in indices_dict.items() if not isinstance(value, MultiIndex)
+     ]
+ )
+ def index_flat(request):
+     """
+     index fixture, but excluding MultiIndex cases.
+     """
+     key = request.param
+     return indices_dict[key].copy()
+
+
+ # Alias so we can test with cartesian product of index_flat
+ index_flat2 = index_flat
+
+
+ @pytest.fixture(
+     params=[
+         key
+         for key, value in indices_dict.items()
+         if not (
+             key.startswith(("int", "uint", "float"))
+             or key in ["range", "empty", "repeats", "bool-dtype"]
+         )
+         and not isinstance(value, MultiIndex)
+     ]
+ )
+ def index_with_missing(request):
+     """
+     Fixture for indices with missing values.
+
+     Integer-dtype and empty cases are excluded because they cannot hold missing
+     values.
+
+     MultiIndex is excluded because isna() is not defined for MultiIndex.
+     """
+
+     # GH 35538. Use deep copy to avoid illusive bug on np-dev
+     # GHA pipeline that writes into indices_dict despite copy
+     ind = indices_dict[request.param].copy(deep=True)
+     vals = ind.values.copy()
+     if request.param in ["tuples", "mi-with-dt64tz-level", "multi"]:
+         # For setting missing values in the top level of MultiIndex
+         vals = ind.tolist()
+         vals[0] = (None,) + vals[0][1:]
+         vals[-1] = (None,) + vals[-1][1:]
+         return MultiIndex.from_tuples(vals)
+     else:
+         vals[0] = None
+         vals[-1] = None
+         return type(ind)(vals)
+
+
+ # ----------------------------------------------------------------
+ # Series'
+ # ----------------------------------------------------------------
+ @pytest.fixture
+ def string_series() -> Series:
+     """
+     Fixture for Series of floats with Index of unique strings
+     """
+     return Series(
+         np.arange(30, dtype=np.float64) * 1.1,
+         index=Index([f"i_{i}" for i in range(30)], dtype=object),
+         name="series",
+     )
+
+
+ @pytest.fixture
+ def object_series() -> Series:
+     """
+     Fixture for Series of dtype object with Index of unique strings
+     """
+     data = [f"foo_{i}" for i in range(30)]
+     index = Index([f"bar_{i}" for i in range(30)], dtype=object)
+     return Series(data, index=index, name="objects", dtype=object)
+
+
+ @pytest.fixture
+ def datetime_series() -> Series:
+     """
+     Fixture for Series of floats with DatetimeIndex
+     """
+     return Series(
+         np.random.default_rng(2).standard_normal(30),
+         index=date_range("2000-01-01", periods=30, freq="B"),
+         name="ts",
+     )
+
+
+ def _create_series(index):
+     """Helper for the _series dict"""
+     size = len(index)
+     data = np.random.default_rng(2).standard_normal(size)
+     return Series(data, index=index, name="a", copy=False)
+
+
+ _series = {
+     f"series-with-{index_id}-index": _create_series(index)
+     for index_id, index in indices_dict.items()
+ }
+
+
+ @pytest.fixture
+ def series_with_simple_index(index) -> Series:
+     """
+     Fixture for tests on series with changing types of indices.
+     """
+     return _create_series(index)
+
+
+ _narrow_series = {
+     f"{dtype.__name__}-series": Series(
+         range(30), index=[f"i-{i}" for i in range(30)], name="a", dtype=dtype
+     )
+     for dtype in tm.NARROW_NP_DTYPES
+ }
+
+
+ _index_or_series_objs = {**indices_dict, **_series, **_narrow_series}
+
+
+ @pytest.fixture(params=_index_or_series_objs.keys())
+ def index_or_series_obj(request):
+     """
+     Fixture for tests on indexes, series and series with a narrow dtype
+     copy to avoid mutation, e.g. setting .name
+     """
+     return _index_or_series_objs[request.param].copy(deep=True)
+
+
+ _typ_objects_series = {
+     f"{dtype.__name__}-series": Series(dtype) for dtype in tm.PYTHON_DATA_TYPES
+ }
+
+
+ _index_or_series_memory_objs = {
+     **indices_dict,
+     **_series,
+     **_narrow_series,
+     **_typ_objects_series,
+ }
+
+
+ @pytest.fixture(params=_index_or_series_memory_objs.keys())
+ def index_or_series_memory_obj(request):
+     """
+     Fixture for tests on indexes, series, series with a narrow dtype and
+     series with empty objects type
+     copy to avoid mutation, e.g. setting .name
+     """
+     return _index_or_series_memory_objs[request.param].copy(deep=True)
+
+
+ # ----------------------------------------------------------------
+ # DataFrames
+ # ----------------------------------------------------------------
+ @pytest.fixture
+ def int_frame() -> DataFrame:
+     """
+     Fixture for DataFrame of ints with index of unique strings
+
+     Columns are ['A', 'B', 'C', 'D']
+     """
+     return DataFrame(
+         np.ones((30, 4), dtype=np.int64),
+         index=Index([f"foo_{i}" for i in range(30)], dtype=object),
+         columns=Index(list("ABCD"), dtype=object),
+     )
+
+
+ @pytest.fixture
+ def float_frame() -> DataFrame:
+     """
+     Fixture for DataFrame of floats with index of unique strings
+
+     Columns are ['A', 'B', 'C', 'D'].
+     """
+     return DataFrame(
+         np.random.default_rng(2).standard_normal((30, 4)),
+         index=Index([f"foo_{i}" for i in range(30)]),
+         columns=Index(list("ABCD")),
+     )
+
+
+ @pytest.fixture
+ def rand_series_with_duplicate_datetimeindex() -> Series:
+     """
+     Fixture for Series with a DatetimeIndex that has duplicates.
+     """
+     dates = [
+         datetime(2000, 1, 2),
+         datetime(2000, 1, 2),
+         datetime(2000, 1, 2),
+         datetime(2000, 1, 3),
+         datetime(2000, 1, 3),
+         datetime(2000, 1, 3),
+         datetime(2000, 1, 4),
+         datetime(2000, 1, 4),
+         datetime(2000, 1, 4),
+         datetime(2000, 1, 5),
+     ]
+
+     return Series(np.random.default_rng(2).standard_normal(len(dates)), index=dates)
+
+
+ # ----------------------------------------------------------------
+ # Scalars
+ # ----------------------------------------------------------------
+ @pytest.fixture(
+     params=[
+         (Interval(left=0, right=5), IntervalDtype("int64", "right")),
+         (Interval(left=0.1, right=0.5), IntervalDtype("float64", "right")),
+         (Period("2012-01", freq="M"), "period[M]"),
+         (Period("2012-02-01", freq="D"), "period[D]"),
+         (
+             Timestamp("2011-01-01", tz="US/Eastern"),
+             DatetimeTZDtype(unit="s", tz="US/Eastern"),
+         ),
+         (Timedelta(seconds=500), "timedelta64[ns]"),
+     ]
+ )
+ def ea_scalar_and_dtype(request):
+     return request.param
+
+
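A minimal sketch of consuming the paired scalar/dtype fixture above:

    # Each invocation gets one (scalar, dtype) pair, e.g.
    # (Period("2012-01", freq="M"), "period[M]").
    def test_scalar_roundtrips_through_array(ea_scalar_and_dtype):
        ea_scalar, ea_dtype = ea_scalar_and_dtype
        arr = pd.array([ea_scalar], dtype=ea_dtype)
        assert arr.dtype == ea_dtype
        assert arr[0] == ea_scalar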
908
+ # ----------------------------------------------------------------
909
+ # Operators & Operations
910
+ # ----------------------------------------------------------------
911
+
912
+
913
+ @pytest.fixture(params=tm.arithmetic_dunder_methods)
914
+ def all_arithmetic_operators(request):
915
+ """
916
+ Fixture for dunder names for common arithmetic operations.
917
+ """
918
+ return request.param
919
+
920
+
921
+ @pytest.fixture(
922
+ params=[
923
+ operator.add,
924
+ ops.radd,
925
+ operator.sub,
926
+ ops.rsub,
927
+ operator.mul,
928
+ ops.rmul,
929
+ operator.truediv,
930
+ ops.rtruediv,
931
+ operator.floordiv,
932
+ ops.rfloordiv,
933
+ operator.mod,
934
+ ops.rmod,
935
+ operator.pow,
936
+ ops.rpow,
937
+ operator.eq,
938
+ operator.ne,
939
+ operator.lt,
940
+ operator.le,
941
+ operator.gt,
942
+ operator.ge,
943
+ operator.and_,
944
+ ops.rand_,
945
+ operator.xor,
946
+ ops.rxor,
947
+ operator.or_,
948
+ ops.ror_,
949
+ ]
950
+ )
951
+ def all_binary_operators(request):
952
+ """
953
+ Fixture for operator and roperator arithmetic, comparison, and logical ops.
954
+ """
955
+ return request.param
956
+
957
+
958
+ @pytest.fixture(
959
+ params=[
960
+ operator.add,
961
+ ops.radd,
962
+ operator.sub,
963
+ ops.rsub,
964
+ operator.mul,
965
+ ops.rmul,
966
+ operator.truediv,
967
+ ops.rtruediv,
968
+ operator.floordiv,
969
+ ops.rfloordiv,
970
+ operator.mod,
971
+ ops.rmod,
972
+ operator.pow,
973
+ ops.rpow,
974
+ ]
975
+ )
976
+ def all_arithmetic_functions(request):
977
+ """
978
+ Fixture for operator and roperator arithmetic functions.
979
+
980
+ Notes
981
+ -----
982
+ This includes divmod and rdivmod, whereas all_arithmetic_operators
983
+ does not.
984
+ """
985
+ return request.param
986
+
987
+
988
+ _all_numeric_reductions = [
989
+ "count",
990
+ "sum",
991
+ "max",
992
+ "min",
993
+ "mean",
994
+ "prod",
995
+ "std",
996
+ "var",
997
+ "median",
998
+ "kurt",
999
+ "skew",
1000
+ "sem",
1001
+ ]
1002
+
1003
+
1004
+ @pytest.fixture(params=_all_numeric_reductions)
1005
+ def all_numeric_reductions(request):
1006
+ """
1007
+ Fixture for numeric reduction names.
1008
+ """
1009
+ return request.param
1010
+
1011
+
1012
+ _all_boolean_reductions = ["all", "any"]
1013
+
1014
+
1015
+ @pytest.fixture(params=_all_boolean_reductions)
1016
+ def all_boolean_reductions(request):
1017
+ """
1018
+ Fixture for boolean reduction names.
1019
+ """
1020
+ return request.param
1021
+
1022
+
1023
+ _all_reductions = _all_numeric_reductions + _all_boolean_reductions
1024
+
1025
+
1026
+ @pytest.fixture(params=_all_reductions)
1027
+ def all_reductions(request):
1028
+ """
1029
+ Fixture for all (boolean + numeric) reduction names.
1030
+ """
1031
+ return request.param
1032
+
1033
+
1034
+ @pytest.fixture(
1035
+ params=[
1036
+ operator.eq,
1037
+ operator.ne,
1038
+ operator.gt,
1039
+ operator.ge,
1040
+ operator.lt,
1041
+ operator.le,
1042
+ ]
1043
+ )
1044
+ def comparison_op(request):
1045
+ """
1046
+ Fixture for operator module comparison functions.
1047
+ """
1048
+ return request.param
1049
+
1050
+
1051
+ @pytest.fixture(params=["__le__", "__lt__", "__ge__", "__gt__"])
1052
+ def compare_operators_no_eq_ne(request):
1053
+ """
1054
+ Fixture for dunder names for compare operations except == and !=
1055
+
1056
+ * >=
1057
+ * >
1058
+ * <
1059
+ * <=
1060
+ """
1061
+ return request.param
1062
+
1063
+
1064
+ @pytest.fixture(
1065
+ params=["__and__", "__rand__", "__or__", "__ror__", "__xor__", "__rxor__"]
1066
+ )
1067
+ def all_logical_operators(request):
1068
+ """
1069
+ Fixture for dunder names for common logical operations
1070
+
1071
+ * |
1072
+ * &
1073
+ * ^
1074
+ """
1075
+ return request.param
1076
+
1077
+
1078
+ _all_numeric_accumulations = ["cumsum", "cumprod", "cummin", "cummax"]
1079
+
1080
+
1081
+ @pytest.fixture(params=_all_numeric_accumulations)
1082
+ def all_numeric_accumulations(request):
1083
+ """
1084
+ Fixture for numeric accumulation names
1085
+ """
1086
+ return request.param
1087
+
1088
+
1089
+ # ----------------------------------------------------------------
1090
+ # Data sets/files
1091
+ # ----------------------------------------------------------------
1092
+ @pytest.fixture
1093
+ def strict_data_files(pytestconfig):
1094
+ """
1095
+ Returns the configuration for the test setting `--no-strict-data-files`.
1096
+ """
1097
+ return pytestconfig.getoption("--no-strict-data-files")
1098
+
1099
+
1100
+ @pytest.fixture
1101
+ def datapath(strict_data_files: str) -> Callable[..., str]:
1102
+ """
1103
+ Get the path to a data file.
1104
+
1105
+ Parameters
1106
+ ----------
1107
+ path : str
1108
+ Path to the file, relative to ``pandas/tests/``
1109
+
1110
+ Returns
1111
+ -------
1112
+ path including ``pandas/tests``.
1113
+
1114
+ Raises
1115
+ ------
1116
+ ValueError
1117
+ If the path doesn't exist and the --no-strict-data-files option is not set.
1118
+ """
1119
+ BASE_PATH = os.path.join(os.path.dirname(__file__), "tests")
1120
+
1121
+ def deco(*args):
1122
+ path = os.path.join(BASE_PATH, *args)
1123
+ if not os.path.exists(path):
1124
+ if strict_data_files:
1125
+ raise ValueError(
1126
+ f"Could not find file {path} and --no-strict-data-files is not set."
1127
+ )
1128
+ pytest.skip(f"Could not find {path}.")
1129
+ return path
1130
+
1131
+ return deco
1132
+
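+ # Minimal usage sketch (the "io/data/csv/iris.csv" path is illustrative):
+ #
+ #   def test_reads_bundled_file(datapath):
+ #       fname = datapath("io", "data", "csv", "iris.csv")
+ #       assert os.path.exists(fname)
+ #
+ # Missing files skip the test, or raise ValueError in strict mode.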
1133
+
1134
+ # ----------------------------------------------------------------
1135
+ # Time zones
1136
+ # ----------------------------------------------------------------
1137
+ TIMEZONES = [
1138
+ None,
1139
+ "UTC",
1140
+ "US/Eastern",
1141
+ "Asia/Tokyo",
1142
+ "dateutil/US/Pacific",
1143
+ "dateutil/Asia/Singapore",
1144
+ "+01:15",
1145
+ "-02:15",
1146
+ "UTC+01:15",
1147
+ "UTC-02:15",
1148
+ tzutc(),
1149
+ tzlocal(),
1150
+ FixedOffset(300),
1151
+ FixedOffset(0),
1152
+ FixedOffset(-300),
1153
+ timezone.utc,
1154
+ timezone(timedelta(hours=1)),
1155
+ timezone(timedelta(hours=-1), name="foo"),
1156
+ ]
1157
+ if zoneinfo is not None:
1158
+ TIMEZONES.extend(
1159
+ [
1160
+ zoneinfo.ZoneInfo("US/Pacific"), # type: ignore[list-item]
1161
+ zoneinfo.ZoneInfo("UTC"), # type: ignore[list-item]
1162
+ ]
1163
+ )
1164
+ TIMEZONE_IDS = [repr(i) for i in TIMEZONES]
1165
+
1166
+
1167
+ @td.parametrize_fixture_doc(str(TIMEZONE_IDS))
1168
+ @pytest.fixture(params=TIMEZONES, ids=TIMEZONE_IDS)
1169
+ def tz_naive_fixture(request):
1170
+ """
1171
+ Fixture for trying timezones including default (None): {0}
1172
+ """
1173
+ return request.param
1174
+
1175
+
1176
+ @td.parametrize_fixture_doc(str(TIMEZONE_IDS[1:]))
1177
+ @pytest.fixture(params=TIMEZONES[1:], ids=TIMEZONE_IDS[1:])
1178
+ def tz_aware_fixture(request):
1179
+ """
1180
+ Fixture for trying explicit timezones: {0}
1181
+ """
1182
+ return request.param
1183
+
1184
+
1185
+ # Generate cartesian product of tz_aware_fixture:
1186
+ tz_aware_fixture2 = tz_aware_fixture
1187
+
1188
+
1189
+ _UTCS = ["utc", "dateutil/UTC", utc, tzutc(), timezone.utc]
1190
+ if zoneinfo is not None:
1191
+ _UTCS.append(zoneinfo.ZoneInfo("UTC"))
1192
+
1193
+
1194
+ @pytest.fixture(params=_UTCS)
1195
+ def utc_fixture(request):
1196
+ """
1197
+ Fixture to provide variants of UTC timezone strings and tzinfo objects.
1198
+ """
1199
+ return request.param
1200
+
1201
+
1202
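+ # Alias so we can test with cartesian product of utc_fixture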
+ utc_fixture2 = utc_fixture
1203
+
1204
+
1205
+ @pytest.fixture(params=["s", "ms", "us", "ns"])
1206
+ def unit(request):
1207
+ """
1208
+ datetime64 units we support.
1209
+ """
1210
+ return request.param
1211
+
1212
+
1213
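+ # Alias so we can test with cartesian product of unit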
+ unit2 = unit
1214
+
1215
+
1216
+ # ----------------------------------------------------------------
1217
+ # Dtypes
1218
+ # ----------------------------------------------------------------
1219
+ @pytest.fixture(params=tm.STRING_DTYPES)
1220
+ def string_dtype(request):
1221
+ """
1222
+ Parametrized fixture for string dtypes.
1223
+
1224
+ * str
1225
+ * 'str'
1226
+ * 'U'
1227
+ """
1228
+ return request.param
1229
+
1230
+
1231
+ @pytest.fixture(
1232
+ params=[
1233
+ "string[python]",
1234
+ pytest.param("string[pyarrow]", marks=td.skip_if_no("pyarrow")),
1235
+ ]
1236
+ )
1237
+ def nullable_string_dtype(request):
1238
+ """
1239
+ Parametrized fixture for string dtypes.
1240
+
1241
+ * 'string[python]'
1242
+ * 'string[pyarrow]'
1243
+ """
1244
+ return request.param
1245
+
1246
+
1247
+ @pytest.fixture(
1248
+ params=[
1249
+ "python",
1250
+ pytest.param("pyarrow", marks=td.skip_if_no("pyarrow")),
1251
+ pytest.param("pyarrow_numpy", marks=td.skip_if_no("pyarrow")),
1252
+ ]
1253
+ )
1254
+ def string_storage(request):
1255
+ """
1256
+ Parametrized fixture for pd.options.mode.string_storage.
1257
+
1258
+ * 'python'
1259
+ * 'pyarrow'
1260
+ * 'pyarrow_numpy'
1261
+ """
1262
+ return request.param
1263
+
1264
+
1265
+ @pytest.fixture(
1266
+ params=[
1267
+ "numpy_nullable",
1268
+ pytest.param("pyarrow", marks=td.skip_if_no("pyarrow")),
1269
+ ]
1270
+ )
1271
+ def dtype_backend(request):
1272
+ """
1273
+ Parametrized fixture for the dtype_backend keyword.
1274
+
1275
+ * 'numpy_nullable'
1276
+ * 'pyarrow'
1277
+ """
1278
+ return request.param
1279
+
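+ # Minimal usage sketch: IO tests typically thread this fixture through
+ # keyword arguments, e.g. pd.read_csv(buf, dtype_backend=dtype_backend).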
1280
+
1281
+ # Alias so we can test with cartesian product of string_storage
1282
+ string_storage2 = string_storage
1283
+
1284
+
1285
+ @pytest.fixture(params=tm.BYTES_DTYPES)
1286
+ def bytes_dtype(request):
1287
+ """
1288
+ Parametrized fixture for bytes dtypes.
1289
+
1290
+ * bytes
1291
+ * 'bytes'
1292
+ """
1293
+ return request.param
1294
+
1295
+
1296
+ @pytest.fixture(params=tm.OBJECT_DTYPES)
1297
+ def object_dtype(request):
1298
+ """
1299
+ Parametrized fixture for object dtypes.
1300
+
1301
+ * object
1302
+ * 'object'
1303
+ """
1304
+ return request.param
1305
+
1306
+
1307
+ @pytest.fixture(
1308
+ params=[
1309
+ "object",
1310
+ "string[python]",
1311
+ pytest.param("string[pyarrow]", marks=td.skip_if_no("pyarrow")),
1312
+ pytest.param("string[pyarrow_numpy]", marks=td.skip_if_no("pyarrow")),
1313
+ ]
1314
+ )
1315
+ def any_string_dtype(request):
1316
+ """
1317
+ Parametrized fixture for string dtypes.
1318
+
+ * 'object'
1319
+ * 'string[python]'
1320
+ * 'string[pyarrow]'
+ * 'string[pyarrow_numpy]'
1321
+ """
1322
+ return request.param
1323
+
1324
+
1325
+ @pytest.fixture(params=tm.DATETIME64_DTYPES)
1326
+ def datetime64_dtype(request):
1327
+ """
1328
+ Parametrized fixture for datetime64 dtypes.
1329
+
1330
+ * 'datetime64[ns]'
1331
+ * 'M8[ns]'
1332
+ """
1333
+ return request.param
1334
+
1335
+
1336
+ @pytest.fixture(params=tm.TIMEDELTA64_DTYPES)
1337
+ def timedelta64_dtype(request):
1338
+ """
1339
+ Parametrized fixture for timedelta64 dtypes.
1340
+
1341
+ * 'timedelta64[ns]'
1342
+ * 'm8[ns]'
1343
+ """
1344
+ return request.param
1345
+
1346
+
1347
+ @pytest.fixture
1348
+ def fixed_now_ts() -> Timestamp:
1349
+ """
1350
+ Fixture that emits a fixed Timestamp, standing in for Timestamp.now().
1351
+ """
1352
+ return Timestamp( # pyright: ignore[reportGeneralTypeIssues]
1353
+ year=2021, month=1, day=1, hour=12, minute=4, second=13, microsecond=22
1354
+ )
1355
+
1356
+
1357
+ @pytest.fixture(params=tm.FLOAT_NUMPY_DTYPES)
1358
+ def float_numpy_dtype(request):
1359
+ """
1360
+ Parameterized fixture for float dtypes.
1361
+
1362
+ * float
1363
+ * 'float32'
1364
+ * 'float64'
1365
+ """
1366
+ return request.param
1367
+
1368
+
1369
+ @pytest.fixture(params=tm.FLOAT_EA_DTYPES)
1370
+ def float_ea_dtype(request):
1371
+ """
1372
+ Parameterized fixture for float dtypes.
1373
+
1374
+ * 'Float32'
1375
+ * 'Float64'
1376
+ """
1377
+ return request.param
1378
+
1379
+
1380
+ @pytest.fixture(params=tm.ALL_FLOAT_DTYPES)
1381
+ def any_float_dtype(request):
1382
+ """
1383
+ Parameterized fixture for float dtypes.
1384
+
1385
+ * float
1386
+ * 'float32'
1387
+ * 'float64'
1388
+ * 'Float32'
1389
+ * 'Float64'
1390
+ """
1391
+ return request.param
1392
+
1393
+
1394
+ @pytest.fixture(params=tm.COMPLEX_DTYPES)
1395
+ def complex_dtype(request):
1396
+ """
1397
+ Parameterized fixture for complex dtypes.
1398
+
1399
+ * complex
1400
+ * 'complex64'
1401
+ * 'complex128'
1402
+ """
1403
+ return request.param
1404
+
1405
+
1406
+ @pytest.fixture(params=tm.COMPLEX_FLOAT_DTYPES)
1407
+ def complex_or_float_dtype(request):
1408
+ """
1409
+ Parameterized fixture for complex and numpy float dtypes.
1410
+
1411
+ * complex
1412
+ * 'complex64'
1413
+ * 'complex128'
1414
+ * float
1415
+ * 'float32'
1416
+ * 'float64'
1417
+ """
1418
+ return request.param
1419
+
1420
+
1421
+ @pytest.fixture(params=tm.SIGNED_INT_NUMPY_DTYPES)
1422
+ def any_signed_int_numpy_dtype(request):
1423
+ """
1424
+ Parameterized fixture for signed integer dtypes.
1425
+
1426
+ * int
1427
+ * 'int8'
1428
+ * 'int16'
1429
+ * 'int32'
1430
+ * 'int64'
1431
+ """
1432
+ return request.param
1433
+
1434
+
1435
+ @pytest.fixture(params=tm.UNSIGNED_INT_NUMPY_DTYPES)
1436
+ def any_unsigned_int_numpy_dtype(request):
1437
+ """
1438
+ Parameterized fixture for unsigned integer dtypes.
1439
+
1440
+ * 'uint8'
1441
+ * 'uint16'
1442
+ * 'uint32'
1443
+ * 'uint64'
1444
+ """
1445
+ return request.param
1446
+
1447
+
1448
+ @pytest.fixture(params=tm.ALL_INT_NUMPY_DTYPES)
1449
+ def any_int_numpy_dtype(request):
1450
+ """
1451
+ Parameterized fixture for any integer dtype.
1452
+
1453
+ * int
1454
+ * 'int8'
1455
+ * 'uint8'
1456
+ * 'int16'
1457
+ * 'uint16'
1458
+ * 'int32'
1459
+ * 'uint32'
1460
+ * 'int64'
1461
+ * 'uint64'
1462
+ """
1463
+ return request.param
1464
+
1465
+
1466
+ @pytest.fixture(params=tm.ALL_INT_EA_DTYPES)
1467
+ def any_int_ea_dtype(request):
1468
+ """
1469
+ Parameterized fixture for any nullable integer dtype.
1470
+
1471
+ * 'UInt8'
1472
+ * 'Int8'
1473
+ * 'UInt16'
1474
+ * 'Int16'
1475
+ * 'UInt32'
1476
+ * 'Int32'
1477
+ * 'UInt64'
1478
+ * 'Int64'
1479
+ """
1480
+ return request.param
1481
+
1482
+
1483
+ @pytest.fixture(params=tm.ALL_INT_DTYPES)
1484
+ def any_int_dtype(request):
1485
+ """
1486
+ Parameterized fixture for any integer dtype (numpy and nullable).
1487
+
1488
+ * int
1489
+ * 'int8'
1490
+ * 'uint8'
1491
+ * 'int16'
1492
+ * 'uint16'
1493
+ * 'int32'
1494
+ * 'uint32'
1495
+ * 'int64'
1496
+ * 'uint64'
1497
+ * 'UInt8'
1498
+ * 'Int8'
1499
+ * 'UInt16'
1500
+ * 'Int16'
1501
+ * 'UInt32'
1502
+ * 'Int32'
1503
+ * 'UInt64'
1504
+ * 'Int64'
1505
+ """
1506
+ return request.param
1507
+
1508
+
1509
+ @pytest.fixture(params=tm.ALL_INT_EA_DTYPES + tm.FLOAT_EA_DTYPES)
1510
+ def any_numeric_ea_dtype(request):
1511
+ """
1512
+ Parameterized fixture for any nullable integer dtype and
1513
+ any float ea dtypes.
1514
+
1515
+ * 'UInt8'
1516
+ * 'Int8'
1517
+ * 'UInt16'
1518
+ * 'Int16'
1519
+ * 'UInt32'
1520
+ * 'Int32'
1521
+ * 'UInt64'
1522
+ * 'Int64'
1523
+ * 'Float32'
1524
+ * 'Float64'
1525
+ """
1526
+ return request.param
1527
+
1528
+
1529
+ # Unsupported operand types for + ("List[Union[str, ExtensionDtype, dtype[Any],
1530
+ # Type[object]]]" and "List[str]")
1531
+ @pytest.fixture(
1532
+ params=tm.ALL_INT_EA_DTYPES
1533
+ + tm.FLOAT_EA_DTYPES
1534
+ + tm.ALL_INT_PYARROW_DTYPES_STR_REPR
1535
+ + tm.FLOAT_PYARROW_DTYPES_STR_REPR # type: ignore[operator]
1536
+ )
1537
+ def any_numeric_ea_and_arrow_dtype(request):
1538
+ """
1539
+ Parameterized fixture for any nullable integer or float dtype,
1540
+ including the pyarrow-backed variants.
1541
+
1542
+ * 'UInt8'
1543
+ * 'Int8'
1544
+ * 'UInt16'
1545
+ * 'Int16'
1546
+ * 'UInt32'
1547
+ * 'Int32'
1548
+ * 'UInt64'
1549
+ * 'Int64'
1550
+ * 'Float32'
1551
+ * 'Float64'
1552
+ * 'uint8[pyarrow]'
1553
+ * 'int8[pyarrow]'
1554
+ * 'uint16[pyarrow]'
1555
+ * 'int16[pyarrow]'
1556
+ * 'uint32[pyarrow]'
1557
+ * 'int32[pyarrow]'
1558
+ * 'uint64[pyarrow]'
1559
+ * 'int64[pyarrow]'
1560
+ * 'float32[pyarrow]'
1561
+ * 'float64[pyarrow]'
1562
+ """
1563
+ return request.param
1564
+
1565
+
1566
+ @pytest.fixture(params=tm.SIGNED_INT_EA_DTYPES)
1567
+ def any_signed_int_ea_dtype(request):
1568
+ """
1569
+ Parameterized fixture for any signed nullable integer dtype.
1570
+
1571
+ * 'Int8'
1572
+ * 'Int16'
1573
+ * 'Int32'
1574
+ * 'Int64'
1575
+ """
1576
+ return request.param
1577
+
1578
+
1579
+ @pytest.fixture(params=tm.ALL_REAL_NUMPY_DTYPES)
1580
+ def any_real_numpy_dtype(request):
1581
+ """
1582
+ Parameterized fixture for any (purely) real numeric dtype.
1583
+
1584
+ * int
1585
+ * 'int8'
1586
+ * 'uint8'
1587
+ * 'int16'
1588
+ * 'uint16'
1589
+ * 'int32'
1590
+ * 'uint32'
1591
+ * 'int64'
1592
+ * 'uint64'
1593
+ * float
1594
+ * 'float32'
1595
+ * 'float64'
1596
+ """
1597
+ return request.param
1598
+
1599
+
1600
+ @pytest.fixture(params=tm.ALL_REAL_DTYPES)
1601
+ def any_real_numeric_dtype(request):
1602
+ """
1603
+ Parameterized fixture for any (purely) real numeric dtype.
1604
+
1605
+ * int
1606
+ * 'int8'
1607
+ * 'uint8'
1608
+ * 'int16'
1609
+ * 'uint16'
1610
+ * 'int32'
1611
+ * 'uint32'
1612
+ * 'int64'
1613
+ * 'uint64'
1614
+ * float
1615
+ * 'float32'
1616
+ * 'float64'
1617
+
1618
+ and associated ea dtypes.
1619
+ """
1620
+ return request.param
1621
+
1622
+
1623
+ @pytest.fixture(params=tm.ALL_NUMPY_DTYPES)
1624
+ def any_numpy_dtype(request):
1625
+ """
1626
+ Parameterized fixture for all numpy dtypes.
1627
+
1628
+ * bool
1629
+ * 'bool'
1630
+ * int
1631
+ * 'int8'
1632
+ * 'uint8'
1633
+ * 'int16'
1634
+ * 'uint16'
1635
+ * 'int32'
1636
+ * 'uint32'
1637
+ * 'int64'
1638
+ * 'uint64'
1639
+ * float
1640
+ * 'float32'
1641
+ * 'float64'
1642
+ * complex
1643
+ * 'complex64'
1644
+ * 'complex128'
1645
+ * str
1646
+ * 'str'
1647
+ * 'U'
1648
+ * bytes
1649
+ * 'bytes'
1650
+ * 'datetime64[ns]'
1651
+ * 'M8[ns]'
1652
+ * 'timedelta64[ns]'
1653
+ * 'm8[ns]'
1654
+ * object
1655
+ * 'object'
1656
+ """
1657
+ return request.param
1658
+
1659
+
1660
+ @pytest.fixture(params=tm.ALL_REAL_NULLABLE_DTYPES)
1661
+ def any_real_nullable_dtype(request):
1662
+ """
1663
+ Parameterized fixture for all real dtypes that can hold NA.
1664
+
1665
+ * float
1666
+ * 'float32'
1667
+ * 'float64'
1668
+ * 'Float32'
1669
+ * 'Float64'
1670
+ * 'UInt8'
1671
+ * 'UInt16'
1672
+ * 'UInt32'
1673
+ * 'UInt64'
1674
+ * 'Int8'
1675
+ * 'Int16'
1676
+ * 'Int32'
1677
+ * 'Int64'
1678
+ * 'uint8[pyarrow]'
1679
+ * 'uint16[pyarrow]'
1680
+ * 'uint32[pyarrow]'
1681
+ * 'uint64[pyarrow]'
1682
+ * 'int8[pyarrow]'
1683
+ * 'int16[pyarrow]'
1684
+ * 'int32[pyarrow]'
1685
+ * 'int64[pyarrow]'
1686
+ * 'float[pyarrow]'
1687
+ * 'double[pyarrow]'
1688
+ """
1689
+ return request.param
1690
+
1691
+
1692
+ @pytest.fixture(params=tm.ALL_NUMERIC_DTYPES)
1693
+ def any_numeric_dtype(request):
1694
+ """
1695
+ Parameterized fixture for all numeric dtypes.
1696
+
1697
+ * int
1698
+ * 'int8'
1699
+ * 'uint8'
1700
+ * 'int16'
1701
+ * 'uint16'
1702
+ * 'int32'
1703
+ * 'uint32'
1704
+ * 'int64'
1705
+ * 'uint64'
1706
+ * float
1707
+ * 'float32'
1708
+ * 'float64'
1709
+ * complex
1710
+ * 'complex64'
1711
+ * 'complex128'
1712
+ * 'UInt8'
1713
+ * 'Int8'
1714
+ * 'UInt16'
1715
+ * 'Int16'
1716
+ * 'UInt32'
1717
+ * 'Int32'
1718
+ * 'UInt64'
1719
+ * 'Int64'
1720
+ * 'Float32'
1721
+ * 'Float64'
1722
+ """
1723
+ return request.param
1724
+
1725
+
1726
+ # categoricals are handled separately
1727
+ _any_skipna_inferred_dtype = [
1728
+ ("string", ["a", np.nan, "c"]),
1729
+ ("string", ["a", pd.NA, "c"]),
1730
+ ("mixed", ["a", pd.NaT, "c"]), # pd.NaT not considered valid by is_string_array
1731
+ ("bytes", [b"a", np.nan, b"c"]),
1732
+ ("empty", [np.nan, np.nan, np.nan]),
1733
+ ("empty", []),
1734
+ ("mixed-integer", ["a", np.nan, 2]),
1735
+ ("mixed", ["a", np.nan, 2.0]),
1736
+ ("floating", [1.0, np.nan, 2.0]),
1737
+ ("integer", [1, np.nan, 2]),
1738
+ ("mixed-integer-float", [1, np.nan, 2.0]),
1739
+ ("decimal", [Decimal(1), np.nan, Decimal(2)]),
1740
+ ("boolean", [True, np.nan, False]),
1741
+ ("boolean", [True, pd.NA, False]),
1742
+ ("datetime64", [np.datetime64("2013-01-01"), np.nan, np.datetime64("2018-01-01")]),
1743
+ ("datetime", [Timestamp("20130101"), np.nan, Timestamp("20180101")]),
1744
+ ("date", [date(2013, 1, 1), np.nan, date(2018, 1, 1)]),
1745
+ ("complex", [1 + 1j, np.nan, 2 + 2j]),
1746
+ # The following dtype is commented out due to GH 23554
1747
+ # ('timedelta64', [np.timedelta64(1, 'D'),
1748
+ # np.nan, np.timedelta64(2, 'D')]),
1749
+ ("timedelta", [timedelta(1), np.nan, timedelta(2)]),
1750
+ ("time", [time(1), np.nan, time(2)]),
1751
+ ("period", [Period(2013), pd.NaT, Period(2018)]),
1752
+ ("interval", [Interval(0, 1), np.nan, Interval(0, 2)]),
1753
+ ]
1754
+ ids, _ = zip(*_any_skipna_inferred_dtype) # use inferred type as fixture-id
1755
+
1756
+
1757
+ @pytest.fixture(params=_any_skipna_inferred_dtype, ids=ids)
1758
+ def any_skipna_inferred_dtype(request):
1759
+ """
1760
+ Fixture for all inferred dtypes from _libs.lib.infer_dtype
1761
+
1762
+ The covered (inferred) types are:
1763
+ * 'string'
1764
+ * 'empty'
1765
+ * 'bytes'
1766
+ * 'mixed'
1767
+ * 'mixed-integer'
1768
+ * 'mixed-integer-float'
1769
+ * 'floating'
1770
+ * 'integer'
1771
+ * 'decimal'
1772
+ * 'boolean'
1773
+ * 'datetime64'
1774
+ * 'datetime'
1775
+ * 'date'
1776
+ * 'timedelta'
1777
+ * 'time'
1778
+ * 'period'
1779
+ * 'interval'
1780
+
1781
+ Returns
1782
+ -------
1783
+ inferred_dtype : str
1784
+ The string for the inferred dtype from _libs.lib.infer_dtype
1785
+ values : np.ndarray
1786
+ An array of object dtype that will be inferred to have
1787
+ `inferred_dtype`
1788
+
1789
+ Examples
1790
+ --------
1791
+ >>> from pandas._libs import lib
1792
+ >>>
1793
+ >>> def test_something(any_skipna_inferred_dtype):
1794
+ ... inferred_dtype, values = any_skipna_inferred_dtype
1795
+ ... # will pass
1796
+ ... assert lib.infer_dtype(values, skipna=True) == inferred_dtype
1797
+ """
1798
+ inferred_dtype, values = request.param
1799
+ values = np.array(values, dtype=object) # object dtype to avoid casting
1800
+
1801
+ # correctness of inference tested in tests/dtypes/test_inference.py
1802
+ return inferred_dtype, values
1803
+
1804
+
1805
+ # ----------------------------------------------------------------
1806
+ # Misc
1807
+ # ----------------------------------------------------------------
1808
+ @pytest.fixture
1809
+ def ip():
1810
+ """
1811
+ Get an instance of IPython.InteractiveShell.
1812
+
1813
+ Will raise a skip if IPython is not installed.
1814
+ """
1815
+ pytest.importorskip("IPython", minversion="6.0.0")
1816
+ from IPython.core.interactiveshell import InteractiveShell
1817
+
1818
+ # GH#35711 make sure sqlite history file handle is not leaked
1819
+ from traitlets.config import Config # isort:skip
1820
+
1821
+ c = Config()
1822
+ c.HistoryManager.hist_file = ":memory:"
1823
+
1824
+ return InteractiveShell(config=c)
1825
+
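+ # Minimal usage sketch: tests typically execute code inside the shell, e.g.
+ #   ip.run_cell("import pandas as pd; obj = pd.Series([1, 2, 3])")
+ # and then inspect the resulting namespace or tab completions.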
1826
+
1827
+ @pytest.fixture(params=["bsr", "coo", "csc", "csr", "dia", "dok", "lil"])
1828
+ def spmatrix(request):
1829
+ """
1830
+ Fixture for scipy sparse matrix classes.
1831
+ """
1832
+ sparse = pytest.importorskip("scipy.sparse")
1833
+
1834
+ return getattr(sparse, request.param + "_matrix")
1835
+
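+ # Minimal usage sketch (np.eye(3) is an arbitrary example input):
+ #   df = pd.DataFrame.sparse.from_spmatrix(spmatrix(np.eye(3)))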
1836
+
1837
+ @pytest.fixture(
1838
+ params=[
1839
+ getattr(pd.offsets, o)
1840
+ for o in pd.offsets.__all__
1841
+ if issubclass(getattr(pd.offsets, o), pd.offsets.Tick) and o != "Tick"
1842
+ ]
1843
+ )
1844
+ def tick_classes(request):
1845
+ """
1846
+ Fixture for Tick based datetime offsets available for a time series.
1847
+ """
1848
+ return request.param
1849
+
1850
+
1851
+ @pytest.fixture(params=[None, lambda x: x])
1852
+ def sort_by_key(request):
1853
+ """
1854
+ Simple fixture for testing keys in sorting methods.
1855
+ Tests None (no key) and the identity key.
1856
+ """
1857
+ return request.param
1858
+
1859
+
1860
+ @pytest.fixture(
1861
+ params=[
1862
+ ("foo", None, None),
1863
+ ("Egon", "Venkman", None),
1864
+ ("NCC1701D", "NCC1701D", "NCC1701D"),
1865
+ # possibly-matching NAs
1866
+ (np.nan, np.nan, np.nan),
1867
+ (np.nan, pd.NaT, None),
1868
+ (np.nan, pd.NA, None),
1869
+ (pd.NA, pd.NA, pd.NA),
1870
+ ]
1871
+ )
1872
+ def names(request) -> tuple[Hashable, Hashable, Hashable]:
1873
+ """
1874
+ A 3-tuple of names, the first two for operands, the last for a result.
1875
+ """
1876
+ return request.param
1877
+
1878
+
1879
+ @pytest.fixture(params=[tm.setitem, tm.loc, tm.iloc])
1880
+ def indexer_sli(request):
1881
+ """
1882
+ Parametrize over __setitem__, loc.__setitem__, iloc.__setitem__
1883
+ """
1884
+ return request.param
1885
+
1886
+
1887
+ @pytest.fixture(params=[tm.loc, tm.iloc])
1888
+ def indexer_li(request):
1889
+ """
1890
+ Parametrize over loc.__getitem__, iloc.__getitem__
1891
+ """
1892
+ return request.param
1893
+
1894
+
1895
+ @pytest.fixture(params=[tm.setitem, tm.iloc])
1896
+ def indexer_si(request):
1897
+ """
1898
+ Parametrize over __setitem__, iloc.__setitem__
1899
+ """
1900
+ return request.param
1901
+
1902
+
1903
+ @pytest.fixture(params=[tm.setitem, tm.loc])
1904
+ def indexer_sl(request):
1905
+ """
1906
+ Parametrize over __setitem__, loc.__setitem__
1907
+ """
1908
+ return request.param
1909
+
1910
+
1911
+ @pytest.fixture(params=[tm.at, tm.loc])
1912
+ def indexer_al(request):
1913
+ """
1914
+ Parametrize over at.__setitem__, loc.__setitem__
1915
+ """
1916
+ return request.param
1917
+
1918
+
1919
+ @pytest.fixture(params=[tm.iat, tm.iloc])
1920
+ def indexer_ial(request):
1921
+ """
1922
+ Parametrize over iat.__setitem__, iloc.__setitem__
1923
+ """
1924
+ return request.param
1925
+
1926
+
1927
+ @pytest.fixture
1928
+ def using_array_manager() -> bool:
1929
+ """
1930
+ Fixture to check if the array manager is being used.
1931
+ """
1932
+ return _get_option("mode.data_manager", silent=True) == "array"
1933
+
1934
+
1935
+ @pytest.fixture
1936
+ def using_copy_on_write() -> bool:
1937
+ """
1938
+ Fixture to check if Copy-on-Write is enabled.
1939
+ """
1940
+ return (
1941
+ pd.options.mode.copy_on_write is True
1942
+ and _get_option("mode.data_manager", silent=True) == "block"
1943
+ )
1944
+
1945
+
1946
+ @pytest.fixture
1947
+ def warn_copy_on_write() -> bool:
1948
+ """
1949
+ Fixture to check if Copy-on-Write is in warning mode.
1950
+ """
1951
+ return (
1952
+ pd.options.mode.copy_on_write == "warn"
1953
+ and _get_option("mode.data_manager", silent=True) == "block"
1954
+ )
1955
+
1956
+
1957
+ @pytest.fixture
1958
+ def using_infer_string() -> bool:
1959
+ """
1960
+ Fixture to check if infer string option is enabled.
1961
+ """
1962
+ return pd.options.future.infer_string is True
1963
+
1964
+
1965
+ warsaws = ["Europe/Warsaw", "dateutil/Europe/Warsaw"]
1966
+ if zoneinfo is not None:
1967
+ warsaws.append(zoneinfo.ZoneInfo("Europe/Warsaw")) # type: ignore[arg-type]
1968
+
1969
+
1970
+ @pytest.fixture(params=warsaws)
1971
+ def warsaw(request):
1972
+ """
1973
+ tzinfo for Europe/Warsaw using pytz, dateutil, or zoneinfo.
1974
+ """
1975
+ return request.param
1976
+
1977
+
1978
+ @pytest.fixture()
1979
+ def arrow_string_storage():
1980
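+ """
+ Fixture returning the string_storage names that are pyarrow-backed.
+ """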
+ return ("pyarrow", "pyarrow_numpy")
infer_4_30_0/lib/python3.10/site-packages/pandas/pyproject.toml ADDED
@@ -0,0 +1,811 @@
1
+ [build-system]
2
+ # Minimum requirements for the build system to execute.
3
+ # See https://github.com/scipy/scipy/pull/12940 for the AIX issue.
4
+ requires = [
5
+ "meson-python==0.13.1",
6
+ "meson==1.2.1",
7
+ "wheel",
8
+ "Cython~=3.0.5", # Note: sync with setup.py, environment.yml and asv.conf.json
9
+ # Force numpy higher than 2.0, so that built wheels are compatible
10
+ # with both numpy 1 and 2
11
+ "numpy>=2.0",
12
+ "versioneer[toml]"
13
+ ]
14
+
15
+ build-backend = "mesonpy"
16
+
17
+ [project]
18
+ name = 'pandas'
19
+ dynamic = [
20
+ 'version'
21
+ ]
22
+ description = 'Powerful data structures for data analysis, time series, and statistics'
23
+ readme = 'README.md'
24
+ authors = [
25
+ { name = 'The Pandas Development Team', email='[email protected]' },
26
+ ]
27
+ license = {file = 'LICENSE'}
28
+ requires-python = '>=3.9'
29
+ dependencies = [
30
+ "numpy>=1.22.4; python_version<'3.11'",
31
+ "numpy>=1.23.2; python_version=='3.11'",
32
+ "numpy>=1.26.0; python_version>='3.12'",
33
+ "python-dateutil>=2.8.2",
34
+ "pytz>=2020.1",
35
+ "tzdata>=2022.7"
36
+ ]
37
+ classifiers = [
38
+ 'Development Status :: 5 - Production/Stable',
39
+ 'Environment :: Console',
40
+ 'Intended Audience :: Science/Research',
41
+ 'License :: OSI Approved :: BSD License',
42
+ 'Operating System :: OS Independent',
43
+ 'Programming Language :: Cython',
44
+ 'Programming Language :: Python',
45
+ 'Programming Language :: Python :: 3',
46
+ 'Programming Language :: Python :: 3 :: Only',
47
+ 'Programming Language :: Python :: 3.9',
48
+ 'Programming Language :: Python :: 3.10',
49
+ 'Programming Language :: Python :: 3.11',
50
+ 'Programming Language :: Python :: 3.12',
51
+ 'Topic :: Scientific/Engineering'
52
+ ]
53
+
54
+ [project.urls]
55
+ homepage = 'https://pandas.pydata.org'
56
+ documentation = 'https://pandas.pydata.org/docs/'
57
+ repository = 'https://github.com/pandas-dev/pandas'
58
+
59
+ [project.entry-points."pandas_plotting_backends"]
60
+ matplotlib = "pandas:plotting._matplotlib"
61
+
62
+ [project.optional-dependencies]
63
+ test = ['hypothesis>=6.46.1', 'pytest>=7.3.2', 'pytest-xdist>=2.2.0']
64
+ pyarrow = ['pyarrow>=10.0.1']
65
+ performance = ['bottleneck>=1.3.6', 'numba>=0.56.4', 'numexpr>=2.8.4']
66
+ computation = ['scipy>=1.10.0', 'xarray>=2022.12.0']
67
+ fss = ['fsspec>=2022.11.0']
68
+ aws = ['s3fs>=2022.11.0']
69
+ gcp = ['gcsfs>=2022.11.0', 'pandas-gbq>=0.19.0']
70
+ excel = ['odfpy>=1.4.1', 'openpyxl>=3.1.0', 'python-calamine>=0.1.7', 'pyxlsb>=1.0.10', 'xlrd>=2.0.1', 'xlsxwriter>=3.0.5']
71
+ parquet = ['pyarrow>=10.0.1']
72
+ feather = ['pyarrow>=10.0.1']
73
+ hdf5 = [# blosc only available on conda (https://github.com/Blosc/python-blosc/issues/297)
74
+ #'blosc>=1.20.1',
75
+ 'tables>=3.8.0']
76
+ spss = ['pyreadstat>=1.2.0']
77
+ postgresql = ['SQLAlchemy>=2.0.0', 'psycopg2>=2.9.6', 'adbc-driver-postgresql>=0.8.0']
78
+ mysql = ['SQLAlchemy>=2.0.0', 'pymysql>=1.0.2']
79
+ sql-other = ['SQLAlchemy>=2.0.0', 'adbc-driver-postgresql>=0.8.0', 'adbc-driver-sqlite>=0.8.0']
80
+ html = ['beautifulsoup4>=4.11.2', 'html5lib>=1.1', 'lxml>=4.9.2']
81
+ xml = ['lxml>=4.9.2']
82
+ plot = ['matplotlib>=3.6.3']
83
+ output-formatting = ['jinja2>=3.1.2', 'tabulate>=0.9.0']
84
+ clipboard = ['PyQt5>=5.15.9', 'qtpy>=2.3.0']
85
+ compression = ['zstandard>=0.19.0']
86
+ consortium-standard = ['dataframe-api-compat>=0.1.7']
87
+ all = ['adbc-driver-postgresql>=0.8.0',
88
+ 'adbc-driver-sqlite>=0.8.0',
89
+ 'beautifulsoup4>=4.11.2',
90
+ # blosc only available on conda (https://github.com/Blosc/python-blosc/issues/297)
91
+ #'blosc>=1.21.3',
92
+ 'bottleneck>=1.3.6',
93
+ 'dataframe-api-compat>=0.1.7',
94
+ 'fastparquet>=2022.12.0',
95
+ 'fsspec>=2022.11.0',
96
+ 'gcsfs>=2022.11.0',
97
+ 'html5lib>=1.1',
98
+ 'hypothesis>=6.46.1',
99
+ 'jinja2>=3.1.2',
100
+ 'lxml>=4.9.2',
101
+ 'matplotlib>=3.6.3',
102
+ 'numba>=0.56.4',
103
+ 'numexpr>=2.8.4',
104
+ 'odfpy>=1.4.1',
105
+ 'openpyxl>=3.1.0',
106
+ 'pandas-gbq>=0.19.0',
107
+ 'psycopg2>=2.9.6',
108
+ 'pyarrow>=10.0.1',
109
+ 'pymysql>=1.0.2',
110
+ 'PyQt5>=5.15.9',
111
+ 'pyreadstat>=1.2.0',
112
+ 'pytest>=7.3.2',
113
+ 'pytest-xdist>=2.2.0',
114
+ 'python-calamine>=0.1.7',
115
+ 'pyxlsb>=1.0.10',
116
+ 'qtpy>=2.3.0',
117
+ 'scipy>=1.10.0',
118
+ 's3fs>=2022.11.0',
119
+ 'SQLAlchemy>=2.0.0',
120
+ 'tables>=3.8.0',
121
+ 'tabulate>=0.9.0',
122
+ 'xarray>=2022.12.0',
123
+ 'xlrd>=2.0.1',
124
+ 'xlsxwriter>=3.0.5',
125
+ 'zstandard>=0.19.0']
126
+
127
+ # TODO: Remove after setuptools support is dropped.
128
+ [tool.setuptools]
129
+ include-package-data = true
130
+
131
+ [tool.setuptools.packages.find]
132
+ include = ["pandas", "pandas.*"]
133
+ namespaces = false
134
+
135
+ [tool.setuptools.exclude-package-data]
136
+ "*" = ["*.c", "*.h"]
137
+
138
+ # See the docstring in versioneer.py for instructions. Note that you must
139
+ # re-run 'versioneer.py setup' after changing this section, and commit the
140
+ # resulting files.
141
+ [tool.versioneer]
142
+ VCS = "git"
143
+ style = "pep440"
144
+ versionfile_source = "pandas/_version.py"
145
+ versionfile_build = "pandas/_version.py"
146
+ tag_prefix = "v"
147
+ parentdir_prefix = "pandas-"
148
+
149
+ [tool.meson-python.args]
150
+ setup = ['--vsenv'] # For Windows
151
+
152
+ [tool.cibuildwheel]
153
+ skip = "cp36-* cp37-* cp38-* pp* *_i686 *_ppc64le *_s390x"
154
+ build-verbosity = "3"
155
+ environment = {LDFLAGS="-Wl,--strip-all"}
156
+ # pytz 2024.2 causing some failures
157
+ test-requires = "hypothesis>=6.46.1 pytest>=7.3.2 pytest-xdist>=2.2.0 pytz<2024.2"
158
+ test-command = """
159
+ PANDAS_CI='1' python -c 'import pandas as pd; \
160
+ pd.test(extra_args=["-m not clipboard and not single_cpu and not slow and not network and not db", "-n 2", "--no-strict-data-files"]); \
161
+ pd.test(extra_args=["-m not clipboard and single_cpu and not slow and not network and not db", "--no-strict-data-files"]);' \
162
+ """
163
+ free-threaded-support = true
164
+ before-build = "PACKAGE_DIR={package} bash {package}/scripts/cibw_before_build.sh"
165
+
166
+ [tool.cibuildwheel.windows]
167
+ before-build = "pip install delvewheel && bash {package}/scripts/cibw_before_build.sh"
168
+ repair-wheel-command = "delvewheel repair -w {dest_dir} {wheel}"
169
+
170
+ [[tool.cibuildwheel.overrides]]
171
+ select = "*-manylinux_aarch64*"
172
+ test-command = """
173
+ PANDAS_CI='1' python -c 'import pandas as pd; \
174
+ pd.test(extra_args=["-m not clipboard and not single_cpu and not slow and not network and not db and not fails_arm_wheels", "-n 2", "--no-strict-data-files"]); \
175
+ pd.test(extra_args=["-m not clipboard and single_cpu and not slow and not network and not db", "--no-strict-data-files"]);' \
176
+ """
177
+
178
+ [[tool.cibuildwheel.overrides]]
179
+ select = "*-musllinux*"
180
+ before-test = "apk update && apk add musl-locales"
181
+
182
+ [[tool.cibuildwheel.overrides]]
183
+ select = "*-win*"
184
+ # We test separately for Windows, since we use
185
+ # the windowsservercore docker image to check if any dlls are
186
+ # missing from the wheel
187
+ test-command = ""
188
+
189
+ [[tool.cibuildwheel.overrides]]
190
+ # Don't strip wheels on macOS.
191
+ # macOS doesn't support stripping wheels with linker
192
+ # https://github.com/MacPython/numpy-wheels/pull/87#issuecomment-624878264
193
+ select = "*-macosx*"
194
+ environment = {CFLAGS="-g0"}
195
+
196
+ [tool.black]
197
+ target-version = ['py39', 'py310']
198
+ required-version = '23.11.0'
199
+ exclude = '''
200
+ (
201
+ asv_bench/env
202
+ | \.egg
203
+ | \.git
204
+ | \.hg
205
+ | \.mypy_cache
206
+ | \.nox
207
+ | \.tox
208
+ | \.venv
209
+ | _build
210
+ | buck-out
211
+ | build
212
+ | dist
213
+ | setup.py
214
+ )
215
+ '''
216
+
217
+ [tool.ruff]
218
+ line-length = 88
219
+ target-version = "py310"
220
+ fix = true
221
+ unfixable = []
222
+ typing-modules = ["pandas._typing"]
223
+
224
+ select = [
225
+ # pyflakes
226
+ "F",
227
+ # pycodestyle
228
+ "E", "W",
229
+ # flake8-2020
230
+ "YTT",
231
+ # flake8-bugbear
232
+ "B",
233
+ # flake8-quotes
234
+ "Q",
235
+ # flake8-debugger
236
+ "T10",
237
+ # flake8-gettext
238
+ "INT",
239
+ # pylint
240
+ "PL",
241
+ # misc lints
242
+ "PIE",
243
+ # flake8-pyi
244
+ "PYI",
245
+ # tidy imports
246
+ "TID",
247
+ # implicit string concatenation
248
+ "ISC",
249
+ # type-checking imports
250
+ "TCH",
251
+ # comprehensions
252
+ "C4",
253
+ # pygrep-hooks
254
+ "PGH",
255
+ # Ruff-specific rules
256
+ "RUF",
257
+ # flake8-bandit: exec-builtin
258
+ "S102",
259
+ # numpy-legacy-random
260
+ "NPY002",
261
+ # Perflint
262
+ "PERF",
263
+ # flynt
264
+ "FLY",
265
+ # flake8-logging-format
266
+ "G",
267
+ # flake8-future-annotations
268
+ "FA",
269
+ ]
270
+
271
+ ignore = [
272
+ ### Intentionally disabled
273
+ # space before : (needed for how black formats slicing)
274
+ "E203",
275
+ # module level import not at top of file
276
+ "E402",
277
+ # do not assign a lambda expression, use a def
278
+ "E731",
279
+ # line break before binary operator
280
+ # "W503", # not yet implemented
281
+ # line break after binary operator
282
+ # "W504", # not yet implemented
283
+ # controversial
284
+ "B006",
285
+ # controversial
286
+ "B007",
287
+ # controversial
288
+ "B008",
289
+ # setattr is used to side-step mypy
290
+ "B009",
291
+ # getattr is used to side-step mypy
292
+ "B010",
293
+ # tests use assert False
294
+ "B011",
295
+ # tests use comparisons but not their returned value
296
+ "B015",
297
+ # false positives
298
+ "B019",
299
+ # Loop control variable overrides iterable it iterates
300
+ "B020",
301
+ # Function definition does not bind loop variable
302
+ "B023",
303
+ # Functions defined inside a loop must not use variables redefined in the loop
304
+ # "B301", # not yet implemented
305
+ # Only works with python >=3.10
306
+ "B905",
307
+ # Too many arguments to function call
308
+ "PLR0913",
309
+ # Too many returns
310
+ "PLR0911",
311
+ # Too many branches
312
+ "PLR0912",
313
+ # Too many statements
314
+ "PLR0915",
315
+ # Redefined loop name
316
+ "PLW2901",
317
+ # Global statements are discouraged
318
+ "PLW0603",
319
+ # Docstrings should not be included in stubs
320
+ "PYI021",
321
+ # Use `typing.NamedTuple` instead of `collections.namedtuple`
322
+ "PYI024",
323
+ # No builtin `eval()` allowed
324
+ "PGH001",
325
+ # compare-to-empty-string
326
+ "PLC1901",
327
+ # while int | float can be shortened to float, the former is more explicit
328
+ "PYI041",
329
+ # incorrect-dict-iterator, flags valid Series.items usage
330
+ "PERF102",
331
+ # try-except-in-loop, becomes useless in Python 3.11
332
+ "PERF203",
333
+
334
+
335
+ ### TODO: Enable gradually
336
+ # Useless statement
337
+ "B018",
338
+ # Within an except clause, raise exceptions with ...
339
+ "B904",
340
+ # Magic number
341
+ "PLR2004",
342
+ # comparison-with-itself
343
+ "PLR0124",
344
+ # Consider `elif` instead of `else` then `if` to remove indentation level
345
+ "PLR5501",
346
+ # collection-literal-concatenation
347
+ "RUF005",
348
+ # pairwise-over-zipped (>=PY310 only)
349
+ "RUF007",
350
+ # explicit-f-string-type-conversion
351
+ "RUF010",
352
+ # mutable-class-default
353
+ "RUF012"
354
+ ]
355
+
356
+ exclude = [
357
+ "doc/sphinxext/*.py",
358
+ "doc/build/*.py",
359
+ "doc/temp/*.py",
360
+ ".eggs/*.py",
361
+ # vendored files
362
+ "pandas/util/version/*",
363
+ "pandas/io/clipboard/__init__.py",
364
+ # exclude asv benchmark environments from linting
365
+ "env",
366
+ ]
367
+
368
+ [tool.ruff.per-file-ignores]
369
+ # relative imports allowed for asv_bench
370
+ "asv_bench/*" = ["TID", "NPY002"]
371
+ # to be enabled gradually
372
+ "pandas/core/*" = ["PLR5501"]
373
+ "pandas/tests/*" = ["B028", "FLY"]
374
+ "scripts/*" = ["B028"]
375
+ # Keep this one enabled
376
+ "pandas/_typing.py" = ["TCH"]
377
+
378
+ [tool.pylint.messages_control]
379
+ max-line-length = 88
380
+ disable = [
381
+ # intentionally turned off
382
+ "bad-mcs-classmethod-argument",
383
+ "broad-except",
384
+ "c-extension-no-member",
385
+ "comparison-with-itself",
386
+ "consider-using-enumerate",
387
+ "import-error",
388
+ "import-outside-toplevel",
389
+ "invalid-name",
390
+ "invalid-unary-operand-type",
391
+ "line-too-long",
392
+ "no-else-continue",
393
+ "no-else-raise",
394
+ "no-else-return",
395
+ "no-member",
396
+ "no-name-in-module",
397
+ "not-an-iterable",
398
+ "overridden-final-method",
399
+ "pointless-statement",
400
+ "redundant-keyword-arg",
401
+ "singleton-comparison",
402
+ "too-many-ancestors",
403
+ "too-many-arguments",
404
+ "too-many-boolean-expressions",
405
+ "too-many-branches",
406
+ "too-many-function-args",
407
+ "too-many-instance-attributes",
408
+ "too-many-locals",
409
+ "too-many-nested-blocks",
410
+ "too-many-public-methods",
411
+ "too-many-return-statements",
412
+ "too-many-statements",
413
+ "unexpected-keyword-arg",
414
+ "ungrouped-imports",
415
+ "unsubscriptable-object",
416
+ "unsupported-assignment-operation",
417
+ "unsupported-membership-test",
418
+ "unused-import",
419
+ "use-dict-literal",
420
+ "use-implicit-booleaness-not-comparison",
421
+ "use-implicit-booleaness-not-len",
422
+ "wrong-import-order",
423
+ "wrong-import-position",
424
+ "redefined-loop-name",
425
+
426
+ # misc
427
+ "abstract-class-instantiated",
428
+ "no-value-for-parameter",
429
+ "undefined-variable",
430
+ "unpacking-non-sequence",
431
+ "used-before-assignment",
432
+
433
+ # pylint type "C": convention, for programming standard violation
434
+ "missing-class-docstring",
435
+ "missing-function-docstring",
436
+ "missing-module-docstring",
437
+ "superfluous-parens",
438
+ "too-many-lines",
439
+ "unidiomatic-typecheck",
440
+ "unnecessary-dunder-call",
441
+ "unnecessary-lambda-assignment",
442
+
443
+ # pylint type "R": refactor, for bad code smell
444
+ "consider-using-with",
445
+ "cyclic-import",
446
+ "duplicate-code",
447
+ "inconsistent-return-statements",
448
+ "redefined-argument-from-local",
449
+ "too-few-public-methods",
450
+
451
+ # pylint type "W": warning, for python specific problems
452
+ "abstract-method",
453
+ "arguments-differ",
454
+ "arguments-out-of-order",
455
+ "arguments-renamed",
456
+ "attribute-defined-outside-init",
457
+ "broad-exception-raised",
458
+ "comparison-with-callable",
459
+ "dangerous-default-value",
460
+ "deprecated-module",
461
+ "eval-used",
462
+ "expression-not-assigned",
463
+ "fixme",
464
+ "global-statement",
465
+ "invalid-overridden-method",
466
+ "keyword-arg-before-vararg",
467
+ "possibly-unused-variable",
468
+ "protected-access",
469
+ "raise-missing-from",
470
+ "redefined-builtin",
471
+ "redefined-outer-name",
472
+ "self-cls-assignment",
473
+ "signature-differs",
474
+ "super-init-not-called",
475
+ "try-except-raise",
476
+ "unnecessary-lambda",
477
+ "unused-argument",
478
+ "unused-variable",
479
+ "using-constant-test"
480
+ ]
481
+
482
+ [tool.pytest.ini_options]
483
+ # sync minversion with the pytest pin in [project.optional-dependencies] & install.rst
484
+ minversion = "7.3.2"
485
+ addopts = "--strict-markers --strict-config --capture=no --durations=30 --junitxml=test-data.xml"
486
+ empty_parameter_set_mark = "fail_at_collect"
487
+ xfail_strict = true
488
+ testpaths = "pandas"
489
+ doctest_optionflags = [
490
+ "NORMALIZE_WHITESPACE",
491
+ "IGNORE_EXCEPTION_DETAIL",
492
+ "ELLIPSIS",
493
+ ]
494
+ filterwarnings = [
495
+ "error:::pandas",
496
+ "error::ResourceWarning",
497
+ "error::pytest.PytestUnraisableExceptionWarning",
498
+ # TODO(PY311-minimum): Specify EncodingWarning
499
+ # Ignore 3rd party EncodingWarning but raise on pandas'
500
+ "ignore:.*encoding.* argument not specified",
501
+ "error:.*encoding.* argument not specified::pandas",
502
+ "ignore:.*ssl.SSLSocket:pytest.PytestUnraisableExceptionWarning",
503
+ "ignore:.*ssl.SSLSocket:ResourceWarning",
504
+ # GH 44844: Can remove once minimum matplotlib version >= 3.7
505
+ "ignore:.*FileIO:pytest.PytestUnraisableExceptionWarning",
506
+ "ignore:.*BufferedRandom:ResourceWarning",
507
+ "ignore::ResourceWarning:asyncio",
508
+ # From plotting doctests
509
+ "ignore:More than 20 figures have been opened:RuntimeWarning",
510
+ # Will be fixed in numba 0.56: https://github.com/numba/numba/issues/7758
511
+ "ignore:`np.MachAr` is deprecated:DeprecationWarning:numba",
512
+ "ignore:.*urllib3:DeprecationWarning:botocore",
513
+ "ignore:Setuptools is replacing distutils.:UserWarning:_distutils_hack",
514
+ # https://github.com/PyTables/PyTables/issues/822
515
+ "ignore:a closed node found in the registry:UserWarning:tables",
516
+ "ignore:`np.object` is a deprecated:DeprecationWarning:tables",
517
+ "ignore:tostring:DeprecationWarning:tables",
518
+ "ignore:distutils Version classes are deprecated:DeprecationWarning:pandas_datareader",
519
+ "ignore:distutils Version classes are deprecated:DeprecationWarning:numexpr",
520
+ "ignore:distutils Version classes are deprecated:DeprecationWarning:fastparquet",
521
+ "ignore:distutils Version classes are deprecated:DeprecationWarning:fsspec",
522
+ # Can be removed once https://github.com/numpy/numpy/pull/24794 is merged
523
+ "ignore:.*In the future `np.long` will be defined as.*:FutureWarning",
524
+ ]
525
+ junit_family = "xunit2"
526
+ markers = [
527
+ "single_cpu: tests that should run on a single cpu only",
528
+ "slow: mark a test as slow",
529
+ "network: mark a test as network",
530
+ "db: tests requiring a database (mysql or postgres)",
531
+ "clipboard: mark a pd.read_clipboard test",
532
+ "arm_slow: mark a test as slow for arm64 architecture",
533
+ "skip_ubsan: Tests known to fail UBSAN check",
534
+ # TODO: someone should investigate this ...
535
+ # these tests only fail in the wheel builder and don't fail in regular
536
+ # ARM CI
537
+ "fails_arm_wheels: Tests that fail in the ARM wheel build only",
538
+ ]
539
+
540
+ [tool.mypy]
541
+ # Import discovery
542
+ mypy_path = "typings"
543
+ files = ["pandas", "typings"]
544
+ namespace_packages = false
545
+ explicit_package_bases = false
546
+ ignore_missing_imports = true
547
+ follow_imports = "normal"
548
+ follow_imports_for_stubs = false
549
+ no_site_packages = false
550
+ no_silence_site_packages = false
551
+ # Platform configuration
552
+ python_version = "3.11"
553
+ platform = "linux-64"
554
+ # Disallow dynamic typing
555
+ disallow_any_unimported = false # TODO
556
+ disallow_any_expr = false # TODO
557
+ disallow_any_decorated = false # TODO
558
+ disallow_any_explicit = false # TODO
559
+ disallow_any_generics = false # TODO
560
+ disallow_subclassing_any = false # TODO
561
+ # Untyped definitions and calls
562
+ disallow_untyped_calls = true
563
+ disallow_untyped_defs = true
564
+ disallow_incomplete_defs = true
565
+ check_untyped_defs = true
566
+ disallow_untyped_decorators = true
567
+ # None and Optional handling
568
+ no_implicit_optional = true
569
+ strict_optional = true
570
+ # Configuring warnings
571
+ warn_redundant_casts = true
572
+ warn_unused_ignores = true
573
+ warn_no_return = true
574
+ warn_return_any = false # TODO
575
+ warn_unreachable = false # GH#27396
576
+ # Suppressing errors
577
+ ignore_errors = false
578
+ enable_error_code = "ignore-without-code"
579
+ # Miscellaneous strictness flags
580
+ allow_untyped_globals = false
581
+ allow_redefinition = false
582
+ local_partial_types = false
583
+ implicit_reexport = true
584
+ strict_equality = true
585
+ # Configuring error messages
586
+ show_error_context = false
587
+ show_column_numbers = false
588
+ show_error_codes = true
589
+
590
+ [[tool.mypy.overrides]]
591
+ module = [
592
+ "pandas._config.config", # TODO
593
+ "pandas._libs.*",
594
+ "pandas._testing.*", # TODO
595
+ "pandas.arrays", # TODO
596
+ "pandas.compat.numpy.function", # TODO
597
+ "pandas.compat._optional", # TODO
598
+ "pandas.compat.compressors", # TODO
599
+ "pandas.compat.pickle_compat", # TODO
600
+ "pandas.core._numba.executor", # TODO
601
+ "pandas.core.array_algos.datetimelike_accumulations", # TODO
602
+ "pandas.core.array_algos.masked_accumulations", # TODO
603
+ "pandas.core.array_algos.masked_reductions", # TODO
604
+ "pandas.core.array_algos.putmask", # TODO
605
+ "pandas.core.array_algos.quantile", # TODO
606
+ "pandas.core.array_algos.replace", # TODO
607
+ "pandas.core.array_algos.take", # TODO
608
+ "pandas.core.arrays.*", # TODO
609
+ "pandas.core.computation.*", # TODO
610
+ "pandas.core.dtypes.astype", # TODO
611
+ "pandas.core.dtypes.cast", # TODO
612
+ "pandas.core.dtypes.common", # TODO
613
+ "pandas.core.dtypes.concat", # TODO
614
+ "pandas.core.dtypes.dtypes", # TODO
615
+ "pandas.core.dtypes.generic", # TODO
616
+ "pandas.core.dtypes.inference", # TODO
617
+ "pandas.core.dtypes.missing", # TODO
618
+ "pandas.core.groupby.categorical", # TODO
619
+ "pandas.core.groupby.generic", # TODO
620
+ "pandas.core.groupby.grouper", # TODO
621
+ "pandas.core.groupby.groupby", # TODO
622
+ "pandas.core.groupby.ops", # TODO
623
+ "pandas.core.indexers.*", # TODO
624
+ "pandas.core.indexes.*", # TODO
625
+ "pandas.core.interchange.column", # TODO
626
+ "pandas.core.interchange.dataframe_protocol", # TODO
627
+ "pandas.core.interchange.from_dataframe", # TODO
628
+ "pandas.core.internals.*", # TODO
629
+ "pandas.core.methods.*", # TODO
630
+ "pandas.core.ops.array_ops", # TODO
631
+ "pandas.core.ops.common", # TODO
632
+ "pandas.core.ops.invalid", # TODO
633
+ "pandas.core.ops.mask_ops", # TODO
634
+ "pandas.core.ops.missing", # TODO
635
+ "pandas.core.reshape.*", # TODO
636
+ "pandas.core.strings.*", # TODO
637
+ "pandas.core.tools.*", # TODO
638
+ "pandas.core.window.common", # TODO
639
+ "pandas.core.window.ewm", # TODO
640
+ "pandas.core.window.expanding", # TODO
641
+ "pandas.core.window.numba_", # TODO
642
+ "pandas.core.window.online", # TODO
643
+ "pandas.core.window.rolling", # TODO
644
+ "pandas.core.accessor", # TODO
645
+ "pandas.core.algorithms", # TODO
646
+ "pandas.core.apply", # TODO
647
+ "pandas.core.arraylike", # TODO
648
+ "pandas.core.base", # TODO
649
+ "pandas.core.common", # TODO
650
+ "pandas.core.config_init", # TODO
651
+ "pandas.core.construction", # TODO
652
+ "pandas.core.flags", # TODO
653
+ "pandas.core.frame", # TODO
654
+ "pandas.core.generic", # TODO
655
+ "pandas.core.indexing", # TODO
656
+ "pandas.core.missing", # TODO
657
+ "pandas.core.nanops", # TODO
658
+ "pandas.core.resample", # TODO
659
+ "pandas.core.roperator", # TODO
660
+ "pandas.core.sample", # TODO
661
+ "pandas.core.series", # TODO
662
+ "pandas.core.sorting", # TODO
663
+ "pandas.errors", # TODO
664
+ "pandas.io.clipboard", # TODO
665
+ "pandas.io.excel._base", # TODO
666
+ "pandas.io.excel._odfreader", # TODO
667
+ "pandas.io.excel._odswriter", # TODO
668
+ "pandas.io.excel._openpyxl", # TODO
669
+ "pandas.io.excel._pyxlsb", # TODO
670
+ "pandas.io.excel._xlrd", # TODO
671
+ "pandas.io.excel._xlsxwriter", # TODO
672
+ "pandas.io.formats.console", # TODO
673
+ "pandas.io.formats.css", # TODO
674
+ "pandas.io.formats.excel", # TODO
675
+ "pandas.io.formats.format", # TODO
676
+ "pandas.io.formats.info", # TODO
677
+ "pandas.io.formats.printing", # TODO
678
+ "pandas.io.formats.style", # TODO
679
+ "pandas.io.formats.style_render", # TODO
680
+ "pandas.io.formats.xml", # TODO
681
+ "pandas.io.json.*", # TODO
682
+ "pandas.io.parsers.*", # TODO
683
+ "pandas.io.sas.sas_xport", # TODO
684
+ "pandas.io.sas.sas7bdat", # TODO
685
+ "pandas.io.clipboards", # TODO
686
+ "pandas.io.common", # TODO
687
+ "pandas.io.gbq", # TODO
688
+ "pandas.io.html", # TODO
689
+ "pandas.io.gbq", # TODO
690
+ "pandas.io.parquet", # TODO
691
+ "pandas.io.pytables", # TODO
692
+ "pandas.io.sql", # TODO
693
+ "pandas.io.stata", # TODO
694
+ "pandas.io.xml", # TODO
695
+ "pandas.plotting.*", # TODO
696
+ "pandas.tests.*",
697
+ "pandas.tseries.frequencies", # TODO
698
+ "pandas.tseries.holiday", # TODO
699
+ "pandas.util._decorators", # TODO
700
+ "pandas.util._doctools", # TODO
701
+ "pandas.util._print_versions", # TODO
702
+ "pandas.util._test_decorators", # TODO
703
+ "pandas.util._validators", # TODO
704
+ "pandas.util", # TODO
705
+ "pandas._version",
706
+ "pandas.conftest",
707
+ "pandas"
708
+ ]
709
+ disallow_untyped_calls = false
710
+ disallow_untyped_defs = false
711
+ disallow_incomplete_defs = false
712
+
713
+ [[tool.mypy.overrides]]
714
+ module = [
715
+ "pandas.tests.*",
716
+ "pandas._version",
717
+ "pandas.io.clipboard",
718
+ ]
719
+ check_untyped_defs = false
720
+
721
+ [[tool.mypy.overrides]]
722
+ module = [
723
+ "pandas.tests.apply.test_series_apply",
724
+ "pandas.tests.arithmetic.conftest",
725
+ "pandas.tests.arrays.sparse.test_combine_concat",
726
+ "pandas.tests.dtypes.test_common",
727
+ "pandas.tests.frame.methods.test_to_records",
728
+ "pandas.tests.groupby.test_rank",
729
+ "pandas.tests.groupby.transform.test_transform",
730
+ "pandas.tests.indexes.interval.test_interval",
731
+ "pandas.tests.indexing.test_categorical",
732
+ "pandas.tests.io.excel.test_writers",
733
+ "pandas.tests.reductions.test_reductions",
734
+ "pandas.tests.test_expressions",
735
+ ]
736
+ ignore_errors = true
737
+
738
+ # To be kept consistent with "Import Formatting" section in contributing.rst
739
+ [tool.isort]
740
+ known_pre_libs = "pandas._config"
741
+ known_pre_core = ["pandas._libs", "pandas._typing", "pandas.util._*", "pandas.compat", "pandas.errors"]
742
+ known_dtypes = "pandas.core.dtypes"
743
+ known_post_core = ["pandas.tseries", "pandas.io", "pandas.plotting"]
744
+ sections = ["FUTURE", "STDLIB", "THIRDPARTY" ,"PRE_LIBS" , "PRE_CORE", "DTYPES", "FIRSTPARTY", "POST_CORE", "LOCALFOLDER"]
745
+ profile = "black"
746
+ combine_as_imports = true
747
+ force_grid_wrap = 2
748
+ force_sort_within_sections = true
749
+ skip_glob = "env"
750
+ skip = "pandas/__init__.py"
751
+
752
+ [tool.pyright]
753
+ pythonVersion = "3.11"
754
+ typeCheckingMode = "basic"
755
+ useLibraryCodeForTypes = false
756
+ include = ["pandas", "typings"]
757
+ exclude = ["pandas/tests", "pandas/io/clipboard", "pandas/util/version", "pandas/core/_numba/extensions.py"]
758
+ # enable subset of "strict"
759
+ reportDuplicateImport = true
760
+ reportInconsistentConstructor = true
761
+ reportInvalidStubStatement = true
762
+ reportOverlappingOverload = true
763
+ reportPropertyTypeMismatch = true
764
+ reportUntypedClassDecorator = true
765
+ reportUntypedFunctionDecorator = true
766
+ reportUntypedNamedTuple = true
767
+ reportUnusedImport = true
768
+ disableBytesTypePromotions = true
769
+ # disable subset of "basic"
770
+ reportGeneralTypeIssues = false
771
+ reportMissingModuleSource = false
772
+ reportOptionalCall = false
773
+ reportOptionalIterable = false
774
+ reportOptionalMemberAccess = false
775
+ reportOptionalOperand = false
776
+ reportOptionalSubscript = false
777
+ reportPrivateImportUsage = false
778
+ reportUnboundVariable = false
779
+
780
+ [tool.coverage.run]
781
+ branch = true
782
+ omit = ["pandas/_typing.py", "pandas/_version.py"]
783
+ plugins = ["Cython.Coverage"]
784
+ source = ["pandas"]
785
+
786
+ [tool.coverage.report]
787
+ ignore_errors = false
788
+ show_missing = true
789
+ omit = ["pandas/_version.py"]
790
+ exclude_lines = [
791
+ # Have to re-enable the standard pragma
792
+ "pragma: no cover",
793
+ # Don't complain about missing debug-only code:s
794
+ "def __repr__",
795
+ "if self.debug",
796
+ # Don't complain if tests don't hit defensive assertion code:
797
+ "raise AssertionError",
798
+ "raise NotImplementedError",
799
+ "AbstractMethodError",
800
+ # Don't complain if non-runnable code isn't run:
801
+ "if 0:",
802
+ "if __name__ == .__main__.:",
803
+ "if TYPE_CHECKING:",
804
+ ]
805
+
806
+ [tool.coverage.html]
807
+ directory = "coverage_html_report"
808
+
809
+ [tool.codespell]
810
+ ignore-words-list = "blocs, coo, hist, nd, sav, ser, recuse, nin, timere, expec, expecs"
811
+ ignore-regex = 'https://([\w/\.])+'
infer_4_30_0/lib/python3.10/site-packages/pandas/testing.py ADDED
@@ -0,0 +1,18 @@
1
+ """
2
+ Public testing utility functions.
3
+ """
4
+
5
+
6
+ from pandas._testing import (
7
+ assert_extension_array_equal,
8
+ assert_frame_equal,
9
+ assert_index_equal,
10
+ assert_series_equal,
11
+ )
12
+
13
+ __all__ = [
14
+ "assert_extension_array_equal",
15
+ "assert_frame_equal",
16
+ "assert_series_equal",
17
+ "assert_index_equal",
18
+ ]
infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
1
+ pip
infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/LICENSE ADDED
@@ -0,0 +1,29 @@
1
+ BSD 3-Clause License
2
+
3
+ Copyright (c) Soumith Chintala 2016,
4
+ All rights reserved.
5
+
6
+ Redistribution and use in source and binary forms, with or without
7
+ modification, are permitted provided that the following conditions are met:
8
+
9
+ * Redistributions of source code must retain the above copyright notice, this
10
+ list of conditions and the following disclaimer.
11
+
12
+ * Redistributions in binary form must reproduce the above copyright notice,
13
+ this list of conditions and the following disclaimer in the documentation
14
+ and/or other materials provided with the distribution.
15
+
16
+ * Neither the name of the copyright holder nor the names of its
17
+ contributors may be used to endorse or promote products derived from
18
+ this software without specific prior written permission.
19
+
20
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/METADATA ADDED
@@ -0,0 +1,146 @@
1
+ Metadata-Version: 2.1
2
+ Name: torchvision
3
+ Version: 0.20.1
4
+ Summary: image and video datasets and models for torch deep learning
5
+ Home-page: https://github.com/pytorch/vision
6
+ Author: PyTorch Core Team
7
+ Author-email: [email protected]
8
+ License: BSD
9
+ Requires-Python: >=3.8
10
+ Description-Content-Type: text/markdown
11
+ License-File: LICENSE
12
+ Requires-Dist: numpy
13
+ Requires-Dist: torch (==2.5.1)
14
+ Requires-Dist: pillow (!=8.3.*,>=5.3.0)
15
+ Provides-Extra: gdown
16
+ Requires-Dist: gdown (>=4.7.3) ; extra == 'gdown'
17
+ Provides-Extra: scipy
18
+ Requires-Dist: scipy ; extra == 'scipy'
19
+
20
+ # torchvision
21
+
22
+ [![total torchvision downloads](https://pepy.tech/badge/torchvision)](https://pepy.tech/project/torchvision)
23
+ [![documentation](https://img.shields.io/badge/dynamic/json.svg?label=docs&url=https%3A%2F%2Fpypi.org%2Fpypi%2Ftorchvision%2Fjson&query=%24.info.version&colorB=brightgreen&prefix=v)](https://pytorch.org/vision/stable/index.html)
24
+
25
+ The torchvision package consists of popular datasets, model architectures, and common image transformations for computer
26
+ vision.
27
+
28
+ ## Installation
29
+
30
+ Please refer to the [official
31
+ instructions](https://pytorch.org/get-started/locally/) to install the stable
32
+ versions of `torch` and `torchvision` on your system.
33
+
34
+ To build source, refer to our [contributing
35
+ page](https://github.com/pytorch/vision/blob/main/CONTRIBUTING.md#development-installation).
36
+
37
+ The following is the corresponding `torchvision` versions and supported Python
38
+ versions.
39
+
40
+ | `torch` | `torchvision` | Python |
41
+ | ------------------ | ------------------ | ------------------- |
42
+ | `main` / `nightly` | `main` / `nightly` | `>=3.9`, `<=3.12` |
43
+ | `2.4` | `0.19` | `>=3.8`, `<=3.12` |
44
+ | `2.3` | `0.18` | `>=3.8`, `<=3.12` |
45
+ | `2.2` | `0.17` | `>=3.8`, `<=3.11` |
46
+ | `2.1` | `0.16` | `>=3.8`, `<=3.11` |
47
+ | `2.0` | `0.15` | `>=3.8`, `<=3.11` |
48
+
49
+ <details>
50
+ <summary>older versions</summary>
51
+
52
+ | `torch` | `torchvision` | Python |
53
+ |---------|-------------------|---------------------------|
54
+ | `1.13` | `0.14` | `>=3.7.2`, `<=3.10` |
55
+ | `1.12` | `0.13` | `>=3.7`, `<=3.10` |
56
+ | `1.11` | `0.12` | `>=3.7`, `<=3.10` |
57
+ | `1.10` | `0.11` | `>=3.6`, `<=3.9` |
58
+ | `1.9` | `0.10` | `>=3.6`, `<=3.9` |
59
+ | `1.8` | `0.9` | `>=3.6`, `<=3.9` |
60
+ | `1.7` | `0.8` | `>=3.6`, `<=3.9` |
61
+ | `1.6` | `0.7` | `>=3.6`, `<=3.8` |
62
+ | `1.5` | `0.6` | `>=3.5`, `<=3.8` |
63
+ | `1.4` | `0.5` | `==2.7`, `>=3.5`, `<=3.8` |
64
+ | `1.3` | `0.4.2` / `0.4.3` | `==2.7`, `>=3.5`, `<=3.7` |
65
+ | `1.2` | `0.4.1` | `==2.7`, `>=3.5`, `<=3.7` |
66
+ | `1.1` | `0.3` | `==2.7`, `>=3.5`, `<=3.7` |
67
+ | `<=1.0` | `0.2` | `==2.7`, `>=3.5`, `<=3.7` |
68
+
69
+ </details>
+
+ ## Image Backends
+
+ Torchvision currently supports the following image backends:
+
+ - torch tensors
+ - PIL images:
+     - [Pillow](https://python-pillow.org/)
+     - [Pillow-SIMD](https://github.com/uploadcare/pillow-simd) - a **much faster** drop-in replacement for Pillow with SIMD.
+
+ Read more in our [docs](https://pytorch.org/vision/stable/transforms.html).
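+
+ As a minimal sketch (not taken from the docs above), a classic preprocessing
+ pipeline built on the PIL backend; `Resize` and `CenterCrop` also operate
+ directly on torch tensors:
+
+ ```python
+ from PIL import Image
+ from torchvision import transforms
+
+ preprocess = transforms.Compose([
+     transforms.Resize(256),
+     transforms.CenterCrop(224),
+     transforms.ToTensor(),  # PIL image -> float tensor in [0, 1]
+ ])
+
+ pil_img = Image.new("RGB", (320, 240))  # stand-in for a real image
+ x = preprocess(pil_img)
+ print(x.shape)  # torch.Size([3, 224, 224])
+ ```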
+
+ ## [UNSTABLE] Video Backend
+
+ Torchvision currently supports the following video backends:
+
+ - [pyav](https://github.com/PyAV-Org/PyAV) (default) - Pythonic binding for ffmpeg libraries.
+ - video_reader - This needs ffmpeg to be installed and torchvision to be built from source. There shouldn't be any
+   conflicting version of ffmpeg installed. Currently, this is only supported on Linux.
+
+ ```
+ conda install -c conda-forge 'ffmpeg<4.3'
+ python setup.py install
+ ```
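+
+ After such a source build, switching backends at runtime looks roughly like
+ this (a sketch; `video.mp4` is a placeholder path):
+
+ ```python
+ import torchvision
+ from torchvision.io import read_video
+
+ # Select the self-built backend instead of the default "pyav".
+ torchvision.set_video_backend("video_reader")
+
+ # Frames as a uint8 tensor [T, H, W, C], plus audio and stream metadata.
+ frames, audio, info = read_video("video.mp4", pts_unit="sec")
+ ```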
+
+ # Using the models in C++
+
+ Refer to [example/cpp](https://github.com/pytorch/vision/tree/main/examples/cpp).
+
+ **DISCLAIMER**: the `libtorchvision` library includes the torchvision
+ custom ops as well as most of the C++ torchvision APIs. Those APIs do not come
+ with any backward-compatibility guarantees and may change from one version to
+ the next. Only the Python APIs are stable and come with backward-compatibility
+ guarantees. So, if you need stability within a C++ environment, your best bet is
+ to export the Python APIs via TorchScript.
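+
+ A minimal sketch of that export path (model choice and file name are
+ illustrative):
+
+ ```python
+ import torch
+ import torchvision
+
+ model = torchvision.models.resnet18(weights=None).eval()
+ scripted = torch.jit.script(model)  # compile the Python model to TorchScript
+ scripted.save("resnet18.pt")        # load from C++ via torch::jit::load(...)
+ ```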
+
+ ## Documentation
+
+ You can find the API documentation on the pytorch website: <https://pytorch.org/vision/stable/index.html>
+
+ ## Contributing
+
+ See the [CONTRIBUTING](CONTRIBUTING.md) file for how to help out.
+
+ ## Disclaimer on Datasets
+
+ This is a utility library that downloads and prepares public datasets. We do not host or distribute these datasets,
+ vouch for their quality or fairness, or claim that you have license to use the dataset. It is your responsibility to
+ determine whether you have permission to use the dataset under the dataset's license.
+
+ If you're a dataset owner and wish to update any part of it (description, citation, etc.), or do not want your dataset
+ to be included in this library, please get in touch through a GitHub issue. Thanks for your contribution to the ML
+ community!
+
+ ## Pre-trained Model License
+
+ The pre-trained models provided in this library may have their own licenses or terms and conditions derived from the
+ dataset used for training. It is your responsibility to determine whether you have permission to use the models for your
+ use case.
+
+ More specifically, SWAG models are released under the CC-BY-NC 4.0 license. See
+ [SWAG LICENSE](https://github.com/facebookresearch/SWAG/blob/main/LICENSE) for additional details.
+
+ ## Citing TorchVision
+
+ If you find TorchVision useful in your work, please consider citing the following BibTeX entry:
+
+ ```bibtex
+ @software{torchvision2016,
+     title = {TorchVision: PyTorch's Computer Vision library},
+     author = {TorchVision maintainers and contributors},
+     year = 2016,
+     journal = {GitHub repository},
+     publisher = {GitHub},
+     howpublished = {\url{https://github.com/pytorch/vision}}
+ }
+ ```
infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/RECORD ADDED
@@ -0,0 +1,385 @@
+ torchvision-0.20.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ torchvision-0.20.1.dist-info/LICENSE,sha256=ZQL2doUc_iX4r3VTHfsyN1tzJbc8N-e0N0H6QiiT5x0,1517
+ torchvision-0.20.1.dist-info/METADATA,sha256=KdpnEZQcR2IsY1ARsKthcFVj1_jftHgCQSJ_kP843Wk,6068
+ torchvision-0.20.1.dist-info/RECORD,,
+ torchvision-0.20.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ torchvision-0.20.1.dist-info/WHEEL,sha256=-tCk4BkDuXUB87ypq3-Y_qNDCWWx0qcANqRVWFABVws,104
+ torchvision-0.20.1.dist-info/top_level.txt,sha256=ucJZoaluBW9BGYT4TuCE6zoZY_JuSP30wbDh-IRpxUU,12
+ torchvision.libs/libcudart.41118559.so.12,sha256=h3QiT1sRpzsV0HSj_M5zJzIsXEz9_ZJNaoJnee7JaP4,707904
+ torchvision.libs/libjpeg.ceea7512.so.62,sha256=Q0Nt1U7kvyOPOH37o9EyH96wBEFcgH1NNJDDaL1eXew,285328
+ torchvision.libs/libnvjpeg.02b6d700.so.12,sha256=btLqpaLN_zk7yVxUNHS5YRemcYi76kbqdNEvf4OHR5c,6722352
+ torchvision.libs/libpng16.7f72a3c5.so.16,sha256=oLjzyAs4Xamd6gz3yNqpVAANI7AWTxEqvnsvr_Cg9j0,1079081
+ torchvision.libs/libwebp.4a54d2c8.so.4,sha256=PCamJr4mr2g5WEa4H_DP9GDtB0TInsPuUMYmiaYTdhE,320536
+ torchvision.libs/libz.5f199d92.so.1,sha256=Cw5oKp3H_UiVpngyiPhRt5PciWM_KHFAJ5dPpNZvORQ,124744
+ torchvision/_C.so,sha256=0Z1qM0HrPA4kiuCMBr_57Hl9H8owZZ3Hri5SfVUgbCo,7746688
+ torchvision/__init__.py,sha256=7iyfQRDPEgPbSMQmAWBzKawfGXCfqRwVL42V61NDenM,3534
+ torchvision/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/__pycache__/_internally_replaced_utils.cpython-310.pyc,,
+ torchvision/__pycache__/_meta_registrations.cpython-310.pyc,,
+ torchvision/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/__pycache__/extension.cpython-310.pyc,,
+ torchvision/__pycache__/utils.cpython-310.pyc,,
+ torchvision/__pycache__/version.cpython-310.pyc,,
+ torchvision/_internally_replaced_utils.py,sha256=67zSUHOn6JwdnMUQchHgpNLCtWQQ9dJFpV_OUn8Qb_w,1389
+ torchvision/_meta_registrations.py,sha256=lkEGW61fKUrGSh0iOFsZ1ZHskItS1EJ9Oo2UfM-OvQ8,7208
+ torchvision/_utils.py,sha256=6TWK0JGaZVQrofgCAp5ox61_NQE2gIwhYouKQMiTaJ8,934
+ torchvision/datasets/__init__.py,sha256=AHSoX8LkWIt7RGlJDmk64pDvmWq6GCh-D7XwE2l382A,3587
+ torchvision/datasets/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/_optical_flow.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/_stereo_matching.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/caltech.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/celeba.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/cifar.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/cityscapes.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/clevr.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/coco.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/country211.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/dtd.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/eurosat.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/fakedata.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/fer2013.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/fgvc_aircraft.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/flickr.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/flowers102.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/folder.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/food101.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/gtsrb.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/hmdb51.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/imagenet.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/imagenette.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/inaturalist.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/kinetics.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/kitti.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/lfw.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/lsun.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/mnist.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/moving_mnist.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/omniglot.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/oxford_iiit_pet.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/pcam.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/phototour.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/places365.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/rendered_sst2.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/sbd.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/sbu.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/semeion.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/stanford_cars.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/stl10.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/sun397.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/svhn.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/ucf101.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/usps.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/utils.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/video_utils.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/vision.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/voc.cpython-310.pyc,,
+ torchvision/datasets/__pycache__/widerface.cpython-310.pyc,,
+ torchvision/datasets/_optical_flow.py,sha256=oRm_6rlBpJyi9d2IeTiebHssDEXQDKEKGw3ZqNVDMrg,19697
+ torchvision/datasets/_stereo_matching.py,sha256=f1sAkmyKKmFtyvrw4osElkMR7vupD8gEp8Y2rQ4btFA,49112
+ torchvision/datasets/caltech.py,sha256=6W8artbXAhp7lok8LDhx28Q5-MkupkyUmmc1RTXACnQ,8933
+ torchvision/datasets/celeba.py,sha256=BfMfogQ5DkzdbZMXF7qC7PMSAEY4o-jeEQTYKdGszeQ,8470
+ torchvision/datasets/cifar.py,sha256=mwMBBDUu10FE1SshtQQaQ65jSt3XeH44rkkaUUN_UcE,5850
+ torchvision/datasets/cityscapes.py,sha256=h6uX9d886G86_zm1Ok_Nz876wA7oC50qDWfQTn8ErKA,10321
+ torchvision/datasets/clevr.py,sha256=Yw2dTlep-ERTzIsKHPGL9cblF88mGlRcoGoBGac1XZ0,3460
+ torchvision/datasets/coco.py,sha256=Zmfp6yZgWcDxXLDshcTnxDaKC6xvYsasPcBh_j9E9m4,4180
+ torchvision/datasets/country211.py,sha256=T_WIsox6Ve6CxmFwnx6bX3KkLy1xzBCbAFBcGqHVYC8,2436
+ torchvision/datasets/dtd.py,sha256=c6GtnNd4xj4BCE52GMaXnn-AnZm7yn9Yha8Iwb5xhCo,4019
+ torchvision/datasets/eurosat.py,sha256=nKBDlYaYupwughReDD7Z_EH_WVTqqSyGRBjnIjmvUUk,2307
+ torchvision/datasets/fakedata.py,sha256=gKmN6VyQzWjjeEPpLPxb9i4DWwW-MtGVJfZf8uwHgyo,2447
+ torchvision/datasets/fer2013.py,sha256=f_Zj3Qf32x8ew5dZu8A03uph3I4AUvmmZabaLhTSMnU,5118
+ torchvision/datasets/fgvc_aircraft.py,sha256=Y5P7SsYLeXDuxy7VHVTx9TYDKHloxtxlxT4JBDgbvXg,4626
+ torchvision/datasets/flickr.py,sha256=rcbyRlYd-d_vRW9qmOPfX1bKBgFu4NbF-qlldqt2mcU,5431
+ torchvision/datasets/flowers102.py,sha256=SdPXQtHAeZ5Iod0xyK2Xq7n0ENA6YIoEUFfRqiBu1Q0,4641
+ torchvision/datasets/folder.py,sha256=bh7Jv0BOphBkKYxD-BogUWexE9RIrGR0FLM5MR24aGM,12919
+ torchvision/datasets/food101.py,sha256=1vbbbahI-Lp9xySy5bsnS50TeV93ovesSIotY0astw0,3752
+ torchvision/datasets/gtsrb.py,sha256=0n6GQIGPuKU7yA0tSpiAA1UktoShE2vzeA2EqhQZK-Q,3785
+ torchvision/datasets/hmdb51.py,sha256=lC16QNHvbKkS8QfgVdhBvSwN2eLRFUBUNL021nkvgdc,5971
+ torchvision/datasets/imagenet.py,sha256=kllmhLsUPgm88rww0j-OaEa-iuzGgyu49q6gphpXLjA,8691
+ torchvision/datasets/imagenette.py,sha256=zzgx2cWRkDCrzX93qbhv4fOdngu8WXpTT6M0ZCg_AsE,4456
+ torchvision/datasets/inaturalist.py,sha256=8F43yInRw4Q4yAjWalwhgDIYkvzHtWBiQ_MtB0Jyn4g,10161
+ torchvision/datasets/kinetics.py,sha256=JlLErOUo7OQf_lp-vUS2yNtfP5vxMgjl-onPLj2tffw,10416
+ torchvision/datasets/kitti.py,sha256=8mCScWNce0OdG3b6vWCJGR370CydbK2Iy8W96Dfsl0I,5637
+ torchvision/datasets/lfw.py,sha256=7cwiL0PgnnS2d2CTse8LL2mOoo_eremqosyYmHETiwI,10560
+ torchvision/datasets/lsun.py,sha256=SAwzOTu0cQr7Tfo-iT0hIT986lCwOXKsIQYccSPDTBg,5728
+ torchvision/datasets/mnist.py,sha256=ymXGCJfp0V3YLsMGw15Ofry-_NwmbvaXnp13eJ67GQA,21718
+ torchvision/datasets/moving_mnist.py,sha256=6yCTZVgIlWy2f9bNlrAjpUWryeLohaWuN0bRhMdAERw,3644
+ torchvision/datasets/omniglot.py,sha256=b2MTG1TVxq3dk2ASBdHLu5uxLBnT4lpgSer8k9uuQq4,4151
+ torchvision/datasets/oxford_iiit_pet.py,sha256=t4me06AwjDjSTIE7f80VFuGxISGHFPz6B4Sn3uOrCBw,5519
+ torchvision/datasets/pcam.py,sha256=Ub7UWrAufIzLXN8p6Cunt7osnHCNTL-sxDmEMGypq2Q,5285
+ torchvision/datasets/phototour.py,sha256=4Sjdg-1dHk5Me5Ku-G75zSek0vs0CqkpQUgGF0KzI84,8037
+ torchvision/datasets/places365.py,sha256=rdktgfZAQWtXwptMeXNsNz3mqftmaN7DqMqWH0eTicY,7259
+ torchvision/datasets/rendered_sst2.py,sha256=2NRiL3I1hDrOdNllubdQ-gQ-Unaaqb2mLAXG4_JL5wY,3597
+ torchvision/datasets/samplers/__init__.py,sha256=W1ZtQpGLG6aoHylo1t8PEsHIVoWwso5bSFk9JzKfH8g,161
+ torchvision/datasets/samplers/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/datasets/samplers/__pycache__/clip_sampler.cpython-310.pyc,,
+ torchvision/datasets/samplers/clip_sampler.py,sha256=1-k3bxToGpBlqC4-iyVDggtojA701NflW0nBRLK27tQ,6244
+ torchvision/datasets/sbd.py,sha256=BpowMEO3_IxJgyjrtEN7XSLAKlrONVhCGr2kJXtTIzs,5414
+ torchvision/datasets/sbu.py,sha256=LFMPoEeuf7w0ABpevnIAuoxnTL-n1F1yzBVtB2z7m08,4143
+ torchvision/datasets/semeion.py,sha256=6GK9LWRZgwOFQA6yVxe5V7IsbM64-H4smYfPBquYGhY,3148
+ torchvision/datasets/stanford_cars.py,sha256=WgmPvMR-ZOpw-IV53Ud2cNvnnC1rHUDl-soCJSzEP1Y,4517
+ torchvision/datasets/stl10.py,sha256=0rUR0czJgbilfJ57L8qvwsSdojEhBsxtXLNzdxEJJPc,7293
+ torchvision/datasets/sun397.py,sha256=q_qfa_rdx4GUklR9oIHCgQC0JUKXMc7UudTq6yUeJPQ,2783
+ torchvision/datasets/svhn.py,sha256=Vk8VO74JUUaZHvejvkWJBRnmk-zpmHwjksMCZoBDWDc,4828
+ torchvision/datasets/ucf101.py,sha256=s7rHl7qonY7PnmEZac_O2gmJUIVFzyNxVbvMY7IY_Io,5533
+ torchvision/datasets/usps.py,sha256=7IP-xNZUJQNibubSodJgnpUJlCvNe-prd8BHsrbzSR0,3500
+ torchvision/datasets/utils.py,sha256=OJP_dKoAM1gx6OUSjLQnwRAN4DRFMx-iAHLDxBResro,16355
+ torchvision/datasets/video_utils.py,sha256=14GvzCRi7tbfeCq31MN9XP_6-bfewRSrvwavO4VBFdk,17213
+ torchvision/datasets/vision.py,sha256=x8AuTqEBwwBoHmkkWD6Iki8o5LMxac2yhrzIFBDgodE,4249
+ torchvision/datasets/voc.py,sha256=LhdQavn7-nq13zf9HIfjNYxPDa5SaTUDgayDe8uLfZc,8835
+ torchvision/datasets/widerface.py,sha256=f70xsvDz-PGLUA2eUFP6wSqbaA_ws0EErUPFvjnJ7wE,8323
+ torchvision/extension.py,sha256=YWBDURfCFXSmRvXi2iEg2L0hafN2-RnybpImh9JAUtQ,3141
+ torchvision/image.so,sha256=0eUsH9xFGN9c_8oDmWJtbCNpwEEgL6-P5Uwe24sNjZc,667265
+ torchvision/io/__init__.py,sha256=GMwjZuig-LWPufamClwl5EpFq0fExa7MXabkaMEuaHs,1625
+ torchvision/io/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/io/__pycache__/_load_gpu_decoder.cpython-310.pyc,,
+ torchvision/io/__pycache__/_video_opt.cpython-310.pyc,,
+ torchvision/io/__pycache__/image.cpython-310.pyc,,
+ torchvision/io/__pycache__/video.cpython-310.pyc,,
+ torchvision/io/__pycache__/video_reader.cpython-310.pyc,,
+ torchvision/io/_load_gpu_decoder.py,sha256=Cc8eP620qPDFc0q2qd-VYtjxtsgFPjOgg7Z04RXRziU,178
+ torchvision/io/_video_opt.py,sha256=oW2Vvs13fa9nopb4Ot3n_VNiOUCn5ZPLQnH8Xf8-81g,20456
+ torchvision/io/image.py,sha256=KooxdS2Ov2_mnbIOnYbSJU3SjPMvY0ck6NKIZ3hWneQ,17714
+ torchvision/io/video.py,sha256=AGMKrxzGb2KStloWlElYidVUvu3rRnYZyQF62MFXKgk,16779
+ torchvision/io/video_reader.py,sha256=eI09x1vuUsbtL6rnyeiv894y8EA9bfdJakV1zWYzBtQ,11689
+ torchvision/models/__init__.py,sha256=A8GQPE1bl3oUHpuD9ND53DV557IPY4459FNLW6sVXGI,865
+ torchvision/models/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/models/__pycache__/_api.cpython-310.pyc,,
+ torchvision/models/__pycache__/_meta.cpython-310.pyc,,
+ torchvision/models/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/models/__pycache__/alexnet.cpython-310.pyc,,
+ torchvision/models/__pycache__/convnext.cpython-310.pyc,,
+ torchvision/models/__pycache__/densenet.cpython-310.pyc,,
+ torchvision/models/__pycache__/efficientnet.cpython-310.pyc,,
+ torchvision/models/__pycache__/feature_extraction.cpython-310.pyc,,
+ torchvision/models/__pycache__/googlenet.cpython-310.pyc,,
+ torchvision/models/__pycache__/inception.cpython-310.pyc,,
+ torchvision/models/__pycache__/maxvit.cpython-310.pyc,,
+ torchvision/models/__pycache__/mnasnet.cpython-310.pyc,,
+ torchvision/models/__pycache__/mobilenet.cpython-310.pyc,,
+ torchvision/models/__pycache__/mobilenetv2.cpython-310.pyc,,
+ torchvision/models/__pycache__/mobilenetv3.cpython-310.pyc,,
+ torchvision/models/__pycache__/regnet.cpython-310.pyc,,
+ torchvision/models/__pycache__/resnet.cpython-310.pyc,,
+ torchvision/models/__pycache__/shufflenetv2.cpython-310.pyc,,
+ torchvision/models/__pycache__/squeezenet.cpython-310.pyc,,
+ torchvision/models/__pycache__/swin_transformer.cpython-310.pyc,,
+ torchvision/models/__pycache__/vgg.cpython-310.pyc,,
+ torchvision/models/__pycache__/vision_transformer.cpython-310.pyc,,
+ torchvision/models/_api.py,sha256=uIIJnxX1zYMNpdvJ0haSq15_XlR1QteFZBYVAdtEheg,10054
+ torchvision/models/_meta.py,sha256=fqpeQBsf9EEYbmApQ8Q0LKyM9_UFwjireII5mwDbwJY,28875
+ torchvision/models/_utils.py,sha256=S8uDD7maNefy-fEW6mpz8dFU68acK1HxN0kt1qpkkDo,10893
+ torchvision/models/alexnet.py,sha256=dvBZLVH60TOTHCNNkWg0TFLtuJ5Ghh_xXN73r3Vyq58,4488
+ torchvision/models/convnext.py,sha256=tP73tH-us6h2KSdVcPypEX9Izk5lsr82KsGT15mj4NE,15326
+ torchvision/models/densenet.py,sha256=OZEsHJw76kOSRG4TKhLy7lPGsGEixy6llHkpC8snSOo,16825
+ torchvision/models/detection/__init__.py,sha256=JwYm_fTGO_FeRg4eTOQLwQPZ9lC9jheZ-QEoJgqKTjg,168
+ torchvision/models/detection/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/anchor_utils.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/backbone_utils.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/faster_rcnn.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/fcos.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/generalized_rcnn.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/image_list.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/keypoint_rcnn.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/mask_rcnn.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/retinanet.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/roi_heads.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/rpn.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/ssd.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/ssdlite.cpython-310.pyc,,
+ torchvision/models/detection/__pycache__/transform.cpython-310.pyc,,
+ torchvision/models/detection/_utils.py,sha256=2y3FQ4F5yXhFM7VIWmu_70FpKgZjxdT_ucfzYwi3ZUQ,22127
+ torchvision/models/detection/anchor_utils.py,sha256=8Ix1Vp3i2kgJGr6esie3rw0_yAjtrUSvLXVKPaoZeQo,11859
+ torchvision/models/detection/backbone_utils.py,sha256=4FyzocR6YS7cG5IJTMRwC44tupbXQDA_Ru_8qqaju2I,10548
+ torchvision/models/detection/faster_rcnn.py,sha256=8DnegLKZnr8Q-zrzGT7_peIc_k_R1q1ijDH5n1P3gQE,36979
+ torchvision/models/detection/fcos.py,sha256=8r8MayvUMeTKfDoza4Hy67ChgRglLzBG6YS5qNe84sM,34235
+ torchvision/models/detection/generalized_rcnn.py,sha256=4-Dp8Vx-SjDDSZ7TsZ11rmkvEH336aLuSOlERXiQ7fs,4743
+ torchvision/models/detection/image_list.py,sha256=SUJ3xMn-1xc6ivYZUNIdWBh3RH9xD8EtCdpsXnPI_iM,783
+ torchvision/models/detection/keypoint_rcnn.py,sha256=4HxwRrp8lJfdyi8K3eBq4vstbRrL8bZc2Hhh-pVHjsI,21947
+ torchvision/models/detection/mask_rcnn.py,sha256=X1GQS314qOy4uCCp7MPfH6W12IydRwW-tDCmCnB1FGg,26713
+ torchvision/models/detection/retinanet.py,sha256=17Q0RdqqugASEVDGJfr8lCD61zjEqD5XxwQZAmZUZ24,37300
+ torchvision/models/detection/roi_heads.py,sha256=Uh9950xZUEmejwD2pRRhKvqNV0bY_G2Om8yGC2EdDDg,33822
+ torchvision/models/detection/rpn.py,sha256=7jbqPpLelnGCb5Fn-muUXeZF9EQ2nhE5r2aNAuR9V0M,15838
+ torchvision/models/detection/ssd.py,sha256=tbsgVbRD36WrjkZEB4xi1fvOXT62ry0p8G_Sd-j5CrY,28979
+ torchvision/models/detection/ssdlite.py,sha256=8nyEUYONUYe319JpgevKEfjr_FxCgDNU8gOyfuZ3L3c,13219
+ torchvision/models/detection/transform.py,sha256=Ma0CDvLCMlk3MxS3asXcDxrSosRLacaLpi-T34LXm1A,12189
+ torchvision/models/efficientnet.py,sha256=4qyeoXkYGFyUsBDt8TygDYycMMt1zhGwB_l4PmoPv4g,43090
+ torchvision/models/feature_extraction.py,sha256=RD4Ba_6FPKRVBZs1Io3ebA1P-iZS7T7flxY5MWPPlv4,26339
+ torchvision/models/googlenet.py,sha256=ni7VlSJW2_zG0Adxx56fuN5t4yI6vROBAuAu06-V4f0,12806
+ torchvision/models/inception.py,sha256=ifrLErzOVG-vlwQOMXLX5yMgcpHxCQQ17L7Wacn5QhQ,18851
+ torchvision/models/maxvit.py,sha256=_8L8gG5ob2DCZJbiny81P1fBAMmOcOKbTngckPy8xTE,32053
+ torchvision/models/mnasnet.py,sha256=h9jY1TupaChZj9khnXya_l4O1exUWhWOOCmhJCCImKc,17574
+ torchvision/models/mobilenet.py,sha256=lSRVxw2TL3LFBwCadvyvH6n3GzqUTnK2-rhX3MOgSrs,211
+ torchvision/models/mobilenetv2.py,sha256=v9cRBAp7_C_50JFkjGZ0luvuh45oCYgYn37pcG2UL8o,9710
+ torchvision/models/mobilenetv3.py,sha256=-Xk32m_Wdn-ap8wCL4Tl7wjiROIwDwhasInYTMwwOrE,16279
+ torchvision/models/optical_flow/__init__.py,sha256=0zRlMWQJCjFqoUafUXVgO89-z7em7tACo9E8hHSq9RQ,20
+ torchvision/models/optical_flow/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/models/optical_flow/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/models/optical_flow/__pycache__/raft.cpython-310.pyc,,
+ torchvision/models/optical_flow/_utils.py,sha256=v-tQJzYmYukrD1sQAE-5j5jxyvComwF1UdGkz5tVTLw,2077
+ torchvision/models/optical_flow/raft.py,sha256=FpSLPXisugu5Rzp_D5XCr037snBapMJ0dDPrw9c3CNk,39995
+ torchvision/models/quantization/__init__.py,sha256=gqFM7zI4UUHKKBDJAumozOn7xPL0JtvyNS8Ejz6QXp0,125
+ torchvision/models/quantization/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/googlenet.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/inception.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/mobilenet.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/mobilenetv2.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/mobilenetv3.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/resnet.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/shufflenetv2.cpython-310.pyc,,
+ torchvision/models/quantization/__pycache__/utils.cpython-310.pyc,,
+ torchvision/models/quantization/googlenet.py,sha256=C-8lm9TnjkEuwu6zaPp0r5mb0QMYvTMGOtz2--s1IFo,8080
+ torchvision/models/quantization/inception.py,sha256=hg8K1QNk7T-Qo3zOB47eupS3Thu_RjVI6mG2HzAEx8M,10815
+ torchvision/models/quantization/mobilenet.py,sha256=lSRVxw2TL3LFBwCadvyvH6n3GzqUTnK2-rhX3MOgSrs,211
+ torchvision/models/quantization/mobilenetv2.py,sha256=ggpNLU4_JkyMn8IPTgj1p0xx_Wvspcii2Wd3ISj5tBE,5883
+ torchvision/models/quantization/mobilenetv3.py,sha256=PVWmSP62Pn8hQkd682l6uYFLQp1nxZltMOE-FhhO9OU,9230
+ torchvision/models/quantization/resnet.py,sha256=9Hb6KyPv33Jj1A6JciXvGX06q0RkwwP10u8GxFfmorM,17939
+ torchvision/models/quantization/shufflenetv2.py,sha256=eS2y34ZTG03dNJgtVJ2qSXQWZ22PHIWBYeC8cbvI1yI,16884
+ torchvision/models/quantization/utils.py,sha256=n8mWsK9_Ek_M2AqGKPLoLlcKaYGH2PrF2l5_W84oBMk,2058
+ torchvision/models/regnet.py,sha256=-7s5n0qzXZPR9HgzOk9aj1sv9dWZ3AxnP7CmZRdUeZI,63553
+ torchvision/models/resnet.py,sha256=dJmlBZrXsaH491Q8BLShN5UUD62DfDhTC0j_XZYQv24,38932
+ torchvision/models/segmentation/__init__.py,sha256=TGk6UdVXAMtwBpYalrvdXZnmSwqzTDOT1lgKrfzhHrQ,66
+ torchvision/models/segmentation/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/models/segmentation/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/models/segmentation/__pycache__/deeplabv3.cpython-310.pyc,,
+ torchvision/models/segmentation/__pycache__/fcn.cpython-310.pyc,,
+ torchvision/models/segmentation/__pycache__/lraspp.cpython-310.pyc,,
+ torchvision/models/segmentation/_utils.py,sha256=QfyqCtH_MJnIkKW5m-98GZD2MjtPYLtPTDi79pcIGhs,1197
+ torchvision/models/segmentation/deeplabv3.py,sha256=wVgXz21sugSck2KbG7WD-wgMwCAW0wd8jBGhgue300s,15015
+ torchvision/models/segmentation/fcn.py,sha256=I1FqaZZVPc3Fbg_7E2L5qpumnupxBYc7KYsW03EG_Cs,8973
+ torchvision/models/segmentation/lraspp.py,sha256=dt5DJ_qbDZlEM0SIuN87JU43JHfVlb8Oepp76KDv5tw,7643
+ torchvision/models/shufflenetv2.py,sha256=84FiPfkhJpSw6Q9Jmaug5MW5qmWCO3VhAPF61EiMn7Q,15444
+ torchvision/models/squeezenet.py,sha256=apjFPEI5nr_493bAQsR245EorzaMYXVQSqdcveyAfy0,8763
+ torchvision/models/swin_transformer.py,sha256=VwvnImWcjblashj0OONycDJnIkz-zRDpm365v_a0-zo,39337
+ torchvision/models/vgg.py,sha256=jYjIoY2jtKAc-aURCQsvbgBxup1Gh4fVZSt2NzFLlZY,19225
+ torchvision/models/video/__init__.py,sha256=O4HB-RaXgCtnvpMDAuMBaIeKIiYEkNxra_fmAHLUIJM,93
+ torchvision/models/video/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/models/video/__pycache__/mvit.cpython-310.pyc,,
+ torchvision/models/video/__pycache__/resnet.cpython-310.pyc,,
+ torchvision/models/video/__pycache__/s3d.cpython-310.pyc,,
+ torchvision/models/video/__pycache__/swin_transformer.cpython-310.pyc,,
+ torchvision/models/video/mvit.py,sha256=0AZ31K5QcUBWZUUPTI1FCCM2Fma95bPs1o82zzpw2i0,32998
+ torchvision/models/video/resnet.py,sha256=RUnbUXFmoWNo_XbEKLmVSM8LUDcyv6jGZJ8GGpZi_6U,16771
+ torchvision/models/video/s3d.py,sha256=jx9gMP18Bzb7UO3vjejVBHlrCrJPdWFDfTn7XeU5kMg,7815
+ torchvision/models/video/swin_transformer.py,sha256=3GMyPGPeMcwJ1p9TGiRbpIlP-G7Qv_jWNbZmqIwMNyA,27688
+ torchvision/models/vision_transformer.py,sha256=O4mdBjYFsp-HTZA9bXfux_wJzIPRv1uS43PjuNh52zc,32136
+ torchvision/ops/__init__.py,sha256=eVv16QSBwgKaojOUHMPCy4ou9ZeFh-HoCV4DpqrZG4U,1928
+ torchvision/ops/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/ops/__pycache__/_box_convert.cpython-310.pyc,,
+ torchvision/ops/__pycache__/_register_onnx_ops.cpython-310.pyc,,
+ torchvision/ops/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/ops/__pycache__/boxes.cpython-310.pyc,,
+ torchvision/ops/__pycache__/ciou_loss.cpython-310.pyc,,
+ torchvision/ops/__pycache__/deform_conv.cpython-310.pyc,,
+ torchvision/ops/__pycache__/diou_loss.cpython-310.pyc,,
+ torchvision/ops/__pycache__/drop_block.cpython-310.pyc,,
+ torchvision/ops/__pycache__/feature_pyramid_network.cpython-310.pyc,,
+ torchvision/ops/__pycache__/focal_loss.cpython-310.pyc,,
+ torchvision/ops/__pycache__/giou_loss.cpython-310.pyc,,
+ torchvision/ops/__pycache__/misc.cpython-310.pyc,,
+ torchvision/ops/__pycache__/poolers.cpython-310.pyc,,
+ torchvision/ops/__pycache__/ps_roi_align.cpython-310.pyc,,
+ torchvision/ops/__pycache__/ps_roi_pool.cpython-310.pyc,,
+ torchvision/ops/__pycache__/roi_align.cpython-310.pyc,,
+ torchvision/ops/__pycache__/roi_pool.cpython-310.pyc,,
+ torchvision/ops/__pycache__/stochastic_depth.cpython-310.pyc,,
+ torchvision/ops/_box_convert.py,sha256=_bRRpErwk03rcPuscO1tCI9v3l88oNlDBDl2jzPlbKo,2409
+ torchvision/ops/_register_onnx_ops.py,sha256=Fyb1kC2m2OqZdfW_M86pt9-S66e1qNUhXNu1EQRa034,4181
+ torchvision/ops/_utils.py,sha256=pVHPpsmx6XcfGjUVk-XAEnd8QJBkrw_cT6fO_IwICE4,3630
+ torchvision/ops/boxes.py,sha256=n1aBkhkQYOwYdjkQMv5S9_G1NhpaBhmx3iwuJAq3nC8,16363
+ torchvision/ops/ciou_loss.py,sha256=3HClrMMKOJ3bndIUinNp3cp6Cim4-ZmmfuLn1-NPDUo,2756
+ torchvision/ops/deform_conv.py,sha256=fJxkVR_p_OQMzMja4flvmTgqDPvrOOcwzDG8bV7Q7pE,6990
+ torchvision/ops/diou_loss.py,sha256=tssNJhII4WT-wmidFS8gFNteQIAJz-Nd1Q7Trz1BjIY,3362
+ torchvision/ops/drop_block.py,sha256=A4EGIl7txrU_QmkI1N0W9hfd8tq8yx6zq32oYXaddLQ,5855
+ torchvision/ops/feature_pyramid_network.py,sha256=mfkaygxRz-0TAdTMq2fCAL-E0WxlRnTfdb-s_J5qPE4,8702
+ torchvision/ops/focal_loss.py,sha256=9kFqGyA0-hodRw9Au74k-FuS14OhsAvbFxDGvpx08Sg,2261
+ torchvision/ops/giou_loss.py,sha256=OXSaMZDZ0qy7jgaQ9exB_DMQXzcATBAFiIjzSlOV-bQ,2696
+ torchvision/ops/misc.py,sha256=yFnK7GT9OCMfDrn4NtQXKdh5broi1xocL94SoyqhWuw,13572
+ torchvision/ops/poolers.py,sha256=zzYhH7poMwGlYxDvAvCaL9emg9X7sM4xZFLEy0zvv5s,11920
+ torchvision/ops/ps_roi_align.py,sha256=4iAbeUVTessAcxvJhuARN_aFGUTZC9R4KrKC_mBH3MQ,3625
+ torchvision/ops/ps_roi_pool.py,sha256=jOv-2pAZdLFvvt4r4NwiRfxU5WAOy_vi6gxZjMvlusw,2870
+ torchvision/ops/roi_align.py,sha256=Ig9jLul90wBM3kaZuYEutsJEXfaCo3D0s_PxYMr9jQc,11292
+ torchvision/ops/roi_pool.py,sha256=70ou6Xc7qJxKe3SC54QIW3L99PoS0gLlwGocaYDbD2w,2943
+ torchvision/ops/stochastic_depth.py,sha256=ISZ9noJyZLxpTG-wa2VmPs66qjhVsP7ZxWHvumWSP3U,2236
+ torchvision/transforms/__init__.py,sha256=EMft42B1JAiU11J1rxIN4Znis6EJPbp-bsGjAzH-24M,53
+ torchvision/transforms/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/_functional_pil.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/_functional_tensor.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/_functional_video.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/_presets.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/_transforms_video.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/autoaugment.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/functional.cpython-310.pyc,,
+ torchvision/transforms/__pycache__/transforms.cpython-310.pyc,,
+ torchvision/transforms/_functional_pil.py,sha256=TXZK3Y0huFHhXUGPin6ET5ToNoCbgdNGy65f8MPSpM0,12070
+ torchvision/transforms/_functional_tensor.py,sha256=3cEs8IYfRNQyff5Iriv--cZTWOIfvw2eaWiHU1-94AE,33939
+ torchvision/transforms/_functional_video.py,sha256=YcV557YglbJsq9SRGJHFoRbtxawiLSJ1oM5rV75OyqQ,3857
+ torchvision/transforms/_presets.py,sha256=RAjD6DgpU4QnNxV0MfZ3uHgzuARf-cdxD3Vo_KKIYeY,8510
+ torchvision/transforms/_transforms_video.py,sha256=Buz5LCWVPGiEonHE-cXIXfbkBhNc0qxVraxkNdxKp8o,4950
+ torchvision/transforms/autoaugment.py,sha256=JcbdEDbR0-OqTE4cwkhVSB45woFZQ_Fq5xmjFu_3bjg,28243
+ torchvision/transforms/functional.py,sha256=r9DojEr-0oqCOLuSMH0B4kWtI3UEbY_4jS7RBWDZKqM,67855
+ torchvision/transforms/transforms.py,sha256=eRIUr0I1i7BMqrXm4xsBJQYYGpvIkDr_VMsctQOI0M4,85547
+ torchvision/transforms/v2/__init__.py,sha256=UUJgzZguNPl7B33Zt3gexO7gSApSuWHTpzE7fNXQpA0,1545
+ torchvision/transforms/v2/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_augment.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_auto_augment.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_color.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_container.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_deprecated.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_geometry.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_meta.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_misc.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_temporal.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_transform.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_type_conversion.cpython-310.pyc,,
+ torchvision/transforms/v2/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/transforms/v2/_augment.py,sha256=NtbxWHrD1tbBJ9LVGcYsEv1tlHqpQyYNE23aH0NZ868,16159
+ torchvision/transforms/v2/_auto_augment.py,sha256=sQWkEF1N17XU4F6nBGva7kUuiuGNEOCAGHYGn8oa0A8,32025
+ torchvision/transforms/v2/_color.py,sha256=YHc7vhv4XR0CfSmEUoGQDexbENjV_whIHi9c-JgPrpo,16990
+ torchvision/transforms/v2/_container.py,sha256=SFh-FU8ceir934hxS_VkbVQq0SxzGSULPaYpouJJhPs,6055
+ torchvision/transforms/v2/_deprecated.py,sha256=0kXQWo6x1D2Gg98pJ0wahiDHuJBGNvsadZwdFtOM5YE,1947
+ torchvision/transforms/v2/_geometry.py,sha256=Ux5ghMCEVwpYYKB4sBamJUIfRbz8EutjfI_cskbNnhk,67606
+ torchvision/transforms/v2/_meta.py,sha256=Pcrh0dKMgwfpHTdho8uXcYYfKtbHy36VVyz4o2umld0,1405
+ torchvision/transforms/v2/_misc.py,sha256=Y-QjkjKYGMJYQvRP1elB_5gSwsvJR-I2vCEheBLCwuo,19114
+ torchvision/transforms/v2/_temporal.py,sha256=ByHqYqy1KO1Rd-Cg-eynHQEnF4y7OaMGIeO44kl8QJw,906
+ torchvision/transforms/v2/_transform.py,sha256=008PBMswQWIc7dEmhWqm772_O4ciDY3rycGu08nhcME,8476
+ torchvision/transforms/v2/_type_conversion.py,sha256=f3J1wYeB_zTaF8mxIjoudDKCiljmWqLGszSS9DN5EsQ,2860
+ torchvision/transforms/v2/_utils.py,sha256=AjGKWomXlDX2I1jCd4ROkJr8nRtr3ofm3MdXRH3YTTo,8652
+ torchvision/transforms/v2/functional/__init__.py,sha256=4SDjzgj9e4oM4IUKy9YJAwXFnBoLpygd8sSM_7sMvK0,3546
+ torchvision/transforms/v2/functional/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_augment.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_color.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_deprecated.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_geometry.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_meta.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_misc.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_temporal.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_type_conversion.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/__pycache__/_utils.cpython-310.pyc,,
+ torchvision/transforms/v2/functional/_augment.py,sha256=MRM8E3_gKfTTC0qFt3cKI4UxTxQtuGI9MeY2mBsrj04,3473
+ torchvision/transforms/v2/functional/_color.py,sha256=nUASg1bTHmsf2AT_1Q7CLNXhObrRPbB1w2fDuz9k5e8,30244
+ torchvision/transforms/v2/functional/_deprecated.py,sha256=ycYZLDwDyd612aPbTKIV3gqhCRLMdF03MQELct4LeGs,801
+ torchvision/transforms/v2/functional/_geometry.py,sha256=5QL4IdQV72PkJX61c4A5M4WLq60ihTQB6g1PE9tMqmM,87520
+ torchvision/transforms/v2/functional/_meta.py,sha256=AxTEF6mdybAW1lC_DcjfKlxvSuiVupnqbJJrqS5x4lc,10547
+ torchvision/transforms/v2/functional/_misc.py,sha256=OXu4GTCF9i_1lz7T62gKcEs94faBO7wyYmpUOCnkUEY,17517
+ torchvision/transforms/v2/functional/_temporal.py,sha256=24CQCXXO12TnW7aUiUQdrk5DRSpTPONjjC4jaGh3lH4,1136
+ torchvision/transforms/v2/functional/_type_conversion.py,sha256=78wl0dNPwX08jOCW6KcZSGy8RAQqyxMtdrTUQVQlUTM,869
+ torchvision/transforms/v2/functional/_utils.py,sha256=tsmwIF37Z9QnP9x3x4hAs1hLrcvL78GLkuO6Rq1EUTk,5479
+ torchvision/tv_tensors/__init__.py,sha256=C6N8p5aulpehsOBBmH1cPIY1xiOSASZVBfnlXgGvR_s,1509
+ torchvision/tv_tensors/__pycache__/__init__.cpython-310.pyc,,
+ torchvision/tv_tensors/__pycache__/_bounding_boxes.cpython-310.pyc,,
+ torchvision/tv_tensors/__pycache__/_dataset_wrapper.cpython-310.pyc,,
+ torchvision/tv_tensors/__pycache__/_image.cpython-310.pyc,,
+ torchvision/tv_tensors/__pycache__/_mask.cpython-310.pyc,,
+ torchvision/tv_tensors/__pycache__/_torch_function_helpers.cpython-310.pyc,,
+ torchvision/tv_tensors/__pycache__/_tv_tensor.cpython-310.pyc,,
+ torchvision/tv_tensors/__pycache__/_video.cpython-310.pyc,,
+ torchvision/tv_tensors/_bounding_boxes.py,sha256=_-bDwN1gnHpfnHXEK0O6bQrcEOv656VOliHOgoNstpw,4493
+ torchvision/tv_tensors/_dataset_wrapper.py,sha256=fNnk3CSXipBNFsmnsPpa10DRN0I_Ly4Xib2Y5Zng9Ro,24505
+ torchvision/tv_tensors/_image.py,sha256=bwx4n8qObrknE3xEIDJOs0vWJzCg4XISjtXR7ksJTgs,1934
+ torchvision/tv_tensors/_mask.py,sha256=s85DdYFK6cyrL0_MnhAC2jTJxZzL7MJ8DTx985JPVhQ,1478
+ torchvision/tv_tensors/_torch_function_helpers.py,sha256=81qDZqgzUeSgfSeWhsrw1Ukwltvf97WbwmKWHm7X8X0,2276
+ torchvision/tv_tensors/_tv_tensor.py,sha256=dGQJhvOVTjb1LVT5qPZLJxox30uDMmODB26Iz6TjVbc,6248
+ torchvision/tv_tensors/_video.py,sha256=4dQ5Rh_0ghPtaLVSOxVWXJv1uWi8ZKXlfbRsBZ3roxw,1416
+ torchvision/utils.py,sha256=cGBWrAicxrx1YECsTGm7m_JL1GaGXp_UmAA9rmIQ3t8,26734
+ torchvision/version.py,sha256=P_l-ZSRLCCu_2SuJrwuv_07WrX_5RAvKwEbhRkRj9vg,203
infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/REQUESTED ADDED
File without changes
infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (72.1.0)
+ Root-Is-Purelib: false
+ Tag: cp310-cp310-linux_x86_64
+
infer_4_30_0/lib/python3.10/site-packages/torchvision-0.20.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ torchvision