Light-Dav commited on
Commit
9f4a1ba
·
verified ·
1 Parent(s): bc7fe36

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. sentiment-bert-model/model.safetensors +3 -0
  2. venv/Lib/site-packages/arrow-1.3.0.dist-info/INSTALLER +1 -0
  3. venv/Lib/site-packages/arrow-1.3.0.dist-info/LICENSE +201 -0
  4. venv/Lib/site-packages/arrow-1.3.0.dist-info/METADATA +176 -0
  5. venv/Lib/site-packages/arrow-1.3.0.dist-info/RECORD +26 -0
  6. venv/Lib/site-packages/arrow-1.3.0.dist-info/WHEEL +4 -0
  7. venv/Lib/site-packages/arrow/__init__.py +39 -0
  8. venv/Lib/site-packages/arrow/_version.py +1 -0
  9. venv/Lib/site-packages/arrow/arrow.py +1869 -0
  10. venv/Lib/site-packages/arrow/constants.py +177 -0
  11. venv/Lib/site-packages/arrow/factory.py +345 -0
  12. venv/Lib/site-packages/arrow/formatter.py +148 -0
  13. venv/Lib/site-packages/arrow/locales.py +0 -0
  14. venv/Lib/site-packages/arrow/parser.py +771 -0
  15. venv/Lib/site-packages/arrow/py.typed +0 -0
  16. venv/Lib/site-packages/arrow/util.py +117 -0
  17. venv/Lib/site-packages/asttokens-3.0.0.dist-info/INSTALLER +1 -0
  18. venv/Lib/site-packages/asttokens-3.0.0.dist-info/LICENSE +201 -0
  19. venv/Lib/site-packages/asttokens-3.0.0.dist-info/METADATA +118 -0
  20. venv/Lib/site-packages/asttokens-3.0.0.dist-info/RECORD +21 -0
  21. venv/Lib/site-packages/asttokens-3.0.0.dist-info/WHEEL +5 -0
  22. venv/Lib/site-packages/asttokens-3.0.0.dist-info/top_level.txt +1 -0
  23. venv/Lib/site-packages/asttokens/__init__.py +24 -0
  24. venv/Lib/site-packages/asttokens/astroid_compat.py +18 -0
  25. venv/Lib/site-packages/asttokens/asttokens.py +450 -0
  26. venv/Lib/site-packages/asttokens/line_numbers.py +76 -0
  27. venv/Lib/site-packages/asttokens/mark_tokens.py +467 -0
  28. venv/Lib/site-packages/asttokens/py.typed +0 -0
  29. venv/Lib/site-packages/asttokens/util.py +485 -0
  30. venv/Lib/site-packages/asttokens/version.py +1 -0
  31. venv/Lib/site-packages/async_lru-2.0.5.dist-info/INSTALLER +1 -0
  32. venv/Lib/site-packages/async_lru-2.0.5.dist-info/LICENSE +23 -0
  33. venv/Lib/site-packages/async_lru-2.0.5.dist-info/METADATA +130 -0
  34. venv/Lib/site-packages/async_lru-2.0.5.dist-info/RECORD +9 -0
  35. venv/Lib/site-packages/async_lru-2.0.5.dist-info/WHEEL +5 -0
  36. venv/Lib/site-packages/async_lru-2.0.5.dist-info/top_level.txt +1 -0
  37. venv/Lib/site-packages/async_lru/__init__.py +346 -0
  38. venv/Lib/site-packages/async_lru/py.typed +0 -0
  39. venv/Lib/site-packages/attr/__init__.py +104 -0
  40. venv/Lib/site-packages/attr/__init__.pyi +389 -0
  41. venv/Lib/site-packages/attr/_cmp.py +160 -0
  42. venv/Lib/site-packages/attr/_cmp.pyi +13 -0
  43. venv/Lib/site-packages/attr/_compat.py +94 -0
  44. venv/Lib/site-packages/attr/_config.py +31 -0
  45. venv/Lib/site-packages/attr/_funcs.py +468 -0
  46. venv/Lib/site-packages/attr/_make.py +3123 -0
  47. venv/Lib/site-packages/attr/_next_gen.py +623 -0
  48. venv/Lib/site-packages/attr/_typing_compat.pyi +15 -0
  49. venv/Lib/site-packages/attr/_version_info.py +86 -0
  50. venv/Lib/site-packages/attr/_version_info.pyi +9 -0
sentiment-bert-model/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3a56cbfa62fb1546a0efc5c4204eaca9c45d3d1eacf4ac30c1d54c59df17d97
3
+ size 437958648
venv/Lib/site-packages/arrow-1.3.0.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
venv/Lib/site-packages/arrow-1.3.0.dist-info/LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright 2023 Chris Smith
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
venv/Lib/site-packages/arrow-1.3.0.dist-info/METADATA ADDED
@@ -0,0 +1,176 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: arrow
3
+ Version: 1.3.0
4
+ Summary: Better dates & times for Python
5
+ Keywords: arrow,date,time,datetime,timestamp,timezone,humanize
6
+ Author-email: Chris Smith <[email protected]>
7
+ Requires-Python: >=3.8
8
+ Description-Content-Type: text/x-rst
9
+ Classifier: Development Status :: 5 - Production/Stable
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: Intended Audience :: Information Technology
12
+ Classifier: License :: OSI Approved :: Apache Software License
13
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3 :: Only
16
+ Classifier: Programming Language :: Python :: 3.8
17
+ Classifier: Programming Language :: Python :: 3.9
18
+ Classifier: Programming Language :: Python :: 3.10
19
+ Classifier: Programming Language :: Python :: 3.11
20
+ Classifier: Programming Language :: Python :: 3.12
21
+ Classifier: Operating System :: OS Independent
22
+ Requires-Dist: python-dateutil>=2.7.0
23
+ Requires-Dist: types-python-dateutil>=2.8.10
24
+ Requires-Dist: doc8 ; extra == "doc"
25
+ Requires-Dist: sphinx>=7.0.0 ; extra == "doc"
26
+ Requires-Dist: sphinx-autobuild ; extra == "doc"
27
+ Requires-Dist: sphinx-autodoc-typehints ; extra == "doc"
28
+ Requires-Dist: sphinx_rtd_theme>=1.3.0 ; extra == "doc"
29
+ Requires-Dist: dateparser==1.* ; extra == "test"
30
+ Requires-Dist: pre-commit ; extra == "test"
31
+ Requires-Dist: pytest ; extra == "test"
32
+ Requires-Dist: pytest-cov ; extra == "test"
33
+ Requires-Dist: pytest-mock ; extra == "test"
34
+ Requires-Dist: pytz==2021.1 ; extra == "test"
35
+ Requires-Dist: simplejson==3.* ; extra == "test"
36
+ Project-URL: Documentation, https://arrow.readthedocs.io
37
+ Project-URL: Issues, https://github.com/arrow-py/arrow/issues
38
+ Project-URL: Source, https://github.com/arrow-py/arrow
39
+ Provides-Extra: doc
40
+ Provides-Extra: test
41
+
42
+ Arrow: Better dates & times for Python
43
+ ======================================
44
+
45
+ .. start-inclusion-marker-do-not-remove
46
+
47
+ .. image:: https://github.com/arrow-py/arrow/workflows/tests/badge.svg?branch=master
48
+ :alt: Build Status
49
+ :target: https://github.com/arrow-py/arrow/actions?query=workflow%3Atests+branch%3Amaster
50
+
51
+ .. image:: https://codecov.io/gh/arrow-py/arrow/branch/master/graph/badge.svg
52
+ :alt: Coverage
53
+ :target: https://codecov.io/gh/arrow-py/arrow
54
+
55
+ .. image:: https://img.shields.io/pypi/v/arrow.svg
56
+ :alt: PyPI Version
57
+ :target: https://pypi.python.org/pypi/arrow
58
+
59
+ .. image:: https://img.shields.io/pypi/pyversions/arrow.svg
60
+ :alt: Supported Python Versions
61
+ :target: https://pypi.python.org/pypi/arrow
62
+
63
+ .. image:: https://img.shields.io/pypi/l/arrow.svg
64
+ :alt: License
65
+ :target: https://pypi.python.org/pypi/arrow
66
+
67
+ .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
68
+ :alt: Code Style: Black
69
+ :target: https://github.com/psf/black
70
+
71
+
72
+ **Arrow** is a Python library that offers a sensible and human-friendly approach to creating, manipulating, formatting and converting dates, times and timestamps. It implements and updates the datetime type, plugging gaps in functionality and providing an intelligent module API that supports many common creation scenarios. Simply put, it helps you work with dates and times with fewer imports and a lot less code.
73
+
74
+ Arrow is named after the `arrow of time <https://en.wikipedia.org/wiki/Arrow_of_time>`_ and is heavily inspired by `moment.js <https://github.com/moment/moment>`_ and `requests <https://github.com/psf/requests>`_.
75
+
76
+ Why use Arrow over built-in modules?
77
+ ------------------------------------
78
+
79
+ Python's standard library and some other low-level modules have near-complete date, time and timezone functionality, but don't work very well from a usability perspective:
80
+
81
+ - Too many modules: datetime, time, calendar, dateutil, pytz and more
82
+ - Too many types: date, time, datetime, tzinfo, timedelta, relativedelta, etc.
83
+ - Timezones and timestamp conversions are verbose and unpleasant
84
+ - Timezone naivety is the norm
85
+ - Gaps in functionality: ISO 8601 parsing, timespans, humanization
86
+
87
+ Features
88
+ --------
89
+
90
+ - Fully-implemented, drop-in replacement for datetime
91
+ - Support for Python 3.6+
92
+ - Timezone-aware and UTC by default
93
+ - Super-simple creation options for many common input scenarios
94
+ - ``shift`` method with support for relative offsets, including weeks
95
+ - Format and parse strings automatically
96
+ - Wide support for the `ISO 8601 <https://en.wikipedia.org/wiki/ISO_8601>`_ standard
97
+ - Timezone conversion
98
+ - Support for ``dateutil``, ``pytz``, and ``ZoneInfo`` tzinfo objects
99
+ - Generates time spans, ranges, floors and ceilings for time frames ranging from microsecond to year
100
+ - Humanize dates and times with a growing list of contributed locales
101
+ - Extensible for your own Arrow-derived types
102
+ - Full support for PEP 484-style type hints
103
+
104
+ Quick Start
105
+ -----------
106
+
107
+ Installation
108
+ ~~~~~~~~~~~~
109
+
110
+ To install Arrow, use `pip <https://pip.pypa.io/en/stable/quickstart/>`_ or `pipenv <https://docs.pipenv.org>`_:
111
+
112
+ .. code-block:: console
113
+
114
+ $ pip install -U arrow
115
+
116
+ Example Usage
117
+ ~~~~~~~~~~~~~
118
+
119
+ .. code-block:: python
120
+
121
+ >>> import arrow
122
+ >>> arrow.get('2013-05-11T21:23:58.970460+07:00')
123
+ <Arrow [2013-05-11T21:23:58.970460+07:00]>
124
+
125
+ >>> utc = arrow.utcnow()
126
+ >>> utc
127
+ <Arrow [2013-05-11T21:23:58.970460+00:00]>
128
+
129
+ >>> utc = utc.shift(hours=-1)
130
+ >>> utc
131
+ <Arrow [2013-05-11T20:23:58.970460+00:00]>
132
+
133
+ >>> local = utc.to('US/Pacific')
134
+ >>> local
135
+ <Arrow [2013-05-11T13:23:58.970460-07:00]>
136
+
137
+ >>> local.timestamp()
138
+ 1368303838.970460
139
+
140
+ >>> local.format()
141
+ '2013-05-11 13:23:58 -07:00'
142
+
143
+ >>> local.format('YYYY-MM-DD HH:mm:ss ZZ')
144
+ '2013-05-11 13:23:58 -07:00'
145
+
146
+ >>> local.humanize()
147
+ 'an hour ago'
148
+
149
+ >>> local.humanize(locale='ko-kr')
150
+ '한시간 전'
151
+
152
+ .. end-inclusion-marker-do-not-remove
153
+
154
+ Documentation
155
+ -------------
156
+
157
+ For full documentation, please visit `arrow.readthedocs.io <https://arrow.readthedocs.io>`_.
158
+
159
+ Contributing
160
+ ------------
161
+
162
+ Contributions are welcome for both code and localizations (adding and updating locales). Begin by gaining familiarity with the Arrow library and its features. Then, jump into contributing:
163
+
164
+ #. Find an issue or feature to tackle on the `issue tracker <https://github.com/arrow-py/arrow/issues>`_. Issues marked with the `"good first issue" label <https://github.com/arrow-py/arrow/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22>`_ may be a great place to start!
165
+ #. Fork `this repository <https://github.com/arrow-py/arrow>`_ on GitHub and begin making changes in a branch.
166
+ #. Add a few tests to ensure that the bug was fixed or the feature works as expected.
167
+ #. Run the entire test suite and linting checks by running one of the following commands: ``tox && tox -e lint,docs`` (if you have `tox <https://tox.readthedocs.io>`_ installed) **OR** ``make build39 && make test && make lint`` (if you do not have Python 3.9 installed, replace ``build39`` with the latest Python version on your system).
168
+ #. Submit a pull request and await feedback 😃.
169
+
170
+ If you have any questions along the way, feel free to ask them `here <https://github.com/arrow-py/arrow/discussions>`_.
171
+
172
+ Support Arrow
173
+ -------------
174
+
175
+ `Open Collective <https://opencollective.com/>`_ is an online funding platform that provides tools to raise money and share your finances with full transparency. It is the platform of choice for individuals and companies to make one-time or recurring donations directly to the project. If you are interested in making a financial contribution, please visit the `Arrow collective <https://opencollective.com/arrow>`_.
176
+
venv/Lib/site-packages/arrow-1.3.0.dist-info/RECORD ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ arrow-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ arrow-1.3.0.dist-info/LICENSE,sha256=tIH4cpbLCr2xP9jLuUsUwyi-iA7J5oVHphuE2s_9Bno,11341
3
+ arrow-1.3.0.dist-info/METADATA,sha256=P7gh6Gt6pIqBLBP577OoTZWFhRmAOpMyiwpuNEeklac,7534
4
+ arrow-1.3.0.dist-info/RECORD,,
5
+ arrow-1.3.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
6
+ arrow/__init__.py,sha256=HxsSJGl56GoeHB__No-kdGmC_Wes_Ttf0ohOy7OoFig,872
7
+ arrow/__pycache__/__init__.cpython-312.pyc,,
8
+ arrow/__pycache__/_version.cpython-312.pyc,,
9
+ arrow/__pycache__/api.cpython-312.pyc,,
10
+ arrow/__pycache__/arrow.cpython-312.pyc,,
11
+ arrow/__pycache__/constants.cpython-312.pyc,,
12
+ arrow/__pycache__/factory.cpython-312.pyc,,
13
+ arrow/__pycache__/formatter.cpython-312.pyc,,
14
+ arrow/__pycache__/locales.cpython-312.pyc,,
15
+ arrow/__pycache__/parser.cpython-312.pyc,,
16
+ arrow/__pycache__/util.cpython-312.pyc,,
17
+ arrow/_version.py,sha256=F5mW07pSyGrqDNY2Ehr-UpDzpBtN-FsYU0QGZWf6PJE,22
18
+ arrow/api.py,sha256=6tdqrG0NjrKO22_eWHU4a5xerfR6IrZPY-yynGpnvTM,2755
19
+ arrow/arrow.py,sha256=m9XvNnpQ1aTHZWXPud3W2-QMfilgWXnUCnuZInwf27g,63517
20
+ arrow/constants.py,sha256=y3scgWgxiFuQg4DeFlhmexy1BA7K8LFNZyqK-VWPQJs,3238
21
+ arrow/factory.py,sha256=qiDSokfcVWJhiJbIkOcU1Ohh4N0PdKxghsJzBnI8AUo,11432
22
+ arrow/formatter.py,sha256=0D0-AjBZwuay9312KvY0UnaVBfAZj-vEIqWcG0_3ZDQ,5267
23
+ arrow/locales.py,sha256=6g5xHq5UkIAZPF8N2PvzN_xoUvsfNcPhNfJw0TUi8tw,156894
24
+ arrow/parser.py,sha256=FO6NWpzjvZcsMhIck6pd7hKe1ijlKUZE9l_OFlyskyw,25790
25
+ arrow/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
+ arrow/util.py,sha256=xnDevqRyNeYWbl3x-n_Tyo4cOgHcdgbxFECFsJ1XoEc,3679
venv/Lib/site-packages/arrow-1.3.0.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: flit 3.9.0
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
venv/Lib/site-packages/arrow/__init__.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from ._version import __version__
2
+ from .api import get, now, utcnow
3
+ from .arrow import Arrow
4
+ from .factory import ArrowFactory
5
+ from .formatter import (
6
+ FORMAT_ATOM,
7
+ FORMAT_COOKIE,
8
+ FORMAT_RFC822,
9
+ FORMAT_RFC850,
10
+ FORMAT_RFC1036,
11
+ FORMAT_RFC1123,
12
+ FORMAT_RFC2822,
13
+ FORMAT_RFC3339,
14
+ FORMAT_RSS,
15
+ FORMAT_W3C,
16
+ )
17
+ from .parser import ParserError
18
+
19
+ # https://mypy.readthedocs.io/en/stable/command_line.html#cmdoption-mypy-no-implicit-reexport
20
+ # Mypy with --strict or --no-implicit-reexport requires an explicit reexport.
21
+ __all__ = [
22
+ "__version__",
23
+ "get",
24
+ "now",
25
+ "utcnow",
26
+ "Arrow",
27
+ "ArrowFactory",
28
+ "FORMAT_ATOM",
29
+ "FORMAT_COOKIE",
30
+ "FORMAT_RFC822",
31
+ "FORMAT_RFC850",
32
+ "FORMAT_RFC1036",
33
+ "FORMAT_RFC1123",
34
+ "FORMAT_RFC2822",
35
+ "FORMAT_RFC3339",
36
+ "FORMAT_RSS",
37
+ "FORMAT_W3C",
38
+ "ParserError",
39
+ ]
venv/Lib/site-packages/arrow/_version.py ADDED
@@ -0,0 +1 @@
 
 
1
+ __version__ = "1.3.0"
venv/Lib/site-packages/arrow/arrow.py ADDED
@@ -0,0 +1,1869 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Provides the :class:`Arrow <arrow.arrow.Arrow>` class, an enhanced ``datetime``
3
+ replacement.
4
+
5
+ """
6
+
7
+
8
+ import calendar
9
+ import re
10
+ import sys
11
+ from datetime import date
12
+ from datetime import datetime as dt_datetime
13
+ from datetime import time as dt_time
14
+ from datetime import timedelta
15
+ from datetime import tzinfo as dt_tzinfo
16
+ from math import trunc
17
+ from time import struct_time
18
+ from typing import (
19
+ Any,
20
+ ClassVar,
21
+ Generator,
22
+ Iterable,
23
+ List,
24
+ Mapping,
25
+ Optional,
26
+ Tuple,
27
+ Union,
28
+ cast,
29
+ overload,
30
+ )
31
+
32
+ from dateutil import tz as dateutil_tz
33
+ from dateutil.relativedelta import relativedelta
34
+
35
+ from arrow import formatter, locales, parser, util
36
+ from arrow.constants import DEFAULT_LOCALE, DEHUMANIZE_LOCALES
37
+ from arrow.locales import TimeFrameLiteral
38
+
39
+ if sys.version_info < (3, 8): # pragma: no cover
40
+ from typing_extensions import Final, Literal
41
+ else:
42
+ from typing import Final, Literal # pragma: no cover
43
+
44
+
45
+ TZ_EXPR = Union[dt_tzinfo, str]
46
+
47
+ _T_FRAMES = Literal[
48
+ "year",
49
+ "years",
50
+ "month",
51
+ "months",
52
+ "day",
53
+ "days",
54
+ "hour",
55
+ "hours",
56
+ "minute",
57
+ "minutes",
58
+ "second",
59
+ "seconds",
60
+ "microsecond",
61
+ "microseconds",
62
+ "week",
63
+ "weeks",
64
+ "quarter",
65
+ "quarters",
66
+ ]
67
+
68
+ _BOUNDS = Literal["[)", "()", "(]", "[]"]
69
+
70
+ _GRANULARITY = Literal[
71
+ "auto",
72
+ "second",
73
+ "minute",
74
+ "hour",
75
+ "day",
76
+ "week",
77
+ "month",
78
+ "quarter",
79
+ "year",
80
+ ]
81
+
82
+
83
+ class Arrow:
84
+ """An :class:`Arrow <arrow.arrow.Arrow>` object.
85
+
86
+ Implements the ``datetime`` interface, behaving as an aware ``datetime`` while implementing
87
+ additional functionality.
88
+
89
+ :param year: the calendar year.
90
+ :param month: the calendar month.
91
+ :param day: the calendar day.
92
+ :param hour: (optional) the hour. Defaults to 0.
93
+ :param minute: (optional) the minute, Defaults to 0.
94
+ :param second: (optional) the second, Defaults to 0.
95
+ :param microsecond: (optional) the microsecond. Defaults to 0.
96
+ :param tzinfo: (optional) A timezone expression. Defaults to UTC.
97
+ :param fold: (optional) 0 or 1, used to disambiguate repeated wall times. Defaults to 0.
98
+
99
+ .. _tz-expr:
100
+
101
+ Recognized timezone expressions:
102
+
103
+ - A ``tzinfo`` object.
104
+ - A ``str`` describing a timezone, similar to 'US/Pacific', or 'Europe/Berlin'.
105
+ - A ``str`` in ISO 8601 style, as in '+07:00'.
106
+ - A ``str``, one of the following: 'local', 'utc', 'UTC'.
107
+
108
+ Usage::
109
+
110
+ >>> import arrow
111
+ >>> arrow.Arrow(2013, 5, 5, 12, 30, 45)
112
+ <Arrow [2013-05-05T12:30:45+00:00]>
113
+
114
+ """
115
+
116
+ resolution: ClassVar[timedelta] = dt_datetime.resolution
117
+ min: ClassVar["Arrow"]
118
+ max: ClassVar["Arrow"]
119
+
120
+ _ATTRS: Final[List[str]] = [
121
+ "year",
122
+ "month",
123
+ "day",
124
+ "hour",
125
+ "minute",
126
+ "second",
127
+ "microsecond",
128
+ ]
129
+ _ATTRS_PLURAL: Final[List[str]] = [f"{a}s" for a in _ATTRS]
130
+ _MONTHS_PER_QUARTER: Final[int] = 3
131
+ _SECS_PER_MINUTE: Final[int] = 60
132
+ _SECS_PER_HOUR: Final[int] = 60 * 60
133
+ _SECS_PER_DAY: Final[int] = 60 * 60 * 24
134
+ _SECS_PER_WEEK: Final[int] = 60 * 60 * 24 * 7
135
+ _SECS_PER_MONTH: Final[float] = 60 * 60 * 24 * 30.5
136
+ _SECS_PER_QUARTER: Final[float] = 60 * 60 * 24 * 30.5 * 3
137
+ _SECS_PER_YEAR: Final[int] = 60 * 60 * 24 * 365
138
+
139
+ _SECS_MAP: Final[Mapping[TimeFrameLiteral, float]] = {
140
+ "second": 1.0,
141
+ "minute": _SECS_PER_MINUTE,
142
+ "hour": _SECS_PER_HOUR,
143
+ "day": _SECS_PER_DAY,
144
+ "week": _SECS_PER_WEEK,
145
+ "month": _SECS_PER_MONTH,
146
+ "quarter": _SECS_PER_QUARTER,
147
+ "year": _SECS_PER_YEAR,
148
+ }
149
+
150
+ _datetime: dt_datetime
151
+
152
+ def __init__(
153
+ self,
154
+ year: int,
155
+ month: int,
156
+ day: int,
157
+ hour: int = 0,
158
+ minute: int = 0,
159
+ second: int = 0,
160
+ microsecond: int = 0,
161
+ tzinfo: Optional[TZ_EXPR] = None,
162
+ **kwargs: Any,
163
+ ) -> None:
164
+ if tzinfo is None:
165
+ tzinfo = dateutil_tz.tzutc()
166
+ # detect that tzinfo is a pytz object (issue #626)
167
+ elif (
168
+ isinstance(tzinfo, dt_tzinfo)
169
+ and hasattr(tzinfo, "localize")
170
+ and hasattr(tzinfo, "zone")
171
+ and tzinfo.zone
172
+ ):
173
+ tzinfo = parser.TzinfoParser.parse(tzinfo.zone)
174
+ elif isinstance(tzinfo, str):
175
+ tzinfo = parser.TzinfoParser.parse(tzinfo)
176
+
177
+ fold = kwargs.get("fold", 0)
178
+
179
+ self._datetime = dt_datetime(
180
+ year, month, day, hour, minute, second, microsecond, tzinfo, fold=fold
181
+ )
182
+
183
+ # factories: single object, both original and from datetime.
184
+
185
+ @classmethod
186
+ def now(cls, tzinfo: Optional[dt_tzinfo] = None) -> "Arrow":
187
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in the given
188
+ timezone.
189
+
190
+ :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time.
191
+
192
+ Usage::
193
+
194
+ >>> arrow.now('Asia/Baku')
195
+ <Arrow [2019-01-24T20:26:31.146412+04:00]>
196
+
197
+ """
198
+
199
+ if tzinfo is None:
200
+ tzinfo = dateutil_tz.tzlocal()
201
+
202
+ dt = dt_datetime.now(tzinfo)
203
+
204
+ return cls(
205
+ dt.year,
206
+ dt.month,
207
+ dt.day,
208
+ dt.hour,
209
+ dt.minute,
210
+ dt.second,
211
+ dt.microsecond,
212
+ dt.tzinfo,
213
+ fold=getattr(dt, "fold", 0),
214
+ )
215
+
216
+ @classmethod
217
+ def utcnow(cls) -> "Arrow":
218
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC
219
+ time.
220
+
221
+ Usage::
222
+
223
+ >>> arrow.utcnow()
224
+ <Arrow [2019-01-24T16:31:40.651108+00:00]>
225
+
226
+ """
227
+
228
+ dt = dt_datetime.now(dateutil_tz.tzutc())
229
+
230
+ return cls(
231
+ dt.year,
232
+ dt.month,
233
+ dt.day,
234
+ dt.hour,
235
+ dt.minute,
236
+ dt.second,
237
+ dt.microsecond,
238
+ dt.tzinfo,
239
+ fold=getattr(dt, "fold", 0),
240
+ )
241
+
242
+ @classmethod
243
+ def fromtimestamp(
244
+ cls,
245
+ timestamp: Union[int, float, str],
246
+ tzinfo: Optional[TZ_EXPR] = None,
247
+ ) -> "Arrow":
248
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a timestamp, converted to
249
+ the given timezone.
250
+
251
+ :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either.
252
+ :param tzinfo: (optional) a ``tzinfo`` object. Defaults to local time.
253
+
254
+ """
255
+
256
+ if tzinfo is None:
257
+ tzinfo = dateutil_tz.tzlocal()
258
+ elif isinstance(tzinfo, str):
259
+ tzinfo = parser.TzinfoParser.parse(tzinfo)
260
+
261
+ if not util.is_timestamp(timestamp):
262
+ raise ValueError(f"The provided timestamp {timestamp!r} is invalid.")
263
+
264
+ timestamp = util.normalize_timestamp(float(timestamp))
265
+ dt = dt_datetime.fromtimestamp(timestamp, tzinfo)
266
+
267
+ return cls(
268
+ dt.year,
269
+ dt.month,
270
+ dt.day,
271
+ dt.hour,
272
+ dt.minute,
273
+ dt.second,
274
+ dt.microsecond,
275
+ dt.tzinfo,
276
+ fold=getattr(dt, "fold", 0),
277
+ )
278
+
279
+ @classmethod
280
+ def utcfromtimestamp(cls, timestamp: Union[int, float, str]) -> "Arrow":
281
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a timestamp, in UTC time.
282
+
283
+ :param timestamp: an ``int`` or ``float`` timestamp, or a ``str`` that converts to either.
284
+
285
+ """
286
+
287
+ if not util.is_timestamp(timestamp):
288
+ raise ValueError(f"The provided timestamp {timestamp!r} is invalid.")
289
+
290
+ timestamp = util.normalize_timestamp(float(timestamp))
291
+ dt = dt_datetime.utcfromtimestamp(timestamp)
292
+
293
+ return cls(
294
+ dt.year,
295
+ dt.month,
296
+ dt.day,
297
+ dt.hour,
298
+ dt.minute,
299
+ dt.second,
300
+ dt.microsecond,
301
+ dateutil_tz.tzutc(),
302
+ fold=getattr(dt, "fold", 0),
303
+ )
304
+
305
+ @classmethod
306
+ def fromdatetime(cls, dt: dt_datetime, tzinfo: Optional[TZ_EXPR] = None) -> "Arrow":
307
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a ``datetime`` and
308
+ optional replacement timezone.
309
+
310
+ :param dt: the ``datetime``
311
+ :param tzinfo: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to ``dt``'s
312
+ timezone, or UTC if naive.
313
+
314
+ Usage::
315
+
316
+ >>> dt
317
+ datetime.datetime(2021, 4, 7, 13, 48, tzinfo=tzfile('/usr/share/zoneinfo/US/Pacific'))
318
+ >>> arrow.Arrow.fromdatetime(dt)
319
+ <Arrow [2021-04-07T13:48:00-07:00]>
320
+
321
+ """
322
+
323
+ if tzinfo is None:
324
+ if dt.tzinfo is None:
325
+ tzinfo = dateutil_tz.tzutc()
326
+ else:
327
+ tzinfo = dt.tzinfo
328
+
329
+ return cls(
330
+ dt.year,
331
+ dt.month,
332
+ dt.day,
333
+ dt.hour,
334
+ dt.minute,
335
+ dt.second,
336
+ dt.microsecond,
337
+ tzinfo,
338
+ fold=getattr(dt, "fold", 0),
339
+ )
340
+
341
+ @classmethod
342
+ def fromdate(cls, date: date, tzinfo: Optional[TZ_EXPR] = None) -> "Arrow":
343
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a ``date`` and optional
344
+ replacement timezone. All time values are set to 0.
345
+
346
+ :param date: the ``date``
347
+ :param tzinfo: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to UTC.
348
+
349
+ """
350
+
351
+ if tzinfo is None:
352
+ tzinfo = dateutil_tz.tzutc()
353
+
354
+ return cls(date.year, date.month, date.day, tzinfo=tzinfo)
355
+
356
+ @classmethod
357
+ def strptime(
358
+ cls, date_str: str, fmt: str, tzinfo: Optional[TZ_EXPR] = None
359
+ ) -> "Arrow":
360
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object from a date string and format,
361
+ in the style of ``datetime.strptime``. Optionally replaces the parsed timezone.
362
+
363
+ :param date_str: the date string.
364
+ :param fmt: the format string using datetime format codes.
365
+ :param tzinfo: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to the parsed
366
+ timezone if ``fmt`` contains a timezone directive, otherwise UTC.
367
+
368
+ Usage::
369
+
370
+ >>> arrow.Arrow.strptime('20-01-2019 15:49:10', '%d-%m-%Y %H:%M:%S')
371
+ <Arrow [2019-01-20T15:49:10+00:00]>
372
+
373
+ """
374
+
375
+ dt = dt_datetime.strptime(date_str, fmt)
376
+ if tzinfo is None:
377
+ tzinfo = dt.tzinfo
378
+
379
+ return cls(
380
+ dt.year,
381
+ dt.month,
382
+ dt.day,
383
+ dt.hour,
384
+ dt.minute,
385
+ dt.second,
386
+ dt.microsecond,
387
+ tzinfo,
388
+ fold=getattr(dt, "fold", 0),
389
+ )
390
+
391
+ @classmethod
392
+ def fromordinal(cls, ordinal: int) -> "Arrow":
393
+ """Constructs an :class:`Arrow <arrow.arrow.Arrow>` object corresponding
394
+ to the Gregorian Ordinal.
395
+
396
+ :param ordinal: an ``int`` corresponding to a Gregorian Ordinal.
397
+
398
+ Usage::
399
+
400
+ >>> arrow.fromordinal(737741)
401
+ <Arrow [2020-11-12T00:00:00+00:00]>
402
+
403
+ """
404
+
405
+ util.validate_ordinal(ordinal)
406
+ dt = dt_datetime.fromordinal(ordinal)
407
+ return cls(
408
+ dt.year,
409
+ dt.month,
410
+ dt.day,
411
+ dt.hour,
412
+ dt.minute,
413
+ dt.second,
414
+ dt.microsecond,
415
+ dt.tzinfo,
416
+ fold=getattr(dt, "fold", 0),
417
+ )
418
+
419
+ # factories: ranges and spans
420
+
421
    @classmethod
    def range(
        cls,
        frame: _T_FRAMES,
        start: Union["Arrow", dt_datetime],
        end: Union["Arrow", dt_datetime, None] = None,
        tz: Optional[TZ_EXPR] = None,
        limit: Optional[int] = None,
    ) -> Generator["Arrow", None, None]:
        """Returns an iterator of :class:`Arrow <arrow.arrow.Arrow>` objects, representing
        points in time between two inputs.

        :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...).
        :param start: A datetime expression, the start of the range.
        :param end: (optional) A datetime expression, the end of the range.
        :param tz: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to
            ``start``'s timezone, or UTC if ``start`` is naive.
        :param limit: (optional) A maximum number of tuples to return.

        **NOTE**: The ``end`` or ``limit`` must be provided. Call with ``end`` alone to
        return the entire range. Call with ``limit`` alone to return a maximum # of results from
        the start. Call with both to cap a range at a maximum # of results.

        **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end`` before
        iterating. As such, either call with naive objects and ``tz``, or aware objects from the
        same timezone and no ``tz``.

        Supported frame values: year, quarter, month, week, day, hour, minute, second, microsecond.

        **NOTE**: Unlike Python's ``range``, ``end`` *may* be included in the returned iterator.
        """

        _, frame_relative, relative_steps = cls._get_frames(frame)

        tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz)

        # Re-anchor both endpoints in the chosen timezone (replacement, not conversion).
        start = cls._get_datetime(start).replace(tzinfo=tzinfo)
        end, limit = cls._get_iteration_params(end, limit)
        end = cls._get_datetime(end).replace(tzinfo=tzinfo)

        current = cls.fromdatetime(start)
        # Track the starting day-of-month so that e.g. Jan 31 + 1 month (-> Feb 28)
        # can snap back to the 31st in later months that have one.
        original_day = start.day
        day_is_clipped = False
        i = 0

        while current <= end and i < limit:
            i += 1
            yield current

            values = [getattr(current, f) for f in cls._ATTRS]
            current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore[misc]
                **{frame_relative: relative_steps}
            )

            # relativedelta clips day-of-month to the target month's length;
            # remember that so we can restore the original day when possible.
            if frame in ["month", "quarter", "year"] and current.day < original_day:
                day_is_clipped = True

            if day_is_clipped and not cls._is_last_day_of_month(current):
                current = current.replace(day=original_day)
507
+
508
    def span(
        self,
        frame: _T_FRAMES,
        count: int = 1,
        bounds: _BOUNDS = "[)",
        exact: bool = False,
        week_start: int = 1,
    ) -> Tuple["Arrow", "Arrow"]:
        """Returns a tuple of two new :class:`Arrow <arrow.arrow.Arrow>` objects, representing the timespan
        of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.

        :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
        :param count: (optional) the number of frames to span.
        :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies
            whether to include or exclude the start and end values in the span. '(' excludes
            the start, '[' includes the start, ')' excludes the end, and ']' includes the end.
            If the bounds are not specified, the default bound '[)' is used.
        :param exact: (optional) whether to have the start of the timespan begin exactly
            at the time specified by ``start`` and the end of the timespan truncated
            so as not to extend beyond ``end``.
        :param week_start: (optional) only used in combination with the week timeframe. Follows
            isoweekday() where Monday is 1 and Sunday is 7.
        :raises ValueError: if ``week_start`` is outside 1..7 or ``bounds`` is malformed.

        Supported frame values: year, quarter, month, week, day, hour, minute, second.

        Usage::

            >>> arrow.utcnow().span('hour')
            (<Arrow [2013-05-09T03:00:00+00:00]>, <Arrow [2013-05-09T03:59:59.999999+00:00]>)

        """
        if not 1 <= week_start <= 7:
            raise ValueError("week_start argument must be between 1 and 7.")

        util.validate_bounds(bounds)

        frame_absolute, frame_relative, relative_steps = self._get_frames(frame)

        # week/quarter have no direct datetime attribute; truncate at the next
        # finer attribute and then shift back to the frame boundary below.
        if frame_absolute == "week":
            attr = "day"
        elif frame_absolute == "quarter":
            attr = "month"
        else:
            attr = frame_absolute

        floor = self
        if not exact:
            # Zero out every attribute finer than `attr` by rebuilding from the
            # coarser values (missing date parts are padded with 1).
            index = self._ATTRS.index(attr)
            frames = self._ATTRS[: index + 1]

            values = [getattr(self, f) for f in frames]

            for _ in range(3 - len(values)):
                values.append(1)

            floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore[misc]

            if frame_absolute == "week":
                # if week_start is greater than self.isoweekday() go back one week by setting delta = 7
                delta = 7 if week_start > self.isoweekday() else 0
                floor = floor.shift(days=-(self.isoweekday() - week_start) - delta)
            elif frame_absolute == "quarter":
                floor = floor.shift(months=-((self.month - 1) % 3))

        ceil = floor.shift(**{frame_relative: count * relative_steps})

        # Open bounds nudge the endpoints by one microsecond.
        if bounds[0] == "(":
            floor = floor.shift(microseconds=+1)

        if bounds[1] == ")":
            ceil = ceil.shift(microseconds=-1)

        return floor, ceil
599
+
600
+ def floor(self, frame: _T_FRAMES) -> "Arrow":
601
+ """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, representing the "floor"
602
+ of the timespan of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
603
+ Equivalent to the first element in the 2-tuple returned by
604
+ :func:`span <arrow.arrow.Arrow.span>`.
605
+
606
+ :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
607
+
608
+ Usage::
609
+
610
+ >>> arrow.utcnow().floor('hour')
611
+ <Arrow [2013-05-09T03:00:00+00:00]>
612
+
613
+ """
614
+
615
+ return self.span(frame)[0]
616
+
617
+ def ceil(self, frame: _T_FRAMES) -> "Arrow":
618
+ """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, representing the "ceiling"
619
+ of the timespan of the :class:`Arrow <arrow.arrow.Arrow>` object in a given timeframe.
620
+ Equivalent to the second element in the 2-tuple returned by
621
+ :func:`span <arrow.arrow.Arrow.span>`.
622
+
623
+ :param frame: the timeframe. Can be any ``datetime`` property (day, hour, minute...).
624
+
625
+ Usage::
626
+
627
+ >>> arrow.utcnow().ceil('hour')
628
+ <Arrow [2013-05-09T03:59:59.999999+00:00]>
629
+
630
+ """
631
+
632
+ return self.span(frame)[1]
633
+
634
    @classmethod
    def span_range(
        cls,
        frame: _T_FRAMES,
        start: dt_datetime,
        end: dt_datetime,
        tz: Optional[TZ_EXPR] = None,
        limit: Optional[int] = None,
        bounds: _BOUNDS = "[)",
        exact: bool = False,
    ) -> Iterable[Tuple["Arrow", "Arrow"]]:
        """Returns an iterator of tuples, each :class:`Arrow <arrow.arrow.Arrow>` objects,
        representing a series of timespans between two inputs.

        :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...).
        :param start: A datetime expression, the start of the range.
        :param end: (optional) A datetime expression, the end of the range.
        :param tz: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to
            ``start``'s timezone, or UTC if ``start`` is naive.
        :param limit: (optional) A maximum number of tuples to return.
        :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies
            whether to include or exclude the start and end values in each span in the range.
        :param exact: (optional) whether to have the first timespan start exactly
            at the time specified by ``start`` and the final span truncated
            so as not to extend beyond ``end``.

        **NOTE**: The ``end`` or ``limit`` must be provided.

        **NOTE**: ``tz`` internally **replaces** the timezones of both ``start`` and ``end``
        before iterating.

        Supported frame values: year, quarter, month, week, day, hour, minute, second, microsecond.
        """

        tzinfo = cls._get_tzinfo(start.tzinfo if tz is None else tz)
        start = cls.fromdatetime(start, tzinfo).span(frame, exact=exact)[0]
        end = cls.fromdatetime(end, tzinfo)
        _range = cls.range(frame, start, end, tz, limit)
        # Non-exact mode: yield the whole-frame span around each range point.
        # This loop exhausts the generator, so the loop below is then a no-op.
        if not exact:
            for r in _range:
                yield r.span(frame, bounds=bounds, exact=exact)

        # Exact mode: truncate the final span at `end` and drop degenerate spans.
        for r in _range:
            floor, ceil = r.span(frame, bounds=bounds, exact=exact)
            if ceil > end:
                ceil = end
                if bounds[1] == ")":
                    ceil += relativedelta(microseconds=-1)
            if floor == end:
                break
            elif floor + relativedelta(microseconds=-1) == end:
                break
            yield floor, ceil
715
+
716
    @classmethod
    def interval(
        cls,
        frame: _T_FRAMES,
        start: dt_datetime,
        end: dt_datetime,
        interval: int = 1,
        tz: Optional[TZ_EXPR] = None,
        bounds: _BOUNDS = "[)",
        exact: bool = False,
    ) -> Iterable[Tuple["Arrow", "Arrow"]]:
        """Returns an iterator of tuples, each :class:`Arrow <arrow.arrow.Arrow>` objects,
        representing a series of intervals between two inputs.

        :param frame: The timeframe. Can be any ``datetime`` property (day, hour, minute...).
        :param start: A datetime expression, the start of the range.
        :param end: (optional) A datetime expression, the end of the range.
        :param interval: (optional) Time interval for the given time frame.
        :param tz: (optional) A timezone expression. Defaults to UTC.
        :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies
            whether to include or exclude the start and end values in the intervals.
        :param exact: (optional) whether to have the first timespan start exactly
            at the time specified by ``start`` and the final interval truncated
            so as not to extend beyond ``end``.
        :raises ValueError: if ``interval`` is less than 1.

        Supported frame values: year, quarter, month, week, day, hour, minute, second.
        """
        if interval < 1:
            raise ValueError("interval has to be a positive integer")

        spanRange = iter(
            cls.span_range(frame, start, end, tz, bounds=bounds, exact=exact)
        )
        while True:
            try:
                intvlStart, intvlEnd = next(spanRange)
                # Merge `interval` consecutive spans into one; if the span range
                # runs out early, keep the last end seen (truncated interval).
                for _ in range(interval - 1):
                    try:
                        _, intvlEnd = next(spanRange)
                    except StopIteration:
                        continue
                yield intvlStart, intvlEnd
            except StopIteration:
                # Raised by the outer next() once all spans are consumed.
                return
785
+
786
+ # representations
787
+
788
+ def __repr__(self) -> str:
789
+ return f"<{self.__class__.__name__} [{self.__str__()}]>"
790
+
791
+ def __str__(self) -> str:
792
+ return self._datetime.isoformat()
793
+
794
+ def __format__(self, formatstr: str) -> str:
795
+ if len(formatstr) > 0:
796
+ return self.format(formatstr)
797
+
798
+ return str(self)
799
+
800
+ def __hash__(self) -> int:
801
+ return self._datetime.__hash__()
802
+
803
+ # attributes and properties
804
+
805
    def __getattr__(self, name: str) -> int:
        """Resolve attributes not found normally: computed ``week``/``quarter``
        values first, then delegation to the wrapped ``datetime``."""
        if name == "week":
            # ISO calendar week number.
            return self.isocalendar()[1]

        if name == "quarter":
            return int((self.month - 1) / self._MONTHS_PER_QUARTER) + 1

        # Delegate public attributes to the wrapped datetime; private names are
        # excluded to avoid recursing while _datetime itself is being looked up.
        if not name.startswith("_"):
            value: Optional[int] = getattr(self._datetime, name, None)

            if value is not None:
                return value

        # Fall back to normal lookup so missing names raise AttributeError.
        return cast(int, object.__getattribute__(self, name))
819
+
820
+ @property
821
+ def tzinfo(self) -> dt_tzinfo:
822
+ """Gets the ``tzinfo`` of the :class:`Arrow <arrow.arrow.Arrow>` object.
823
+
824
+ Usage::
825
+
826
+ >>> arw=arrow.utcnow()
827
+ >>> arw.tzinfo
828
+ tzutc()
829
+
830
+ """
831
+
832
+ # In Arrow, `_datetime` cannot be naive.
833
+ return cast(dt_tzinfo, self._datetime.tzinfo)
834
+
835
+ @property
836
+ def datetime(self) -> dt_datetime:
837
+ """Returns a datetime representation of the :class:`Arrow <arrow.arrow.Arrow>` object.
838
+
839
+ Usage::
840
+
841
+ >>> arw=arrow.utcnow()
842
+ >>> arw.datetime
843
+ datetime.datetime(2019, 1, 24, 16, 35, 27, 276649, tzinfo=tzutc())
844
+
845
+ """
846
+
847
+ return self._datetime
848
+
849
+ @property
850
+ def naive(self) -> dt_datetime:
851
+ """Returns a naive datetime representation of the :class:`Arrow <arrow.arrow.Arrow>`
852
+ object.
853
+
854
+ Usage::
855
+
856
+ >>> nairobi = arrow.now('Africa/Nairobi')
857
+ >>> nairobi
858
+ <Arrow [2019-01-23T19:27:12.297999+03:00]>
859
+ >>> nairobi.naive
860
+ datetime.datetime(2019, 1, 23, 19, 27, 12, 297999)
861
+
862
+ """
863
+
864
+ return self._datetime.replace(tzinfo=None)
865
+
866
+ def timestamp(self) -> float:
867
+ """Returns a timestamp representation of the :class:`Arrow <arrow.arrow.Arrow>` object, in
868
+ UTC time.
869
+
870
+ Usage::
871
+
872
+ >>> arrow.utcnow().timestamp()
873
+ 1616882340.256501
874
+
875
+ """
876
+
877
+ return self._datetime.timestamp()
878
+
879
+ @property
880
+ def int_timestamp(self) -> int:
881
+ """Returns an integer timestamp representation of the :class:`Arrow <arrow.arrow.Arrow>` object, in
882
+ UTC time.
883
+
884
+ Usage::
885
+
886
+ >>> arrow.utcnow().int_timestamp
887
+ 1548260567
888
+
889
+ """
890
+
891
+ return int(self.timestamp())
892
+
893
+ @property
894
+ def float_timestamp(self) -> float:
895
+ """Returns a floating-point timestamp representation of the :class:`Arrow <arrow.arrow.Arrow>`
896
+ object, in UTC time.
897
+
898
+ Usage::
899
+
900
+ >>> arrow.utcnow().float_timestamp
901
+ 1548260516.830896
902
+
903
+ """
904
+
905
+ return self.timestamp()
906
+
907
+ @property
908
+ def fold(self) -> int:
909
+ """Returns the ``fold`` value of the :class:`Arrow <arrow.arrow.Arrow>` object."""
910
+
911
+ return self._datetime.fold
912
+
913
+ @property
914
+ def ambiguous(self) -> bool:
915
+ """Indicates whether the :class:`Arrow <arrow.arrow.Arrow>` object is a repeated wall time in the current
916
+ timezone.
917
+
918
+ """
919
+
920
+ return dateutil_tz.datetime_ambiguous(self._datetime)
921
+
922
+ @property
923
+ def imaginary(self) -> bool:
924
+ """Indicates whether the :class: `Arrow <arrow.arrow.Arrow>` object exists in the current timezone."""
925
+
926
+ return not dateutil_tz.datetime_exists(self._datetime)
927
+
928
+ # mutation and duplication.
929
+
930
+ def clone(self) -> "Arrow":
931
+ """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, cloned from the current one.
932
+
933
+ Usage:
934
+
935
+ >>> arw = arrow.utcnow()
936
+ >>> cloned = arw.clone()
937
+
938
+ """
939
+
940
+ return self.fromdatetime(self._datetime)
941
+
942
    def replace(self, **kwargs: Any) -> "Arrow":
        """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object with attributes updated
        according to inputs.

        Use property names to set their value absolutely::

            >>> arw.replace(year=2014, month=6)
            <Arrow [2014-06-11T22:27:34.787885+00:00]>

        You can also replace the timezone without conversion, using a
        :ref:`timezone expression <tz-expr>`::

            >>> arw.replace(tzinfo=tz.tzlocal())
            <Arrow [2013-05-11T22:27:34.787885-07:00]>

        :raises ValueError: for ``week``/``quarter`` (not settable absolutely) or
            any unknown attribute name.
        """

        absolute_kwargs = {}

        # Validate every keyword: calendar attributes are applied directly;
        # tzinfo/fold are handled separately below.
        for key, value in kwargs.items():
            if key in self._ATTRS:
                absolute_kwargs[key] = value
            elif key in ["week", "quarter"]:
                raise ValueError(f"Setting absolute {key} is not supported.")
            elif key not in ["tzinfo", "fold"]:
                raise ValueError(f"Unknown attribute: {key!r}.")

        current = self._datetime.replace(**absolute_kwargs)

        tzinfo = kwargs.get("tzinfo")

        if tzinfo is not None:
            # Timezone is *replaced*, not converted (use .to() for conversion).
            tzinfo = self._get_tzinfo(tzinfo)
            current = current.replace(tzinfo=tzinfo)

        fold = kwargs.get("fold")

        if fold is not None:
            current = current.replace(fold=fold)

        return self.fromdatetime(current)
987
+
988
    def shift(self, **kwargs: Any) -> "Arrow":
        """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object with attributes updated
        according to inputs.

        Use pluralized property names to relatively shift their current value:

            >>> arw.shift(years=1, months=-1)
            <Arrow [2014-04-11T22:27:34.787885+00:00]>

        Day-of-the-week relative shifting can use either Python's weekday numbers
        (Monday = 0, Tuesday = 1 .. Sunday = 6) or dateutil.relativedelta's
        day instances (MO, TU .. SU). When using weekday numbers, the returned
        date will always be greater than or equal to the starting date.

        :raises ValueError: for any keyword that is not a plural frame,
            ``weeks``, ``quarters``, or ``weekday``.
        """

        relative_kwargs = {}
        additional_attrs = ["weeks", "quarters", "weekday"]

        for key, value in kwargs.items():
            if key in self._ATTRS_PLURAL or key in additional_attrs:
                relative_kwargs[key] = value
            else:
                supported_attr = ", ".join(self._ATTRS_PLURAL + additional_attrs)
                raise ValueError(
                    f"Invalid shift time frame. Please select one of the following: {supported_attr}."
                )

        # core datetime does not support quarters, translate to months.
        relative_kwargs.setdefault("months", 0)
        relative_kwargs["months"] += (
            relative_kwargs.pop("quarters", 0) * self._MONTHS_PER_QUARTER
        )

        current = self._datetime + relativedelta(**relative_kwargs)

        # A shift may land inside a DST gap; move to the equivalent real time.
        if not dateutil_tz.datetime_exists(current):
            current = dateutil_tz.resolve_imaginary(current)

        return self.fromdatetime(current)
1042
+
1043
def to(self, tz: TZ_EXPR) -> "Arrow":
    """Return a new :class:`Arrow <arrow.arrow.Arrow>` object converted to
    the target timezone.

    :param tz: A :ref:`timezone expression <tz-expr>` (``tzinfo`` object,
        IANA name such as ``'US/Pacific'``, offset string such as
        ``'-07:00'``, or the special names ``'local'`` / ``'utc'``).

    Usage::

        >>> utc = arrow.utcnow()
        >>> utc.to('US/Pacific')
        <Arrow [2013-05-08T20:49:12.311072-07:00]>
        >>> utc.to('local').to('utc')
        <Arrow [2013-05-09T03:49:12.311072+00:00]>

    """

    # Normalize string/offset expressions into a tzinfo instance.
    tzinfo = tz if isinstance(tz, dt_tzinfo) else parser.TzinfoParser.parse(tz)

    converted = self._datetime.astimezone(tzinfo)

    return self.__class__(
        converted.year,
        converted.month,
        converted.day,
        converted.hour,
        converted.minute,
        converted.second,
        converted.microsecond,
        converted.tzinfo,
        fold=getattr(converted, "fold", 0),
    )
1088
+
1089
+ # string output and formatting
1090
+
1091
def format(
    self, fmt: str = "YYYY-MM-DD HH:mm:ssZZ", locale: str = DEFAULT_LOCALE
) -> str:
    """Render this instant as a string according to *fmt*.

    :param fmt: the format string (arrow token syntax, e.g. ``'YYYY-MM-DD'``).
    :param locale: the locale used for localized tokens.

    Usage::

        >>> arrow.utcnow().format('YYYY-MM-DD HH:mm:ss ZZ')
        '2013-05-09 03:56:47 -00:00'
        >>> arrow.utcnow().format('X')
        '1368071882'
        >>> arrow.utcnow().format()
        '2013-05-09 03:56:47 -00:00'

    """

    date_formatter = formatter.DateTimeFormatter(locale)
    return date_formatter.format(self._datetime, fmt)
1117
+
1118
def humanize(
    self,
    other: Union["Arrow", dt_datetime, None] = None,
    locale: str = DEFAULT_LOCALE,
    only_distance: bool = False,
    granularity: Union[_GRANULARITY, List[_GRANULARITY]] = "auto",
) -> str:
    """Returns a localized, humanized representation of a relative difference in time.

    :param other: (optional) an :class:`Arrow <arrow.arrow.Arrow>` or ``datetime`` object.
        Defaults to now in the current :class:`Arrow <arrow.arrow.Arrow>` object's timezone.
    :param locale: (optional) a ``str`` specifying a locale.  Defaults to 'en-us'.
    :param only_distance: (optional) returns only time difference eg: "11 seconds" without "in" or "ago" part.
    :param granularity: (optional) defines the precision of the output. Set it to strings 'second', 'minute',
        'hour', 'day', 'week', 'month' or 'year' or a list of any combination of these strings
    :raises TypeError: if *other* is not ``None``, an Arrow, or a datetime.
    :raises ValueError: for an unknown granularity, or one the locale cannot translate.

    Usage::

        >>> earlier = arrow.utcnow().shift(hours=-2)
        >>> earlier.humanize()
        '2 hours ago'

        >>> later = earlier.shift(hours=4)
        >>> later.humanize(earlier)
        'in 4 hours'

    """

    # NOTE: ``locale`` is rebound from the locale *name* to a Locale object;
    # the original name is kept for error messages below.
    locale_name = locale
    locale = locales.get_locale(locale)

    # Normalize ``other`` to an aware datetime in this object's timezone.
    if other is None:
        utc = dt_datetime.utcnow().replace(tzinfo=dateutil_tz.tzutc())
        dt = utc.astimezone(self._datetime.tzinfo)

    elif isinstance(other, Arrow):
        dt = other._datetime

    elif isinstance(other, dt_datetime):
        if other.tzinfo is None:
            dt = other.replace(tzinfo=self._datetime.tzinfo)
        else:
            dt = other.astimezone(self._datetime.tzinfo)

    else:
        raise TypeError(
            f"Invalid 'other' argument of type {type(other).__name__!r}. "
            "Argument must be of type None, Arrow, or datetime."
        )

    # A single-element granularity list behaves exactly like the bare string.
    if isinstance(granularity, list) and len(granularity) == 1:
        granularity = granularity[0]

    # Work in whole seconds; ``sign`` preserves the direction of the difference.
    _delta = int(round((self._datetime - dt).total_seconds()))
    sign = -1 if _delta < 0 else 1
    delta_second = diff = abs(_delta)

    try:
        if granularity == "auto":
            # Pick the largest frame whose doubled span covers the difference,
            # walking up from seconds to years.
            if diff < 10:
                return locale.describe("now", only_distance=only_distance)

            if diff < self._SECS_PER_MINUTE:
                seconds = sign * delta_second
                return locale.describe(
                    "seconds", seconds, only_distance=only_distance
                )

            elif diff < self._SECS_PER_MINUTE * 2:
                return locale.describe("minute", sign, only_distance=only_distance)
            elif diff < self._SECS_PER_HOUR:
                minutes = sign * max(delta_second // self._SECS_PER_MINUTE, 2)
                return locale.describe(
                    "minutes", minutes, only_distance=only_distance
                )

            elif diff < self._SECS_PER_HOUR * 2:
                return locale.describe("hour", sign, only_distance=only_distance)
            elif diff < self._SECS_PER_DAY:
                hours = sign * max(delta_second // self._SECS_PER_HOUR, 2)
                return locale.describe("hours", hours, only_distance=only_distance)
            elif diff < self._SECS_PER_DAY * 2:
                return locale.describe("day", sign, only_distance=only_distance)
            elif diff < self._SECS_PER_WEEK:
                days = sign * max(delta_second // self._SECS_PER_DAY, 2)
                return locale.describe("days", days, only_distance=only_distance)

            elif diff < self._SECS_PER_WEEK * 2:
                return locale.describe("week", sign, only_distance=only_distance)
            elif diff < self._SECS_PER_MONTH:
                weeks = sign * max(delta_second // self._SECS_PER_WEEK, 2)
                return locale.describe("weeks", weeks, only_distance=only_distance)

            elif diff < self._SECS_PER_MONTH * 2:
                return locale.describe("month", sign, only_distance=only_distance)
            elif diff < self._SECS_PER_YEAR:
                # TODO revisit for humanization during leap years
                # Month count is computed from calendar months, not seconds.
                self_months = self._datetime.year * 12 + self._datetime.month
                other_months = dt.year * 12 + dt.month

                months = sign * max(abs(other_months - self_months), 2)

                return locale.describe(
                    "months", months, only_distance=only_distance
                )

            elif diff < self._SECS_PER_YEAR * 2:
                return locale.describe("year", sign, only_distance=only_distance)
            else:
                years = sign * max(delta_second // self._SECS_PER_YEAR, 2)
                return locale.describe("years", years, only_distance=only_distance)

        elif isinstance(granularity, str):
            # Fixed single-frame output, e.g. granularity="hour".
            granularity = cast(TimeFrameLiteral, granularity)  # type: ignore[assignment]

            if granularity == "second":
                delta = sign * float(delta_second)
                if abs(delta) < 2:
                    return locale.describe("now", only_distance=only_distance)
            elif granularity == "minute":
                delta = sign * delta_second / self._SECS_PER_MINUTE
            elif granularity == "hour":
                delta = sign * delta_second / self._SECS_PER_HOUR
            elif granularity == "day":
                delta = sign * delta_second / self._SECS_PER_DAY
            elif granularity == "week":
                delta = sign * delta_second / self._SECS_PER_WEEK
            elif granularity == "month":
                delta = sign * delta_second / self._SECS_PER_MONTH
            elif granularity == "quarter":
                delta = sign * delta_second / self._SECS_PER_QUARTER
            elif granularity == "year":
                delta = sign * delta_second / self._SECS_PER_YEAR
            else:
                raise ValueError(
                    "Invalid level of granularity. "
                    "Please select between 'second', 'minute', 'hour', 'day', 'week', 'month', 'quarter' or 'year'."
                )

            # Pluralize the frame unless the truncated magnitude is exactly 1.
            if trunc(abs(delta)) != 1:
                granularity += "s"  # type: ignore[assignment]
            return locale.describe(granularity, delta, only_distance=only_distance)

        else:
            # Multi-frame output, e.g. granularity=["hour", "minute"].
            if not granularity:
                raise ValueError(
                    "Empty granularity list provided. "
                    "Please select one or more from 'second', 'minute', 'hour', 'day', 'week', 'month', 'quarter', 'year'."
                )

            timeframes: List[Tuple[TimeFrameLiteral, float]] = []

            # Consume the remaining seconds frame by frame (largest first),
            # appending each requested frame's share to ``timeframes``.
            def gather_timeframes(_delta: float, _frame: TimeFrameLiteral) -> float:
                if _frame in granularity:
                    value = sign * _delta / self._SECS_MAP[_frame]
                    _delta %= self._SECS_MAP[_frame]
                    if trunc(abs(value)) != 1:
                        timeframes.append(
                            (cast(TimeFrameLiteral, _frame + "s"), value)
                        )
                    else:
                        timeframes.append((_frame, value))
                return _delta

            delta = float(delta_second)
            frames: Tuple[TimeFrameLiteral, ...] = (
                "year",
                "quarter",
                "month",
                "week",
                "day",
                "hour",
                "minute",
                "second",
            )
            for frame in frames:
                delta = gather_timeframes(delta, frame)

            # Any granularity entry that produced no timeframe was invalid.
            if len(timeframes) < len(granularity):
                raise ValueError(
                    "Invalid level of granularity. "
                    "Please select between 'second', 'minute', 'hour', 'day', 'week', 'month', 'quarter' or 'year'."
                )

            return locale.describe_multi(timeframes, only_distance=only_distance)

    except KeyError as e:
        # The locale lacks a translation for the requested frame.
        raise ValueError(
            f"Humanization of the {e} granularity is not currently translated in the {locale_name!r} locale. "
            "Please consider making a contribution to this locale."
        )
1309
+
1310
def dehumanize(self, input_string: str, locale: str = "en_us") -> "Arrow":
    """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, that represents
    the time difference relative to the attributes of the
    :class:`Arrow <arrow.arrow.Arrow>` object.

    :param input_string: a ``str`` representing a humanized relative time.
    :param locale: (optional) a ``str`` specifying a locale. Defaults to 'en-us'.
    :raises ValueError: if the locale is unsupported, or if the string contains
        no recognizable relative-time information.

    Usage::

        >>> arw = arrow.utcnow()
        >>> arw
        <Arrow [2021-04-20T22:27:34.787885+00:00]>
        >>> earlier = arw.dehumanize("2 days ago")
        >>> earlier
        <Arrow [2021-04-18T22:27:34.787885+00:00]>

        >>> arw = arrow.utcnow()
        >>> arw
        <Arrow [2021-04-20T22:27:34.787885+00:00]>
        >>> later = arw.dehumanize("in a month")
        >>> later
        <Arrow [2021-05-18T22:27:34.787885+00:00]>

    """

    # Create a locale object based off given local
    locale_obj = locales.get_locale(locale)

    # Check to see if locale is supported
    normalized_locale_name = locale.lower().replace("_", "-")

    if normalized_locale_name not in DEHUMANIZE_LOCALES:
        raise ValueError(
            f"Dehumanize does not currently support the {locale} locale, please consider making a contribution to add support for this locale."
        )

    current_time = self.fromdatetime(self._datetime)

    # Create an object containing the relative time info
    # (unit name -> signed amount to shift by; starts at zero everywhere).
    time_object_info = dict.fromkeys(
        ["seconds", "minutes", "hours", "days", "weeks", "months", "years"], 0
    )

    # Create an object representing if unit has been seen
    unit_visited = dict.fromkeys(
        ["now", "seconds", "minutes", "hours", "days", "weeks", "months", "years"],
        False,
    )

    # Create a regex pattern object for numbers
    num_pattern = re.compile(r"\d+")

    # Search input string for each time unit within locale
    for unit, unit_object in locale_obj.timeframes.items():
        # Need to check the type of unit_object to create the correct dictionary
        # (some locales map a unit to several plural-form strings).
        if isinstance(unit_object, Mapping):
            strings_to_search = unit_object
        else:
            strings_to_search = {unit: str(unit_object)}

        # Search for any matches that exist for that locale's unit.
        # Needs to cycle all through strings as some locales have strings that
        # could overlap in a regex match, since input validation isn't being performed.
        for time_delta, time_string in strings_to_search.items():
            # Replace {0} with regex \d representing digits
            search_string = str(time_string)
            search_string = search_string.format(r"\d+")

            # Create search pattern and find within string
            pattern = re.compile(rf"(^|\b|\d){search_string}")
            match = pattern.search(input_string)

            # If there is no match continue to next iteration
            if not match:
                continue

            match_string = match.group()
            num_match = num_pattern.search(match_string)

            # If no number matches
            # Need for absolute value as some locales have signs included in their objects
            if not num_match:
                change_value = (
                    1 if not time_delta.isnumeric() else abs(int(time_delta))
                )
            else:
                change_value = int(num_match.group())

            # No time to update if now is the unit
            if unit == "now":
                unit_visited[unit] = True
                continue

            # Add change value to the correct unit (incorporates the plurality that exists within timeframe i.e second v.s seconds)
            time_unit_to_change = str(unit)
            time_unit_to_change += (
                "s" if (str(time_unit_to_change)[-1] != "s") else ""
            )
            time_object_info[time_unit_to_change] = change_value
            unit_visited[time_unit_to_change] = True

    # Assert error if string does not modify any units
    if not any([True for k, v in unit_visited.items() if v]):
        raise ValueError(
            "Input string not valid. Note: Some locales do not support the week granularity in Arrow. "
            "If you are attempting to use the week granularity on an unsupported locale, this could be the cause of this error."
        )

    # Sign logic: match the whole string against the locale's "in ..." and
    # "... ago" templates to decide direction.
    future_string = locale_obj.future
    future_string = future_string.format(".*")
    future_pattern = re.compile(rf"^{future_string}$")
    future_pattern_match = future_pattern.findall(input_string)

    past_string = locale_obj.past
    past_string = past_string.format(".*")
    past_pattern = re.compile(rf"^{past_string}$")
    past_pattern_match = past_pattern.findall(input_string)

    # If a string contains the now unit, there will be no relative units, hence the need to check if the now unit
    # was visited before raising a ValueError
    if past_pattern_match:
        sign_val = -1
    elif future_pattern_match:
        sign_val = 1
    elif unit_visited["now"]:
        sign_val = 0
    else:
        raise ValueError(
            "Invalid input String. String does not contain any relative time information. "
            "String should either represent a time in the future or a time in the past. "
            "Ex: 'in 5 seconds' or '5 seconds ago'."
        )

    time_changes = {k: sign_val * v for k, v in time_object_info.items()}

    return current_time.shift(**time_changes)
1448
+
1449
+ # query functions
1450
+
1451
def is_between(
    self,
    start: "Arrow",
    end: "Arrow",
    bounds: _BOUNDS = "()",
) -> bool:
    """Returns a boolean denoting whether the :class:`Arrow <arrow.arrow.Arrow>` object is between
    the start and end limits.

    :param start: an :class:`Arrow <arrow.arrow.Arrow>` object.
    :param end: an :class:`Arrow <arrow.arrow.Arrow>` object.
    :param bounds: (optional) a ``str`` of either '()', '(]', '[)', or '[]' that specifies
        whether to include or exclude the start and end values in the range. '(' excludes
        the start, '[' includes the start, ')' excludes the end, and ']' includes the end.
        If the bounds are not specified, the default bound '()' is used.
    :raises TypeError: if *start* or *end* is not an Arrow object.
    :raises ValueError: if *bounds* is not one of the four accepted strings.

    Usage::

        >>> start = arrow.get(datetime(2013, 5, 5, 12, 30, 10))
        >>> end = arrow.get(datetime(2013, 5, 5, 12, 30, 36))
        >>> arrow.get(datetime(2013, 5, 5, 12, 30, 27)).is_between(start, end)
        True

        >>> start = arrow.get(datetime(2013, 5, 5))
        >>> end = arrow.get(datetime(2013, 5, 8))
        >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[]')
        True

        >>> start = arrow.get(datetime(2013, 5, 5))
        >>> end = arrow.get(datetime(2013, 5, 8))
        >>> arrow.get(datetime(2013, 5, 8)).is_between(start, end, '[)')
        False

    """

    util.validate_bounds(bounds)

    if not isinstance(start, Arrow):
        raise TypeError(
            f"Cannot parse start date argument type of {type(start)!r}."
        )

    if not isinstance(end, Arrow):
        # Bug fix: report the *end* argument's type, not start's.
        raise TypeError(f"Cannot parse end date argument type of {type(end)!r}.")

    include_start = bounds[0] == "["
    include_end = bounds[1] == "]"

    target_ts = self.float_timestamp
    start_ts = start.float_timestamp
    end_ts = end.float_timestamp

    return (
        (start_ts <= target_ts <= end_ts)
        and (include_start or start_ts < target_ts)
        and (include_end or target_ts < end_ts)
    )
1508
+
1509
+ # datetime methods
1510
+
1511
def date(self) -> date:
    """Return a naive ``date`` carrying this instant's year, month, and day.

    Usage::

        >>> arrow.utcnow().date()
        datetime.date(2019, 1, 23)

    """
    wrapped = self._datetime
    return wrapped.date()
1522
+
1523
def time(self) -> dt_time:
    """Return a naive ``time`` carrying this instant's hour, minute, second,
    and microsecond.

    Usage::

        >>> arrow.utcnow().time()
        datetime.time(12, 15, 34, 68352)

    """
    wrapped = self._datetime
    return wrapped.time()
1534
+
1535
def timetz(self) -> dt_time:
    """Return a ``time`` carrying this instant's hour, minute, second,
    microsecond, and tzinfo.

    Usage::

        >>> arrow.utcnow().timetz()
        datetime.time(12, 5, 18, 298893, tzinfo=tzutc())

    """
    wrapped = self._datetime
    return wrapped.timetz()
1547
+
1548
def astimezone(self, tz: Optional[dt_tzinfo]) -> dt_datetime:
    """Return a plain ``datetime`` converted to the given timezone.

    :param tz: a ``tzinfo`` object.

    Usage::

        >>> pacific = arrow.now('US/Pacific')
        >>> nyc = arrow.now('America/New_York').tzinfo
        >>> pacific.astimezone(nyc)
        datetime.datetime(2019, 1, 20, 10, 24, 22, 328172, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York'))

    """
    wrapped = self._datetime
    return wrapped.astimezone(tz)
1563
+
1564
def utcoffset(self) -> Optional[timedelta]:
    """Return this instant's offset from UTC as a ``timedelta``
    (``None`` for a naive datetime).

    Usage::

        >>> arrow.now('US/Pacific').utcoffset()
        datetime.timedelta(-1, 57600)

    """
    wrapped = self._datetime
    return wrapped.utcoffset()
1576
+
1577
def dst(self) -> Optional[timedelta]:
    """Return the daylight-saving adjustment reported by this instant's
    tzinfo (``None`` when the tzinfo provides no DST information).

    Usage::

        >>> arrow.utcnow().dst()
        datetime.timedelta(0)

    """
    wrapped = self._datetime
    return wrapped.dst()
1588
+
1589
def timetuple(self) -> struct_time:
    """Return a ``time.struct_time`` in this object's own timezone.

    Usage::

        >>> arrow.utcnow().timetuple()
        time.struct_time(tm_year=2019, tm_mon=1, tm_mday=20, tm_hour=15, tm_min=17, tm_sec=8, tm_wday=6, tm_yday=20, tm_isdst=0)

    """
    wrapped = self._datetime
    return wrapped.timetuple()
1600
+
1601
def utctimetuple(self) -> struct_time:
    """Return a ``time.struct_time`` expressed in UTC.

    Usage::

        >>> arrow.utcnow().utctimetuple()
        time.struct_time(tm_year=2019, tm_mon=1, tm_mday=19, tm_hour=21, tm_min=41, tm_sec=7, tm_wday=5, tm_yday=19, tm_isdst=0)

    """
    wrapped = self._datetime
    return wrapped.utctimetuple()
1612
+
1613
def toordinal(self) -> int:
    """Return the proleptic Gregorian ordinal of this instant's date.

    Usage::

        >>> arrow.utcnow().toordinal()
        737078

    """
    wrapped = self._datetime
    return wrapped.toordinal()
1624
+
1625
def weekday(self) -> int:
    """Return the day of the week as an integer, Monday = 0 through Sunday = 6.

    Usage::

        >>> arrow.utcnow().weekday()
        5

    """
    wrapped = self._datetime
    return wrapped.weekday()
1636
+
1637
def isoweekday(self) -> int:
    """Return the ISO day of the week as an integer, Monday = 1 through Sunday = 7.

    Usage::

        >>> arrow.utcnow().isoweekday()
        6

    """
    wrapped = self._datetime
    return wrapped.isoweekday()
1648
+
1649
def isocalendar(self) -> Tuple[int, int, int]:
    """Return the 3-tuple (ISO year, ISO week number, ISO weekday).

    Usage::

        >>> arrow.utcnow().isocalendar()
        (2019, 3, 6)

    """
    wrapped = self._datetime
    return wrapped.isocalendar()
1660
+
1661
def isoformat(self, sep: str = "T", timespec: str = "auto") -> str:
    """Return an ISO 8601 string for this instant.

    Usage::

        >>> arrow.utcnow().isoformat()
        '2019-01-19T18:30:52.442118+00:00'

    """
    wrapped = self._datetime
    return wrapped.isoformat(sep, timespec)
1672
+
1673
def ctime(self) -> str:
    """Return a ctime-style string for this instant.

    Usage::

        >>> arrow.utcnow().ctime()
        'Sat Jan 19 18:26:50 2019'

    """
    wrapped = self._datetime
    return wrapped.ctime()
1684
+
1685
def strftime(self, format: str) -> str:
    """Format this instant in the style of ``datetime.strftime``.

    :param format: the ``strftime``-style format string.

    Usage::

        >>> arrow.utcnow().strftime('%d-%m-%Y %H:%M:%S')
        '23-01-2019 12:28:17'

    """
    wrapped = self._datetime
    return wrapped.strftime(format)
1698
+
1699
def for_json(self) -> str:
    """Serialize via simplejson's ``for_json`` protocol (ISO 8601 string).

    Usage::

        >>> arrow.utcnow().for_json()
        '2019-01-19T18:25:36.760079+00:00'

    """
    # Delegates to isoformat so both JSON paths stay in sync.
    return self.isoformat()
1710
+
1711
+ # math
1712
+
1713
def __add__(self, other: Any) -> "Arrow":
    """Add a ``timedelta`` or ``relativedelta``, yielding a new Arrow."""
    if not isinstance(other, (timedelta, relativedelta)):
        return NotImplemented
    shifted = self._datetime + other
    return self.fromdatetime(shifted, self._datetime.tzinfo)
1718
+
1719
+ def __radd__(self, other: Union[timedelta, relativedelta]) -> "Arrow":
1720
+ return self.__add__(other)
1721
+
1722
@overload
def __sub__(self, other: Union[timedelta, relativedelta]) -> "Arrow":
    pass  # pragma: no cover

@overload
def __sub__(self, other: Union[dt_datetime, "Arrow"]) -> timedelta:
    pass  # pragma: no cover

def __sub__(self, other: Any) -> Union[timedelta, "Arrow"]:
    """Subtract a delta (-> new Arrow) or a datetime/Arrow (-> timedelta)."""
    if isinstance(other, (timedelta, relativedelta)):
        return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)
    if isinstance(other, dt_datetime):
        return self._datetime - other
    if isinstance(other, Arrow):
        return self._datetime - other._datetime
    return NotImplemented
1741
+
1742
+ def __rsub__(self, other: Any) -> timedelta:
1743
+ if isinstance(other, dt_datetime):
1744
+ return other - self._datetime
1745
+
1746
+ return NotImplemented
1747
+
1748
+ # comparisons
1749
+
1750
def __eq__(self, other: Any) -> bool:
    """Equal when the underlying datetimes compare equal; False otherwise."""
    if isinstance(other, (Arrow, dt_datetime)):
        return self._datetime == self._get_datetime(other)
    return False
1755
+
1756
def __ne__(self, other: Any) -> bool:
    """Negation of :meth:`__eq__`; True for non-datetime operands."""
    if isinstance(other, (Arrow, dt_datetime)):
        return not self.__eq__(other)
    return True
1761
+
1762
def __gt__(self, other: Any) -> bool:
    """Strictly-after comparison against an Arrow or datetime."""
    if isinstance(other, (Arrow, dt_datetime)):
        return self._datetime > self._get_datetime(other)
    return NotImplemented
1767
+
1768
def __ge__(self, other: Any) -> bool:
    """At-or-after comparison against an Arrow or datetime."""
    if isinstance(other, (Arrow, dt_datetime)):
        return self._datetime >= self._get_datetime(other)
    return NotImplemented
1773
+
1774
def __lt__(self, other: Any) -> bool:
    """Strictly-before comparison against an Arrow or datetime."""
    if isinstance(other, (Arrow, dt_datetime)):
        return self._datetime < self._get_datetime(other)
    return NotImplemented
1779
+
1780
def __le__(self, other: Any) -> bool:
    """At-or-before comparison against an Arrow or datetime."""
    if isinstance(other, (Arrow, dt_datetime)):
        return self._datetime <= self._get_datetime(other)
    return NotImplemented
1785
+
1786
+ # internal methods
1787
+ @staticmethod
1788
+ def _get_tzinfo(tz_expr: Optional[TZ_EXPR]) -> dt_tzinfo:
1789
+ """Get normalized tzinfo object from various inputs."""
1790
+ if tz_expr is None:
1791
+ return dateutil_tz.tzutc()
1792
+ if isinstance(tz_expr, dt_tzinfo):
1793
+ return tz_expr
1794
+ else:
1795
+ try:
1796
+ return parser.TzinfoParser.parse(tz_expr)
1797
+ except parser.ParserError:
1798
+ raise ValueError(f"{tz_expr!r} not recognized as a timezone.")
1799
+
1800
@classmethod
def _get_datetime(
    cls, expr: Union["Arrow", dt_datetime, int, float, str]
) -> dt_datetime:
    """Coerce *expr* (Arrow, datetime, or timestamp) into a ``datetime``.

    :raises ValueError: if *expr* is none of the accepted forms.
    """
    if isinstance(expr, Arrow):
        return expr.datetime
    if isinstance(expr, dt_datetime):
        return expr
    if util.is_timestamp(expr):
        return cls.utcfromtimestamp(float(expr)).datetime
    raise ValueError(f"{expr!r} not recognized as a datetime or timestamp.")
1814
+
1815
+ @classmethod
1816
+ def _get_frames(cls, name: _T_FRAMES) -> Tuple[str, str, int]:
1817
+ """Finds relevant timeframe and steps for use in range and span methods.
1818
+
1819
+ Returns a 3 element tuple in the form (frame, plural frame, step), for example ("day", "days", 1)
1820
+
1821
+ """
1822
+ if name in cls._ATTRS:
1823
+ return name, f"{name}s", 1
1824
+ elif name[-1] == "s" and name[:-1] in cls._ATTRS:
1825
+ return name[:-1], name, 1
1826
+ elif name in ["week", "weeks"]:
1827
+ return "week", "weeks", 1
1828
+ elif name in ["quarter", "quarters"]:
1829
+ return "quarter", "months", 3
1830
+ else:
1831
+ supported = ", ".join(
1832
+ [
1833
+ "year(s)",
1834
+ "month(s)",
1835
+ "day(s)",
1836
+ "hour(s)",
1837
+ "minute(s)",
1838
+ "second(s)",
1839
+ "microsecond(s)",
1840
+ "week(s)",
1841
+ "quarter(s)",
1842
+ ]
1843
+ )
1844
+ raise ValueError(
1845
+ f"Range or span over frame {name} not supported. Supported frames: {supported}."
1846
+ )
1847
+
1848
+ @classmethod
1849
+ def _get_iteration_params(cls, end: Any, limit: Optional[int]) -> Tuple[Any, int]:
1850
+ """Sets default end and limit values for range method."""
1851
+ if end is None:
1852
+ if limit is None:
1853
+ raise ValueError("One of 'end' or 'limit' is required.")
1854
+
1855
+ return cls.max, limit
1856
+
1857
+ else:
1858
+ if limit is None:
1859
+ return end, sys.maxsize
1860
+ return end, limit
1861
+
1862
+ @staticmethod
1863
+ def _is_last_day_of_month(date: "Arrow") -> bool:
1864
+ """Returns a boolean indicating whether the datetime is the last day of the month."""
1865
+ return date.day == calendar.monthrange(date.year, date.month)[1]
1866
+
1867
+
1868
# Smallest and largest instants representable by Arrow, mirroring
# ``datetime.min`` and ``datetime.max``.
Arrow.min = Arrow.fromdatetime(dt_datetime.min)
Arrow.max = Arrow.fromdatetime(dt_datetime.max)
venv/Lib/site-packages/arrow/constants.py ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Constants used internally in arrow."""
2
+
3
+ import sys
4
+ from datetime import datetime
5
+
6
+ if sys.version_info < (3, 8): # pragma: no cover
7
+ from typing_extensions import Final
8
+ else:
9
+ from typing import Final # pragma: no cover
10
+
11
+ # datetime.max.timestamp() errors on Windows, so we must hardcode
12
+ # the highest possible datetime value that can output a timestamp.
13
+ # tl;dr platform-independent max timestamps are hard to form
14
+ # See: https://stackoverflow.com/q/46133223
15
+ try:
16
+ # Get max timestamp. Works on POSIX-based systems like Linux and macOS,
17
+ # but will trigger an OverflowError, ValueError, or OSError on Windows
18
+ _MAX_TIMESTAMP = datetime.max.timestamp()
19
+ except (OverflowError, ValueError, OSError): # pragma: no cover
20
+ # Fallback for Windows and 32-bit systems if initial max timestamp call fails
21
+ # Must get max value of ctime on Windows based on architecture (x32 vs x64)
22
+ # https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/ctime-ctime32-ctime64-wctime-wctime32-wctime64
23
+ # Note: this may occur on both 32-bit Linux systems (issue #930) along with Windows systems
24
+ is_64bits = sys.maxsize > 2**32
25
+ _MAX_TIMESTAMP = (
26
+ datetime(3000, 1, 1, 23, 59, 59, 999999).timestamp()
27
+ if is_64bits
28
+ else datetime(2038, 1, 1, 23, 59, 59, 999999).timestamp()
29
+ )
30
+
31
+ MAX_TIMESTAMP: Final[float] = _MAX_TIMESTAMP
32
+ MAX_TIMESTAMP_MS: Final[float] = MAX_TIMESTAMP * 1000
33
+ MAX_TIMESTAMP_US: Final[float] = MAX_TIMESTAMP * 1_000_000
34
+
35
+ MAX_ORDINAL: Final[int] = datetime.max.toordinal()
36
+ MIN_ORDINAL: Final[int] = 1
37
+
38
+ DEFAULT_LOCALE: Final[str] = "en-us"
39
+
40
# Supported dehumanize locales
# (normalized lowercase, hyphen-separated names checked by Arrow.dehumanize).
DEHUMANIZE_LOCALES = {
    "en",
    "en-us",
    "en-gb",
    "en-au",
    "en-be",
    "en-jp",
    "en-za",
    "en-ca",
    "en-ph",
    "fr",
    "fr-fr",
    "fr-ca",
    "it",
    "it-it",
    "es",
    "es-es",
    "el",
    "el-gr",
    "ja",
    "ja-jp",
    "se",
    "se-fi",
    "se-no",
    "se-se",
    "sv",
    "sv-se",
    "fi",
    "fi-fi",
    "zh",
    "zh-cn",
    "zh-tw",
    "zh-hk",
    "nl",
    "nl-nl",
    "be",
    "be-by",
    "pl",
    "pl-pl",
    "ru",
    "ru-ru",
    "af",
    "bg",
    "bg-bg",
    "ua",
    "uk",
    "uk-ua",
    "mk",
    "mk-mk",
    "de",
    "de-de",
    "de-ch",
    "de-at",
    "nb",
    "nb-no",
    "nn",
    "nn-no",
    "pt",
    "pt-pt",
    "pt-br",
    "tl",
    "tl-ph",
    "vi",
    "vi-vn",
    "tr",
    "tr-tr",
    "az",
    "az-az",
    "da",
    "da-dk",
    "ml",
    "hi",
    "cs",
    "cs-cz",
    "sk",
    "sk-sk",
    "fa",
    "fa-ir",
    "mr",
    "ca",
    "ca-es",
    "ca-ad",
    "ca-fr",
    "ca-it",
    "eo",
    "eo-xx",
    "bn",
    "bn-bd",
    "bn-in",
    "rm",
    "rm-ch",
    "ro",
    "ro-ro",
    "sl",
    "sl-si",
    "id",
    "id-id",
    "ne",
    "ne-np",
    "ee",
    "et",
    "sw",
    "sw-ke",
    "sw-tz",
    "la",
    "la-va",
    "lt",
    "lt-lt",
    "ms",
    "ms-my",
    "ms-bn",
    "or",
    "or-in",
    "lb",
    "lb-lu",
    "zu",
    "zu-za",
    "sq",
    "sq-al",
    "ta",
    "ta-in",
    "ta-lk",
    "ur",
    "ur-pk",
    "ka",
    "ka-ge",
    "kk",
    "kk-kz",
    # "lo",
    # "lo-la",
    "am",
    "am-et",
    "hy-am",
    "hy",
    "uz",
    "uz-uz",
}
venv/Lib/site-packages/arrow/factory.py ADDED
@@ -0,0 +1,345 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Implements the :class:`ArrowFactory <arrow.factory.ArrowFactory>` class,
3
+ providing factory methods for common :class:`Arrow <arrow.arrow.Arrow>`
4
+ construction scenarios.
5
+
6
+ """
7
+
8
+
9
+ import calendar
10
+ from datetime import date, datetime
11
+ from datetime import tzinfo as dt_tzinfo
12
+ from decimal import Decimal
13
+ from time import struct_time
14
+ from typing import Any, List, Optional, Tuple, Type, Union, overload
15
+
16
+ from dateutil import tz as dateutil_tz
17
+
18
+ from arrow import parser
19
+ from arrow.arrow import TZ_EXPR, Arrow
20
+ from arrow.constants import DEFAULT_LOCALE
21
+ from arrow.util import is_timestamp, iso_to_gregorian
22
+
23
+
24
+ class ArrowFactory:
25
+ """A factory for generating :class:`Arrow <arrow.arrow.Arrow>` objects.
26
+
27
+ :param type: (optional) the :class:`Arrow <arrow.arrow.Arrow>`-based class to construct from.
28
+ Defaults to :class:`Arrow <arrow.arrow.Arrow>`.
29
+
30
+ """
31
+
32
+ type: Type[Arrow]
33
+
34
+ def __init__(self, type: Type[Arrow] = Arrow) -> None:
35
+ self.type = type
36
+
37
+ @overload
38
+ def get(
39
+ self,
40
+ *,
41
+ locale: str = DEFAULT_LOCALE,
42
+ tzinfo: Optional[TZ_EXPR] = None,
43
+ normalize_whitespace: bool = False,
44
+ ) -> Arrow:
45
+ ... # pragma: no cover
46
+
47
+ @overload
48
+ def get(
49
+ self,
50
+ __obj: Union[
51
+ Arrow,
52
+ datetime,
53
+ date,
54
+ struct_time,
55
+ dt_tzinfo,
56
+ int,
57
+ float,
58
+ str,
59
+ Tuple[int, int, int],
60
+ ],
61
+ *,
62
+ locale: str = DEFAULT_LOCALE,
63
+ tzinfo: Optional[TZ_EXPR] = None,
64
+ normalize_whitespace: bool = False,
65
+ ) -> Arrow:
66
+ ... # pragma: no cover
67
+
68
+ @overload
69
+ def get(
70
+ self,
71
+ __arg1: Union[datetime, date],
72
+ __arg2: TZ_EXPR,
73
+ *,
74
+ locale: str = DEFAULT_LOCALE,
75
+ tzinfo: Optional[TZ_EXPR] = None,
76
+ normalize_whitespace: bool = False,
77
+ ) -> Arrow:
78
+ ... # pragma: no cover
79
+
80
+ @overload
81
+ def get(
82
+ self,
83
+ __arg1: str,
84
+ __arg2: Union[str, List[str]],
85
+ *,
86
+ locale: str = DEFAULT_LOCALE,
87
+ tzinfo: Optional[TZ_EXPR] = None,
88
+ normalize_whitespace: bool = False,
89
+ ) -> Arrow:
90
+ ... # pragma: no cover
91
+
92
+ def get(self, *args: Any, **kwargs: Any) -> Arrow:
93
+ """Returns an :class:`Arrow <arrow.arrow.Arrow>` object based on flexible inputs.
94
+
95
+ :param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en-us'.
96
+ :param tzinfo: (optional) a :ref:`timezone expression <tz-expr>` or tzinfo object.
97
+ Replaces the timezone unless using an input form that is explicitly UTC or specifies
98
+ the timezone in a positional argument. Defaults to UTC.
99
+ :param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize
100
+ redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing.
101
+ Defaults to false.
102
+
103
+ Usage::
104
+
105
+ >>> import arrow
106
+
107
+ **No inputs** to get current UTC time::
108
+
109
+ >>> arrow.get()
110
+ <Arrow [2013-05-08T05:51:43.316458+00:00]>
111
+
112
+ **One** :class:`Arrow <arrow.arrow.Arrow>` object, to get a copy.
113
+
114
+ >>> arw = arrow.utcnow()
115
+ >>> arrow.get(arw)
116
+ <Arrow [2013-10-23T15:21:54.354846+00:00]>
117
+
118
+ **One** ``float`` or ``int``, convertible to a floating-point timestamp, to get
119
+ that timestamp in UTC::
120
+
121
+ >>> arrow.get(1367992474.293378)
122
+ <Arrow [2013-05-08T05:54:34.293378+00:00]>
123
+
124
+ >>> arrow.get(1367992474)
125
+ <Arrow [2013-05-08T05:54:34+00:00]>
126
+
127
+ **One** ISO 8601-formatted ``str``, to parse it::
128
+
129
+ >>> arrow.get('2013-09-29T01:26:43.830580')
130
+ <Arrow [2013-09-29T01:26:43.830580+00:00]>
131
+
132
+ **One** ISO 8601-formatted ``str``, in basic format, to parse it::
133
+
134
+ >>> arrow.get('20160413T133656.456289')
135
+ <Arrow [2016-04-13T13:36:56.456289+00:00]>
136
+
137
+ **One** ``tzinfo``, to get the current time **converted** to that timezone::
138
+
139
+ >>> arrow.get(tz.tzlocal())
140
+ <Arrow [2013-05-07T22:57:28.484717-07:00]>
141
+
142
+ **One** naive ``datetime``, to get that datetime in UTC::
143
+
144
+ >>> arrow.get(datetime(2013, 5, 5))
145
+ <Arrow [2013-05-05T00:00:00+00:00]>
146
+
147
+ **One** aware ``datetime``, to get that datetime::
148
+
149
+ >>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal()))
150
+ <Arrow [2013-05-05T00:00:00-07:00]>
151
+
152
+ **One** naive ``date``, to get that date in UTC::
153
+
154
+ >>> arrow.get(date(2013, 5, 5))
155
+ <Arrow [2013-05-05T00:00:00+00:00]>
156
+
157
+ **One** time.struct time::
158
+
159
+ >>> arrow.get(gmtime(0))
160
+ <Arrow [1970-01-01T00:00:00+00:00]>
161
+
162
+ **One** iso calendar ``tuple``, to get that week date in UTC::
163
+
164
+ >>> arrow.get((2013, 18, 7))
165
+ <Arrow [2013-05-05T00:00:00+00:00]>
166
+
167
+ **Two** arguments, a naive or aware ``datetime``, and a replacement
168
+ :ref:`timezone expression <tz-expr>`::
169
+
170
+ >>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')
171
+ <Arrow [2013-05-05T00:00:00-07:00]>
172
+
173
+ **Two** arguments, a naive ``date``, and a replacement
174
+ :ref:`timezone expression <tz-expr>`::
175
+
176
+ >>> arrow.get(date(2013, 5, 5), 'US/Pacific')
177
+ <Arrow [2013-05-05T00:00:00-07:00]>
178
+
179
+ **Two** arguments, both ``str``, to parse the first according to the format of the second::
180
+
181
+ >>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ')
182
+ <Arrow [2013-05-05T12:30:45-05:00]>
183
+
184
+ **Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try::
185
+
186
+ >>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss'])
187
+ <Arrow [2013-05-05T12:30:45+00:00]>
188
+
189
+ **Three or more** arguments, as for the direct constructor of an ``Arrow`` object::
190
+
191
+ >>> arrow.get(2013, 5, 5, 12, 30, 45)
192
+ <Arrow [2013-05-05T12:30:45+00:00]>
193
+
194
+ """
195
+
196
+ arg_count = len(args)
197
+ locale = kwargs.pop("locale", DEFAULT_LOCALE)
198
+ tz = kwargs.get("tzinfo", None)
199
+ normalize_whitespace = kwargs.pop("normalize_whitespace", False)
200
+
201
+ # if kwargs given, send to constructor unless only tzinfo provided
202
+ if len(kwargs) > 1:
203
+ arg_count = 3
204
+
205
+ # tzinfo kwarg is not provided
206
+ if len(kwargs) == 1 and tz is None:
207
+ arg_count = 3
208
+
209
+ # () -> now, @ tzinfo or utc
210
+ if arg_count == 0:
211
+ if isinstance(tz, str):
212
+ tz = parser.TzinfoParser.parse(tz)
213
+ return self.type.now(tzinfo=tz)
214
+
215
+ if isinstance(tz, dt_tzinfo):
216
+ return self.type.now(tzinfo=tz)
217
+
218
+ return self.type.utcnow()
219
+
220
+ if arg_count == 1:
221
+ arg = args[0]
222
+ if isinstance(arg, Decimal):
223
+ arg = float(arg)
224
+
225
+ # (None) -> raises an exception
226
+ if arg is None:
227
+ raise TypeError("Cannot parse argument of type None.")
228
+
229
+ # try (int, float) -> from timestamp @ tzinfo
230
+ elif not isinstance(arg, str) and is_timestamp(arg):
231
+ if tz is None:
232
+ # set to UTC by default
233
+ tz = dateutil_tz.tzutc()
234
+ return self.type.fromtimestamp(arg, tzinfo=tz)
235
+
236
+ # (Arrow) -> from the object's datetime @ tzinfo
237
+ elif isinstance(arg, Arrow):
238
+ return self.type.fromdatetime(arg.datetime, tzinfo=tz)
239
+
240
+ # (datetime) -> from datetime @ tzinfo
241
+ elif isinstance(arg, datetime):
242
+ return self.type.fromdatetime(arg, tzinfo=tz)
243
+
244
+ # (date) -> from date @ tzinfo
245
+ elif isinstance(arg, date):
246
+ return self.type.fromdate(arg, tzinfo=tz)
247
+
248
+ # (tzinfo) -> now @ tzinfo
249
+ elif isinstance(arg, dt_tzinfo):
250
+ return self.type.now(tzinfo=arg)
251
+
252
+ # (str) -> parse @ tzinfo
253
+ elif isinstance(arg, str):
254
+ dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace)
255
+ return self.type.fromdatetime(dt, tzinfo=tz)
256
+
257
+ # (struct_time) -> from struct_time
258
+ elif isinstance(arg, struct_time):
259
+ return self.type.utcfromtimestamp(calendar.timegm(arg))
260
+
261
+ # (iso calendar) -> convert then from date @ tzinfo
262
+ elif isinstance(arg, tuple) and len(arg) == 3:
263
+ d = iso_to_gregorian(*arg)
264
+ return self.type.fromdate(d, tzinfo=tz)
265
+
266
+ else:
267
+ raise TypeError(f"Cannot parse single argument of type {type(arg)!r}.")
268
+
269
+ elif arg_count == 2:
270
+ arg_1, arg_2 = args[0], args[1]
271
+
272
+ if isinstance(arg_1, datetime):
273
+ # (datetime, tzinfo/str) -> fromdatetime @ tzinfo
274
+ if isinstance(arg_2, (dt_tzinfo, str)):
275
+ return self.type.fromdatetime(arg_1, tzinfo=arg_2)
276
+ else:
277
+ raise TypeError(
278
+ f"Cannot parse two arguments of types 'datetime', {type(arg_2)!r}."
279
+ )
280
+
281
+ elif isinstance(arg_1, date):
282
+ # (date, tzinfo/str) -> fromdate @ tzinfo
283
+ if isinstance(arg_2, (dt_tzinfo, str)):
284
+ return self.type.fromdate(arg_1, tzinfo=arg_2)
285
+ else:
286
+ raise TypeError(
287
+ f"Cannot parse two arguments of types 'date', {type(arg_2)!r}."
288
+ )
289
+
290
+ # (str, format) -> parse @ tzinfo
291
+ elif isinstance(arg_1, str) and isinstance(arg_2, (str, list)):
292
+ dt = parser.DateTimeParser(locale).parse(
293
+ args[0], args[1], normalize_whitespace
294
+ )
295
+ return self.type.fromdatetime(dt, tzinfo=tz)
296
+
297
+ else:
298
+ raise TypeError(
299
+ f"Cannot parse two arguments of types {type(arg_1)!r} and {type(arg_2)!r}."
300
+ )
301
+
302
+ # 3+ args -> datetime-like via constructor
303
+ else:
304
+ return self.type(*args, **kwargs)
305
+
306
+ def utcnow(self) -> Arrow:
307
+ """Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC time.
308
+
309
+ Usage::
310
+
311
+ >>> import arrow
312
+ >>> arrow.utcnow()
313
+ <Arrow [2013-05-08T05:19:07.018993+00:00]>
314
+ """
315
+
316
+ return self.type.utcnow()
317
+
318
+ def now(self, tz: Optional[TZ_EXPR] = None) -> Arrow:
319
+ """Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in the given
320
+ timezone.
321
+
322
+ :param tz: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to local time.
323
+
324
+ Usage::
325
+
326
+ >>> import arrow
327
+ >>> arrow.now()
328
+ <Arrow [2013-05-07T22:19:11.363410-07:00]>
329
+
330
+ >>> arrow.now('US/Pacific')
331
+ <Arrow [2013-05-07T22:19:15.251821-07:00]>
332
+
333
+ >>> arrow.now('+02:00')
334
+ <Arrow [2013-05-08T07:19:25.618646+02:00]>
335
+
336
+ >>> arrow.now('local')
337
+ <Arrow [2013-05-07T22:19:39.130059-07:00]>
338
+ """
339
+
340
+ if tz is None:
341
+ tz = dateutil_tz.tzlocal()
342
+ elif not isinstance(tz, dt_tzinfo):
343
+ tz = parser.TzinfoParser.parse(tz)
344
+
345
+ return self.type.now(tz)
venv/Lib/site-packages/arrow/formatter.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provides the :class:`Arrow <arrow.formatter.DateTimeFormatter>` class, an improved formatter for datetimes."""
2
+
3
+ import re
4
+ import sys
5
+ from datetime import datetime, timedelta
6
+ from typing import Optional, Pattern, cast
7
+
8
+ from dateutil import tz as dateutil_tz
9
+
10
+ from arrow import locales
11
+ from arrow.constants import DEFAULT_LOCALE
12
+
13
+ if sys.version_info < (3, 8): # pragma: no cover
14
+ from typing_extensions import Final
15
+ else:
16
+ from typing import Final # pragma: no cover
17
+
18
+
19
+ FORMAT_ATOM: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"
20
+ FORMAT_COOKIE: Final[str] = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ"
21
+ FORMAT_RFC822: Final[str] = "ddd, DD MMM YY HH:mm:ss Z"
22
+ FORMAT_RFC850: Final[str] = "dddd, DD-MMM-YY HH:mm:ss ZZZ"
23
+ FORMAT_RFC1036: Final[str] = "ddd, DD MMM YY HH:mm:ss Z"
24
+ FORMAT_RFC1123: Final[str] = "ddd, DD MMM YYYY HH:mm:ss Z"
25
+ FORMAT_RFC2822: Final[str] = "ddd, DD MMM YYYY HH:mm:ss Z"
26
+ FORMAT_RFC3339: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"
27
+ FORMAT_RSS: Final[str] = "ddd, DD MMM YYYY HH:mm:ss Z"
28
+ FORMAT_W3C: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"
29
+
30
+
31
+ class DateTimeFormatter:
32
+ # This pattern matches characters enclosed in square brackets are matched as
33
+ # an atomic group. For more info on atomic groups and how to they are
34
+ # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578
35
+
36
+ _FORMAT_RE: Final[Pattern[str]] = re.compile(
37
+ r"(\[(?:(?=(?P<literal>[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)"
38
+ )
39
+
40
+ locale: locales.Locale
41
+
42
+ def __init__(self, locale: str = DEFAULT_LOCALE) -> None:
43
+ self.locale = locales.get_locale(locale)
44
+
45
+ def format(cls, dt: datetime, fmt: str) -> str:
46
+ # FIXME: _format_token() is nullable
47
+ return cls._FORMAT_RE.sub(
48
+ lambda m: cast(str, cls._format_token(dt, m.group(0))), fmt
49
+ )
50
+
51
+ def _format_token(self, dt: datetime, token: Optional[str]) -> Optional[str]:
52
+ if token and token.startswith("[") and token.endswith("]"):
53
+ return token[1:-1]
54
+
55
+ if token == "YYYY":
56
+ return self.locale.year_full(dt.year)
57
+ if token == "YY":
58
+ return self.locale.year_abbreviation(dt.year)
59
+
60
+ if token == "MMMM":
61
+ return self.locale.month_name(dt.month)
62
+ if token == "MMM":
63
+ return self.locale.month_abbreviation(dt.month)
64
+ if token == "MM":
65
+ return f"{dt.month:02d}"
66
+ if token == "M":
67
+ return f"{dt.month}"
68
+
69
+ if token == "DDDD":
70
+ return f"{dt.timetuple().tm_yday:03d}"
71
+ if token == "DDD":
72
+ return f"{dt.timetuple().tm_yday}"
73
+ if token == "DD":
74
+ return f"{dt.day:02d}"
75
+ if token == "D":
76
+ return f"{dt.day}"
77
+
78
+ if token == "Do":
79
+ return self.locale.ordinal_number(dt.day)
80
+
81
+ if token == "dddd":
82
+ return self.locale.day_name(dt.isoweekday())
83
+ if token == "ddd":
84
+ return self.locale.day_abbreviation(dt.isoweekday())
85
+ if token == "d":
86
+ return f"{dt.isoweekday()}"
87
+
88
+ if token == "HH":
89
+ return f"{dt.hour:02d}"
90
+ if token == "H":
91
+ return f"{dt.hour}"
92
+ if token == "hh":
93
+ return f"{dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12):02d}"
94
+ if token == "h":
95
+ return f"{dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12)}"
96
+
97
+ if token == "mm":
98
+ return f"{dt.minute:02d}"
99
+ if token == "m":
100
+ return f"{dt.minute}"
101
+
102
+ if token == "ss":
103
+ return f"{dt.second:02d}"
104
+ if token == "s":
105
+ return f"{dt.second}"
106
+
107
+ if token == "SSSSSS":
108
+ return f"{dt.microsecond:06d}"
109
+ if token == "SSSSS":
110
+ return f"{dt.microsecond // 10:05d}"
111
+ if token == "SSSS":
112
+ return f"{dt.microsecond // 100:04d}"
113
+ if token == "SSS":
114
+ return f"{dt.microsecond // 1000:03d}"
115
+ if token == "SS":
116
+ return f"{dt.microsecond // 10000:02d}"
117
+ if token == "S":
118
+ return f"{dt.microsecond // 100000}"
119
+
120
+ if token == "X":
121
+ return f"{dt.timestamp()}"
122
+
123
+ if token == "x":
124
+ return f"{dt.timestamp() * 1_000_000:.0f}"
125
+
126
+ if token == "ZZZ":
127
+ return dt.tzname()
128
+
129
+ if token in ["ZZ", "Z"]:
130
+ separator = ":" if token == "ZZ" else ""
131
+ tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo
132
+ # `dt` must be aware object. Otherwise, this line will raise AttributeError
133
+ # https://github.com/arrow-py/arrow/pull/883#discussion_r529866834
134
+ # datetime awareness: https://docs.python.org/3/library/datetime.html#aware-and-naive-objects
135
+ total_minutes = int(cast(timedelta, tz.utcoffset(dt)).total_seconds() / 60)
136
+
137
+ sign = "+" if total_minutes >= 0 else "-"
138
+ total_minutes = abs(total_minutes)
139
+ hour, minute = divmod(total_minutes, 60)
140
+
141
+ return f"{sign}{hour:02d}{separator}{minute:02d}"
142
+
143
+ if token in ("a", "A"):
144
+ return self.locale.meridian(dt.hour, token)
145
+
146
+ if token == "W":
147
+ year, week, day = dt.isocalendar()
148
+ return f"{year}-W{week:02d}-{day}"
venv/Lib/site-packages/arrow/locales.py ADDED
The diff for this file is too large to render. See raw diff
 
venv/Lib/site-packages/arrow/parser.py ADDED
@@ -0,0 +1,771 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Provides the :class:`Arrow <arrow.parser.DateTimeParser>` class, a better way to parse datetime strings."""
2
+
3
+ import re
4
+ import sys
5
+ from datetime import datetime, timedelta
6
+ from datetime import tzinfo as dt_tzinfo
7
+ from functools import lru_cache
8
+ from typing import (
9
+ Any,
10
+ ClassVar,
11
+ Dict,
12
+ Iterable,
13
+ List,
14
+ Match,
15
+ Optional,
16
+ Pattern,
17
+ SupportsFloat,
18
+ SupportsInt,
19
+ Tuple,
20
+ Union,
21
+ cast,
22
+ overload,
23
+ )
24
+
25
+ from dateutil import tz
26
+
27
+ from arrow import locales
28
+ from arrow.constants import DEFAULT_LOCALE
29
+ from arrow.util import next_weekday, normalize_timestamp
30
+
31
+ if sys.version_info < (3, 8): # pragma: no cover
32
+ from typing_extensions import Literal, TypedDict
33
+ else:
34
+ from typing import Literal, TypedDict # pragma: no cover
35
+
36
+
37
+ class ParserError(ValueError):
38
+ pass
39
+
40
+
41
+ # Allows for ParserErrors to be propagated from _build_datetime()
42
+ # when day_of_year errors occur.
43
+ # Before this, the ParserErrors were caught by the try/except in
44
+ # _parse_multiformat() and the appropriate error message was not
45
+ # transmitted to the user.
46
+ class ParserMatchError(ParserError):
47
+ pass
48
+
49
+
50
+ _WEEKDATE_ELEMENT = Union[str, bytes, SupportsInt, bytearray]
51
+
52
+ _FORMAT_TYPE = Literal[
53
+ "YYYY",
54
+ "YY",
55
+ "MM",
56
+ "M",
57
+ "DDDD",
58
+ "DDD",
59
+ "DD",
60
+ "D",
61
+ "HH",
62
+ "H",
63
+ "hh",
64
+ "h",
65
+ "mm",
66
+ "m",
67
+ "ss",
68
+ "s",
69
+ "X",
70
+ "x",
71
+ "ZZZ",
72
+ "ZZ",
73
+ "Z",
74
+ "S",
75
+ "W",
76
+ "MMMM",
77
+ "MMM",
78
+ "Do",
79
+ "dddd",
80
+ "ddd",
81
+ "d",
82
+ "a",
83
+ "A",
84
+ ]
85
+
86
+
87
+ class _Parts(TypedDict, total=False):
88
+ year: int
89
+ month: int
90
+ day_of_year: int
91
+ day: int
92
+ hour: int
93
+ minute: int
94
+ second: int
95
+ microsecond: int
96
+ timestamp: float
97
+ expanded_timestamp: int
98
+ tzinfo: dt_tzinfo
99
+ am_pm: Literal["am", "pm"]
100
+ day_of_week: int
101
+ weekdate: Tuple[_WEEKDATE_ELEMENT, _WEEKDATE_ELEMENT, Optional[_WEEKDATE_ELEMENT]]
102
+
103
+
104
+ class DateTimeParser:
105
+ _FORMAT_RE: ClassVar[Pattern[str]] = re.compile(
106
+ r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|x|X|W)"
107
+ )
108
+ _ESCAPE_RE: ClassVar[Pattern[str]] = re.compile(r"\[[^\[\]]*\]")
109
+
110
+ _ONE_OR_TWO_DIGIT_RE: ClassVar[Pattern[str]] = re.compile(r"\d{1,2}")
111
+ _ONE_OR_TWO_OR_THREE_DIGIT_RE: ClassVar[Pattern[str]] = re.compile(r"\d{1,3}")
112
+ _ONE_OR_MORE_DIGIT_RE: ClassVar[Pattern[str]] = re.compile(r"\d+")
113
+ _TWO_DIGIT_RE: ClassVar[Pattern[str]] = re.compile(r"\d{2}")
114
+ _THREE_DIGIT_RE: ClassVar[Pattern[str]] = re.compile(r"\d{3}")
115
+ _FOUR_DIGIT_RE: ClassVar[Pattern[str]] = re.compile(r"\d{4}")
116
+ _TZ_Z_RE: ClassVar[Pattern[str]] = re.compile(r"([\+\-])(\d{2})(?:(\d{2}))?|Z")
117
+ _TZ_ZZ_RE: ClassVar[Pattern[str]] = re.compile(r"([\+\-])(\d{2})(?:\:(\d{2}))?|Z")
118
+ _TZ_NAME_RE: ClassVar[Pattern[str]] = re.compile(r"\w[\w+\-/]+")
119
+ # NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will
120
+ # break cases like "15 Jul 2000" and a format list (see issue #447)
121
+ _TIMESTAMP_RE: ClassVar[Pattern[str]] = re.compile(r"^\-?\d+\.?\d+$")
122
+ _TIMESTAMP_EXPANDED_RE: ClassVar[Pattern[str]] = re.compile(r"^\-?\d+$")
123
+ _TIME_RE: ClassVar[Pattern[str]] = re.compile(
124
+ r"^(\d{2})(?:\:?(\d{2}))?(?:\:?(\d{2}))?(?:([\.\,])(\d+))?$"
125
+ )
126
+ _WEEK_DATE_RE: ClassVar[Pattern[str]] = re.compile(
127
+ r"(?P<year>\d{4})[\-]?W(?P<week>\d{2})[\-]?(?P<day>\d)?"
128
+ )
129
+
130
+ _BASE_INPUT_RE_MAP: ClassVar[Dict[_FORMAT_TYPE, Pattern[str]]] = {
131
+ "YYYY": _FOUR_DIGIT_RE,
132
+ "YY": _TWO_DIGIT_RE,
133
+ "MM": _TWO_DIGIT_RE,
134
+ "M": _ONE_OR_TWO_DIGIT_RE,
135
+ "DDDD": _THREE_DIGIT_RE,
136
+ "DDD": _ONE_OR_TWO_OR_THREE_DIGIT_RE,
137
+ "DD": _TWO_DIGIT_RE,
138
+ "D": _ONE_OR_TWO_DIGIT_RE,
139
+ "HH": _TWO_DIGIT_RE,
140
+ "H": _ONE_OR_TWO_DIGIT_RE,
141
+ "hh": _TWO_DIGIT_RE,
142
+ "h": _ONE_OR_TWO_DIGIT_RE,
143
+ "mm": _TWO_DIGIT_RE,
144
+ "m": _ONE_OR_TWO_DIGIT_RE,
145
+ "ss": _TWO_DIGIT_RE,
146
+ "s": _ONE_OR_TWO_DIGIT_RE,
147
+ "X": _TIMESTAMP_RE,
148
+ "x": _TIMESTAMP_EXPANDED_RE,
149
+ "ZZZ": _TZ_NAME_RE,
150
+ "ZZ": _TZ_ZZ_RE,
151
+ "Z": _TZ_Z_RE,
152
+ "S": _ONE_OR_MORE_DIGIT_RE,
153
+ "W": _WEEK_DATE_RE,
154
+ }
155
+
156
+ SEPARATORS: ClassVar[List[str]] = ["-", "/", "."]
157
+
158
+ locale: locales.Locale
159
+ _input_re_map: Dict[_FORMAT_TYPE, Pattern[str]]
160
+
161
+ def __init__(self, locale: str = DEFAULT_LOCALE, cache_size: int = 0) -> None:
162
+ self.locale = locales.get_locale(locale)
163
+ self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
164
+ self._input_re_map.update(
165
+ {
166
+ "MMMM": self._generate_choice_re(
167
+ self.locale.month_names[1:], re.IGNORECASE
168
+ ),
169
+ "MMM": self._generate_choice_re(
170
+ self.locale.month_abbreviations[1:], re.IGNORECASE
171
+ ),
172
+ "Do": re.compile(self.locale.ordinal_day_re),
173
+ "dddd": self._generate_choice_re(
174
+ self.locale.day_names[1:], re.IGNORECASE
175
+ ),
176
+ "ddd": self._generate_choice_re(
177
+ self.locale.day_abbreviations[1:], re.IGNORECASE
178
+ ),
179
+ "d": re.compile(r"[1-7]"),
180
+ "a": self._generate_choice_re(
181
+ (self.locale.meridians["am"], self.locale.meridians["pm"])
182
+ ),
183
+ # note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to
184
+ # ensure backwards compatibility of this token
185
+ "A": self._generate_choice_re(self.locale.meridians.values()),
186
+ }
187
+ )
188
+ if cache_size > 0:
189
+ self._generate_pattern_re = lru_cache(maxsize=cache_size)( # type: ignore
190
+ self._generate_pattern_re
191
+ )
192
+
193
+ # TODO: since we support more than ISO 8601, we should rename this function
194
+ # IDEA: break into multiple functions
195
+ def parse_iso(
196
+ self, datetime_string: str, normalize_whitespace: bool = False
197
+ ) -> datetime:
198
+ if normalize_whitespace:
199
+ datetime_string = re.sub(r"\s+", " ", datetime_string.strip())
200
+
201
+ has_space_divider = " " in datetime_string
202
+ has_t_divider = "T" in datetime_string
203
+
204
+ num_spaces = datetime_string.count(" ")
205
+ if has_space_divider and num_spaces != 1 or has_t_divider and num_spaces > 0:
206
+ raise ParserError(
207
+ f"Expected an ISO 8601-like string, but was given {datetime_string!r}. "
208
+ "Try passing in a format string to resolve this."
209
+ )
210
+
211
+ has_time = has_space_divider or has_t_divider
212
+ has_tz = False
213
+
214
+ # date formats (ISO 8601 and others) to test against
215
+ # NOTE: YYYYMM is omitted to avoid confusion with YYMMDD (no longer part of ISO 8601, but is still often used)
216
+ formats = [
217
+ "YYYY-MM-DD",
218
+ "YYYY-M-DD",
219
+ "YYYY-M-D",
220
+ "YYYY/MM/DD",
221
+ "YYYY/M/DD",
222
+ "YYYY/M/D",
223
+ "YYYY.MM.DD",
224
+ "YYYY.M.DD",
225
+ "YYYY.M.D",
226
+ "YYYYMMDD",
227
+ "YYYY-DDDD",
228
+ "YYYYDDDD",
229
+ "YYYY-MM",
230
+ "YYYY/MM",
231
+ "YYYY.MM",
232
+ "YYYY",
233
+ "W",
234
+ ]
235
+
236
+ if has_time:
237
+ if has_space_divider:
238
+ date_string, time_string = datetime_string.split(" ", 1)
239
+ else:
240
+ date_string, time_string = datetime_string.split("T", 1)
241
+
242
+ time_parts = re.split(
243
+ r"[\+\-Z]", time_string, maxsplit=1, flags=re.IGNORECASE
244
+ )
245
+
246
+ time_components: Optional[Match[str]] = self._TIME_RE.match(time_parts[0])
247
+
248
+ if time_components is None:
249
+ raise ParserError(
250
+ "Invalid time component provided. "
251
+ "Please specify a format or provide a valid time component in the basic or extended ISO 8601 time format."
252
+ )
253
+
254
+ (
255
+ hours,
256
+ minutes,
257
+ seconds,
258
+ subseconds_sep,
259
+ subseconds,
260
+ ) = time_components.groups()
261
+
262
+ has_tz = len(time_parts) == 2
263
+ has_minutes = minutes is not None
264
+ has_seconds = seconds is not None
265
+ has_subseconds = subseconds is not None
266
+
267
+ is_basic_time_format = ":" not in time_parts[0]
268
+ tz_format = "Z"
269
+
270
+ # use 'ZZ' token instead since tz offset is present in non-basic format
271
+ if has_tz and ":" in time_parts[1]:
272
+ tz_format = "ZZ"
273
+
274
+ time_sep = "" if is_basic_time_format else ":"
275
+
276
+ if has_subseconds:
277
+ time_string = "HH{time_sep}mm{time_sep}ss{subseconds_sep}S".format(
278
+ time_sep=time_sep, subseconds_sep=subseconds_sep
279
+ )
280
+ elif has_seconds:
281
+ time_string = "HH{time_sep}mm{time_sep}ss".format(time_sep=time_sep)
282
+ elif has_minutes:
283
+ time_string = f"HH{time_sep}mm"
284
+ else:
285
+ time_string = "HH"
286
+
287
+ if has_space_divider:
288
+ formats = [f"{f} {time_string}" for f in formats]
289
+ else:
290
+ formats = [f"{f}T{time_string}" for f in formats]
291
+
292
+ if has_time and has_tz:
293
+ # Add "Z" or "ZZ" to the format strings to indicate to
294
+ # _parse_token() that a timezone needs to be parsed
295
+ formats = [f"{f}{tz_format}" for f in formats]
296
+
297
+ return self._parse_multiformat(datetime_string, formats)
298
+
299
+ def parse(
300
+ self,
301
+ datetime_string: str,
302
+ fmt: Union[List[str], str],
303
+ normalize_whitespace: bool = False,
304
+ ) -> datetime:
305
+ if normalize_whitespace:
306
+ datetime_string = re.sub(r"\s+", " ", datetime_string)
307
+
308
+ if isinstance(fmt, list):
309
+ return self._parse_multiformat(datetime_string, fmt)
310
+
311
+ try:
312
+ fmt_tokens: List[_FORMAT_TYPE]
313
+ fmt_pattern_re: Pattern[str]
314
+ fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt)
315
+ except re.error as e:
316
+ raise ParserMatchError(
317
+ f"Failed to generate regular expression pattern: {e}."
318
+ )
319
+
320
+ match = fmt_pattern_re.search(datetime_string)
321
+
322
+ if match is None:
323
+ raise ParserMatchError(
324
+ f"Failed to match {fmt!r} when parsing {datetime_string!r}."
325
+ )
326
+
327
+ parts: _Parts = {}
328
+ for token in fmt_tokens:
329
+ value: Union[Tuple[str, str, str], str]
330
+ if token == "Do":
331
+ value = match.group("value")
332
+ elif token == "W":
333
+ value = (match.group("year"), match.group("week"), match.group("day"))
334
+ else:
335
+ value = match.group(token)
336
+
337
+ if value is None:
338
+ raise ParserMatchError(
339
+ f"Unable to find a match group for the specified token {token!r}."
340
+ )
341
+
342
+ self._parse_token(token, value, parts) # type: ignore[arg-type]
343
+
344
+ return self._build_datetime(parts)
345
+
346
+ def _generate_pattern_re(self, fmt: str) -> Tuple[List[_FORMAT_TYPE], Pattern[str]]:
347
+ # fmt is a string of tokens like 'YYYY-MM-DD'
348
+ # we construct a new string by replacing each
349
+ # token by its pattern:
350
+ # 'YYYY-MM-DD' -> '(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>\d{2})'
351
+ tokens: List[_FORMAT_TYPE] = []
352
+ offset = 0
353
+
354
+ # Escape all special RegEx chars
355
+ escaped_fmt = re.escape(fmt)
356
+
357
+ # Extract the bracketed expressions to be reinserted later.
358
+ escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt)
359
+
360
+ # Any number of S is the same as one.
361
+ # TODO: allow users to specify the number of digits to parse
362
+ escaped_fmt = re.sub(r"S+", "S", escaped_fmt)
363
+
364
+ escaped_data = re.findall(self._ESCAPE_RE, fmt)
365
+
366
+ fmt_pattern = escaped_fmt
367
+
368
+ for m in self._FORMAT_RE.finditer(escaped_fmt):
369
+ token: _FORMAT_TYPE = cast(_FORMAT_TYPE, m.group(0))
370
+ try:
371
+ input_re = self._input_re_map[token]
372
+ except KeyError:
373
+ raise ParserError(f"Unrecognized token {token!r}.")
374
+ input_pattern = f"(?P<{token}>{input_re.pattern})"
375
+ tokens.append(token)
376
+ # a pattern doesn't have the same length as the token
377
+ # it replaces! We keep the difference in the offset variable.
378
+ # This works because the string is scanned left-to-right and matches
379
+ # are returned in the order found by finditer.
380
+ fmt_pattern = (
381
+ fmt_pattern[: m.start() + offset]
382
+ + input_pattern
383
+ + fmt_pattern[m.end() + offset :]
384
+ )
385
+ offset += len(input_pattern) - (m.end() - m.start())
386
+
387
+ final_fmt_pattern = ""
388
+ split_fmt = fmt_pattern.split(r"\#")
389
+
390
+ # Due to the way Python splits, 'split_fmt' will always be longer
391
+ for i in range(len(split_fmt)):
392
+ final_fmt_pattern += split_fmt[i]
393
+ if i < len(escaped_data):
394
+ final_fmt_pattern += escaped_data[i][1:-1]
395
+
396
+ # Wrap final_fmt_pattern in a custom word boundary to strictly
397
+ # match the formatting pattern and filter out date and time formats
398
+ # that include junk such as: blah1998-09-12 blah, blah 1998-09-12blah,
399
+ # blah1998-09-12blah. The custom word boundary matches every character
400
+ # that is not a whitespace character to allow for searching for a date
401
+ # and time string in a natural language sentence. Therefore, searching
402
+ # for a string of the form YYYY-MM-DD in "blah 1998-09-12 blah" will
403
+ # work properly.
404
+ # Certain punctuation before or after the target pattern such as
405
+ # "1998-09-12," is permitted. For the full list of valid punctuation,
406
+ # see the documentation.
407
+
408
+ starting_word_boundary = (
409
+ r"(?<!\S\S)" # Don't have two consecutive non-whitespace characters. This ensures that we allow cases
410
+ # like .11.25.2019 but not 1.11.25.2019 (for pattern MM.DD.YYYY)
411
+ r"(?<![^\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)<>\s])" # This is the list of punctuation that is ok before the
412
+ # pattern (i.e. "It can't not be these characters before the pattern")
413
+ r"(\b|^)"
414
+ # The \b is to block cases like 1201912 but allow 201912 for pattern YYYYMM. The ^ was necessary to allow a
415
+ # negative number through i.e. before epoch numbers
416
+ )
417
+ ending_word_boundary = (
418
+ r"(?=[\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>]?" # Positive lookahead stating that these punctuation marks
419
+ # can appear after the pattern at most 1 time
420
+ r"(?!\S))" # Don't allow any non-whitespace character after the punctuation
421
+ )
422
+ bounded_fmt_pattern = r"{}{}{}".format(
423
+ starting_word_boundary, final_fmt_pattern, ending_word_boundary
424
+ )
425
+
426
+ return tokens, re.compile(bounded_fmt_pattern, flags=re.IGNORECASE)
427
+
428
@overload
def _parse_token(
    self,
    token: Literal[
        "YYYY",
        "YY",
        "MM",
        "M",
        "DDDD",
        "DDD",
        "DD",
        "D",
        "Do",
        "HH",
        "hh",
        "h",
        "H",
        "mm",
        "m",
        "ss",
        "s",
        "x",
    ],
    value: Union[str, bytes, SupportsInt, bytearray],
    parts: _Parts,
) -> None:
    ...  # pragma: no cover

@overload
def _parse_token(
    self,
    token: Literal["X"],
    value: Union[str, bytes, SupportsFloat, bytearray],
    parts: _Parts,
) -> None:
    ...  # pragma: no cover

@overload
def _parse_token(
    self,
    token: Literal["MMMM", "MMM", "dddd", "ddd", "S"],
    value: Union[str, bytes, bytearray],
    parts: _Parts,
) -> None:
    ...  # pragma: no cover

@overload
def _parse_token(
    self,
    token: Literal["a", "A", "ZZZ", "ZZ", "Z"],
    value: Union[str, bytes],
    parts: _Parts,
) -> None:
    ...  # pragma: no cover

@overload
def _parse_token(
    self,
    token: Literal["W"],
    value: Tuple[_WEEKDATE_ELEMENT, _WEEKDATE_ELEMENT, Optional[_WEEKDATE_ELEMENT]],
    parts: _Parts,
) -> None:
    ...  # pragma: no cover

def _parse_token(
    self,
    token: Any,
    value: Any,
    parts: _Parts,
) -> None:
    """Convert one matched format token's captured *value* and store it in *parts*.

    :param token: the format token that matched (e.g. ``"YYYY"``, ``"MM"``, ``"a"``).
    :param value: the raw captured value for that token; its type depends on the
        token (see the overloads above).
    :param parts: mutable mapping of datetime components, updated in place.
    :raises ParserMatchError: for an ``a``/``A`` token when an already-parsed hour
        is outside 0-12.
    """
    if token == "YYYY":
        parts["year"] = int(value)

    elif token == "YY":
        # Two-digit years: 69-99 map to 1969-1999, 00-68 map to 2000-2068.
        value = int(value)
        parts["year"] = 1900 + value if value > 68 else 2000 + value

    elif token in ["MMMM", "MMM"]:
        # FIXME: month_number() is nullable
        parts["month"] = self.locale.month_number(value.lower())  # type: ignore[typeddict-item]

    elif token in ["MM", "M"]:
        parts["month"] = int(value)

    elif token in ["DDDD", "DDD"]:
        parts["day_of_year"] = int(value)

    elif token in ["DD", "D"]:
        parts["day"] = int(value)

    elif token == "Do":
        # Ordinal day ("1st", "2nd", ...): the regex captures only the digits.
        parts["day"] = int(value)

    elif token == "dddd":
        # locale day names are 1-indexed
        day_of_week = [x.lower() for x in self.locale.day_names].index(
            value.lower()
        )
        parts["day_of_week"] = day_of_week - 1

    elif token == "ddd":
        # locale day abbreviations are 1-indexed
        day_of_week = [x.lower() for x in self.locale.day_abbreviations].index(
            value.lower()
        )
        parts["day_of_week"] = day_of_week - 1

    elif token.upper() in ["HH", "H"]:
        # Covers "HH"/"H" (24-hour) and "hh"/"h" (12-hour) via .upper().
        parts["hour"] = int(value)

    elif token in ["mm", "m"]:
        parts["minute"] = int(value)

    elif token in ["ss", "s"]:
        parts["second"] = int(value)

    elif token == "S":
        # We have the *most significant* digits of an arbitrary-precision integer.
        # We want the six most significant digits as an integer, rounded.
        # IDEA: add nanosecond support somehow? Need datetime support for it first.
        value = value.ljust(7, "0")

        # floating-point (IEEE-754) defaults to half-to-even rounding
        seventh_digit = int(value[6])
        if seventh_digit == 5:
            rounding = int(value[5]) % 2
        elif seventh_digit > 5:
            rounding = 1
        else:
            rounding = 0

        parts["microsecond"] = int(value[:6]) + rounding

    elif token == "X":
        # Unix timestamp in (possibly fractional) seconds.
        parts["timestamp"] = float(value)

    elif token == "x":
        # Expanded timestamp (ms/us); normalized later in _build_datetime.
        parts["expanded_timestamp"] = int(value)

    elif token in ["ZZZ", "ZZ", "Z"]:
        parts["tzinfo"] = TzinfoParser.parse(value)

    elif token in ["a", "A"]:
        if value in (self.locale.meridians["am"], self.locale.meridians["AM"]):
            parts["am_pm"] = "am"
            if "hour" in parts and not 0 <= parts["hour"] <= 12:
                raise ParserMatchError(
                    f"Hour token value must be between 0 and 12 inclusive for token {token!r}."
                )
        elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]):
            parts["am_pm"] = "pm"
    elif token == "W":
        # ISO week date tuple (year, week, optional day); resolved in _build_datetime.
        parts["weekdate"] = value
581
+
582
@staticmethod
def _build_datetime(parts: _Parts) -> datetime:
    """Assemble a :class:`datetime` from the token values collected in *parts*.

    Resolution order matters: week dates and day-of-year are first expanded
    into year/month/day; a plain or expanded timestamp short-circuits
    everything else; day-of-week is only used when no explicit day was given.

    :param parts: mapping of parsed components produced by ``_parse_token``.
    :raises ParserError: for invalid component combinations (e.g. DDD without
        a year, or a 24:00 time with non-zero minutes/seconds/microseconds).
    """
    weekdate = parts.get("weekdate")

    if weekdate is not None:
        year, week = int(weekdate[0]), int(weekdate[1])

        if weekdate[2] is not None:
            _day = int(weekdate[2])
        else:
            # day not given, default to 1
            _day = 1

        date_string = f"{year}-{week}-{_day}"

        # tokens for ISO 8601 weekdates
        dt = datetime.strptime(date_string, "%G-%V-%u")

        parts["year"] = dt.year
        parts["month"] = dt.month
        parts["day"] = dt.day

    timestamp = parts.get("timestamp")

    # A timestamp overrides all other components; it is always UTC.
    if timestamp is not None:
        return datetime.fromtimestamp(timestamp, tz=tz.tzutc())

    expanded_timestamp = parts.get("expanded_timestamp")

    if expanded_timestamp is not None:
        return datetime.fromtimestamp(
            normalize_timestamp(expanded_timestamp),
            tz=tz.tzutc(),
        )

    day_of_year = parts.get("day_of_year")

    if day_of_year is not None:
        _year = parts.get("year")
        month = parts.get("month")
        if _year is None:
            raise ParserError(
                "Year component is required with the DDD and DDDD tokens."
            )

        if month is not None:
            raise ParserError(
                "Month component is not allowed with the DDD and DDDD tokens."
            )

        # Let strptime validate the day-of-year range for the given year.
        date_string = f"{_year}-{day_of_year}"
        try:
            dt = datetime.strptime(date_string, "%Y-%j")
        except ValueError:
            raise ParserError(
                f"The provided day of year {day_of_year!r} is invalid."
            )

        parts["year"] = dt.year
        parts["month"] = dt.month
        parts["day"] = dt.day

    day_of_week: Optional[int] = parts.get("day_of_week")
    day = parts.get("day")

    # If day is passed, ignore day of week
    if day_of_week is not None and day is None:
        year = parts.get("year", 1970)
        month = parts.get("month", 1)
        day = 1

        # dddd => first day of week after epoch
        # dddd YYYY => first day of week in specified year
        # dddd MM YYYY => first day of week in specified year and month
        # dddd MM => first day after epoch in specified month
        next_weekday_dt = next_weekday(datetime(year, month, day), day_of_week)
        parts["year"] = next_weekday_dt.year
        parts["month"] = next_weekday_dt.month
        parts["day"] = next_weekday_dt.day

    am_pm = parts.get("am_pm")
    hour = parts.get("hour", 0)

    # Convert a 12-hour clock value to 24-hour based on the meridian.
    if am_pm == "pm" and hour < 12:
        hour += 12
    elif am_pm == "am" and hour == 12:
        hour = 0

    # Support for midnight at the end of day
    if hour == 24:
        if parts.get("minute", 0) != 0:
            raise ParserError("Midnight at the end of day must not contain minutes")
        if parts.get("second", 0) != 0:
            raise ParserError("Midnight at the end of day must not contain seconds")
        if parts.get("microsecond", 0) != 0:
            raise ParserError(
                "Midnight at the end of day must not contain microseconds"
            )
        # 24:00 is represented as 00:00 of the following day.
        hour = 0
        day_increment = 1
    else:
        day_increment = 0

    # account for rounding up to 1000000
    microsecond = parts.get("microsecond", 0)
    if microsecond == 1000000:
        microsecond = 0
        second_increment = 1
    else:
        second_increment = 0

    increment = timedelta(days=day_increment, seconds=second_increment)

    return (
        datetime(
            year=parts.get("year", 1),
            month=parts.get("month", 1),
            day=parts.get("day", 1),
            hour=hour,
            minute=parts.get("minute", 0),
            second=parts.get("second", 0),
            microsecond=microsecond,
            tzinfo=parts.get("tzinfo"),
        )
        + increment
    )
708
+
709
def _parse_multiformat(self, string: str, formats: Iterable[str]) -> datetime:
    """Try each format in *formats* in order and return the first successful parse.

    :param string: the datetime string to parse.
    :param formats: format strings to attempt, in priority order.
    :raises ParserError: if no format matches *string*.
    """
    # Materialize the iterable once: the loop below consumes it, and a
    # one-shot iterator/generator would otherwise be exhausted by the time
    # the error message tries to list the supported formats.
    format_list = list(formats)

    _datetime: Optional[datetime] = None

    for fmt in format_list:
        try:
            _datetime = self.parse(string, fmt)
            break
        except ParserMatchError:
            # Not an error yet; keep trying the remaining formats.
            pass

    if _datetime is None:
        supported_formats = ", ".join(format_list)
        raise ParserError(
            f"Could not match input {string!r} to any of the following formats: {supported_formats}."
        )

    return _datetime
726
+
727
# generates a capture group of choices separated by an OR operator
@staticmethod
def _generate_choice_re(
    choices: Iterable[str], flags: Union[int, re.RegexFlag] = 0
) -> Pattern[str]:
    """Compile a regex matching any one of *choices* as a single capture group."""
    alternatives = "|".join(choices)
    return re.compile(f"({alternatives})", flags=flags)
733
+
734
+
735
class TzinfoParser:
    """Parses timezone expressions ("local", UTC aliases, numeric offsets, and
    named zones) into :class:`datetime.tzinfo` objects."""

    # Numeric offsets such as "+05:00", "-0330", optionally prefixed "(UTC".
    _TZINFO_RE: ClassVar[Pattern[str]] = re.compile(
        r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?"
    )

    @classmethod
    def parse(cls, tzinfo_string: str) -> dt_tzinfo:
        """Convert *tzinfo_string* into a tzinfo.

        :raises ParserError: when the expression cannot be resolved.
        """
        if tzinfo_string == "local":
            return tz.tzlocal()

        if tzinfo_string in ("utc", "UTC", "Z"):
            return tz.tzutc()

        parsed: Optional[dt_tzinfo]
        offset_match = cls._TZINFO_RE.match(tzinfo_string)

        if offset_match:
            sign, hours, minutes = offset_match.groups()
            # A missing minutes group means a whole-hour offset.
            offset_seconds = int(hours) * 3600 + int(minutes or 0) * 60
            if sign == "-":
                offset_seconds = -offset_seconds
            parsed = tz.tzoffset(None, offset_seconds)
        else:
            # Fall back to a named zone lookup (e.g. "US/Pacific").
            parsed = tz.gettz(tzinfo_string)

        if parsed is None:
            raise ParserError(f"Could not parse timezone expression {tzinfo_string!r}.")

        return parsed
venv/Lib/site-packages/arrow/py.typed ADDED
File without changes
venv/Lib/site-packages/arrow/util.py ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Helpful functions used internally within arrow."""
2
+
3
+ import datetime
4
+ from typing import Any, Optional, cast
5
+
6
+ from dateutil.rrule import WEEKLY, rrule
7
+
8
+ from arrow.constants import (
9
+ MAX_ORDINAL,
10
+ MAX_TIMESTAMP,
11
+ MAX_TIMESTAMP_MS,
12
+ MAX_TIMESTAMP_US,
13
+ MIN_ORDINAL,
14
+ )
15
+
16
+
17
def next_weekday(
    start_date: Optional[datetime.date], weekday: int
) -> datetime.datetime:
    """Return the first occurrence of *weekday* on or after *start_date*.

    :param start_date: Datetime object representing the start date.
    :param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday).
    :return: Datetime object corresponding to the next weekday after start_date.

    Usage::

        # Get first Monday after epoch
        >>> next_weekday(datetime(1970, 1, 1), 0)
        1970-01-05 00:00:00

        # Get first Thursday after epoch
        >>> next_weekday(datetime(1970, 1, 1), 3)
        1970-01-01 00:00:00

        # Get first Sunday after epoch
        >>> next_weekday(datetime(1970, 1, 1), 6)
        1970-01-04 00:00:00
    """
    if not 0 <= weekday <= 6:
        raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).")
    # The first WEEKLY occurrence is start_date itself when it already falls
    # on the requested weekday; rrule always yields datetime objects.
    occurrence = rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0]
    return cast(datetime.datetime, occurrence)
46
+
47
+
48
def is_timestamp(value: Any) -> bool:
    """Check if value is a valid timestamp."""
    # Booleans are ints, but never sensible timestamps; reject them and any
    # type that float() does not accept as a number-like input.
    if isinstance(value, bool) or not isinstance(value, (int, float, str)):
        return False
    try:
        float(value)
    except ValueError:
        return False
    return True
59
+
60
+
61
def validate_ordinal(value: Any) -> None:
    """Raise an exception if value is an invalid Gregorian ordinal.

    :param value: the input to be checked

    """
    # bool subclasses int, so exclude it explicitly.
    is_plain_int = isinstance(value, int) and not isinstance(value, bool)
    if not is_plain_int:
        raise TypeError(f"Ordinal must be an integer (got type {type(value)}).")
    if value < MIN_ORDINAL or value > MAX_ORDINAL:
        raise ValueError(f"Ordinal {value} is out of range.")
71
+
72
+
73
def normalize_timestamp(timestamp: float) -> float:
    """Normalize millisecond and microsecond timestamps into normal timestamps."""
    # Plausible second-resolution values pass through unchanged.
    if timestamp <= MAX_TIMESTAMP:
        return timestamp
    if timestamp < MAX_TIMESTAMP_MS:
        # Millisecond resolution.
        return timestamp / 1000
    if timestamp < MAX_TIMESTAMP_US:
        # Microsecond resolution.
        return timestamp / 1_000_000
    raise ValueError(f"The specified timestamp {timestamp!r} is too large.")
83
+
84
+
85
# Credit to https://stackoverflow.com/a/1700069
def iso_to_gregorian(iso_year: int, iso_week: int, iso_day: int) -> datetime.date:
    """Converts an ISO week date into a datetime object.

    :param iso_year: the year
    :param iso_week: the week number, each year has either 52 or 53 weeks
    :param iso_day: the day numbered 1 through 7, beginning with Monday

    """
    if not 1 <= iso_week <= 53:
        raise ValueError("ISO Calendar week value must be between 1-53.")

    if not 1 <= iso_day <= 7:
        raise ValueError("ISO Calendar day value must be between 1-7")

    # The first week of the year always contains 4 Jan, so the Monday of
    # that week anchors the whole ISO year.
    anchor = datetime.date(iso_year, 1, 4)
    week_one_monday = anchor - datetime.timedelta(days=anchor.isoweekday() - 1)

    return week_one_monday + datetime.timedelta(weeks=iso_week - 1, days=iso_day - 1)
108
+
109
+
110
def validate_bounds(bounds: str) -> None:
    """Raise ValueError unless *bounds* is one of the four interval notations."""
    if bounds not in ("()", "(]", "[)", "[]"):
        raise ValueError(
            "Invalid bounds. Please select between '()', '(]', '[)', or '[]'."
        )
115
+
116
+
117
# Names re-exported by ``from arrow.util import *``.
__all__ = ["next_weekday", "is_timestamp", "validate_ordinal", "iso_to_gregorian"]
venv/Lib/site-packages/asttokens-3.0.0.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
venv/Lib/site-packages/asttokens-3.0.0.dist-info/LICENSE ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "{}"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright {yyyy} {name of copyright owner}
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
venv/Lib/site-packages/asttokens-3.0.0.dist-info/METADATA ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: asttokens
3
+ Version: 3.0.0
4
+ Summary: Annotate AST trees with source code positions
5
+ Home-page: https://github.com/gristlabs/asttokens
6
+ Author: Dmitry Sagalovskiy, Grist Labs
7
+ Author-email: [email protected]
8
+ License: Apache 2.0
9
+ Keywords: code,ast,parse,tokenize,refactor
10
+ Classifier: Development Status :: 5 - Production/Stable
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
13
+ Classifier: Topic :: Software Development :: Code Generators
14
+ Classifier: Topic :: Software Development :: Compilers
15
+ Classifier: Topic :: Software Development :: Interpreters
16
+ Classifier: Topic :: Software Development :: Pre-processors
17
+ Classifier: Environment :: Console
18
+ Classifier: Operating System :: OS Independent
19
+ Classifier: Programming Language :: Python :: 3.8
20
+ Classifier: Programming Language :: Python :: 3.9
21
+ Classifier: Programming Language :: Python :: 3.10
22
+ Classifier: Programming Language :: Python :: 3.11
23
+ Classifier: Programming Language :: Python :: 3.12
24
+ Classifier: Programming Language :: Python :: 3.13
25
+ Classifier: Programming Language :: Python :: Implementation :: CPython
26
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
27
+ Requires-Python: >=3.8
28
+ License-File: LICENSE
29
+ Provides-Extra: astroid
30
+ Requires-Dist: astroid<4,>=2; extra == "astroid"
31
+ Provides-Extra: test
32
+ Requires-Dist: astroid<4,>=2; extra == "test"
33
+ Requires-Dist: pytest; extra == "test"
34
+ Requires-Dist: pytest-cov; extra == "test"
35
+ Requires-Dist: pytest-xdist; extra == "test"
36
+
37
+ ASTTokens
38
+ =========
39
+
40
+ .. image:: https://img.shields.io/pypi/v/asttokens.svg
41
+ :target: https://pypi.python.org/pypi/asttokens/
42
+ .. image:: https://img.shields.io/pypi/pyversions/asttokens.svg
43
+ :target: https://pypi.python.org/pypi/asttokens/
44
+ .. image:: https://github.com/gristlabs/asttokens/actions/workflows/build-and-test.yml/badge.svg
45
+ :target: https://github.com/gristlabs/asttokens/actions/workflows/build-and-test.yml
46
+ .. image:: https://readthedocs.org/projects/asttokens/badge/?version=latest
47
+ :target: http://asttokens.readthedocs.io/en/latest/index.html
48
+ .. image:: https://coveralls.io/repos/github/gristlabs/asttokens/badge.svg
49
+ :target: https://coveralls.io/github/gristlabs/asttokens
50
+
51
+ .. Start of user-guide
52
+
53
+ The ``asttokens`` module annotates Python abstract syntax trees (ASTs) with the positions of tokens
54
+ and text in the source code that generated them.
55
+
56
+ It makes it possible for tools that work with logical AST nodes to find the particular text that
57
+ resulted in those nodes, for example for automated refactoring or highlighting.
58
+
59
+ Installation
60
+ ------------
61
+ asttokens is available on PyPI: https://pypi.python.org/pypi/asttokens/::
62
+
63
+ pip install asttokens
64
+
65
+ The code is on GitHub: https://github.com/gristlabs/asttokens.
66
+
67
+ The API Reference is here: http://asttokens.readthedocs.io/en/latest/api-index.html.
68
+
69
+ Usage
70
+ -----
71
+
72
+ ASTTokens can annotate both trees built by `ast <https://docs.python.org/2/library/ast.html>`_,
73
+ AND those built by `astroid <https://github.com/PyCQA/astroid>`_.
74
+
75
+ Here's an example:
76
+
77
+ .. code-block:: python
78
+
79
+ import asttokens, ast
80
+ source = "Robot('blue').walk(steps=10*n)"
81
+ atok = asttokens.ASTTokens(source, parse=True)
82
+
83
+ Once the tree has been marked, nodes get ``.first_token``, ``.last_token`` attributes, and
84
+ the ``ASTTokens`` object offers helpful methods:
85
+
86
+ .. code-block:: python
87
+
88
+ attr_node = next(n for n in ast.walk(atok.tree) if isinstance(n, ast.Attribute))
89
+ print(atok.get_text(attr_node))
90
+ start, end = attr_node.last_token.startpos, attr_node.last_token.endpos
91
+ print(atok.text[:start] + 'RUN' + atok.text[end:])
92
+
93
+ Which produces this output:
94
+
95
+ .. code-block:: text
96
+
97
+ Robot('blue').walk
98
+ Robot('blue').RUN(steps=10*n)
99
+
100
+ The ``ASTTokens`` object also offers methods to walk and search the list of tokens that make up
101
+ the code (or a particular AST node), which is more useful and powerful than dealing with the text
102
+ directly.
103
+
104
+
105
+ Contribute
106
+ ----------
107
+
108
+ To contribute:
109
+
110
+ 1. Fork this repository, and clone your fork.
111
+ 2. Install the package with test dependencies (ideally in a virtualenv) with::
112
+
113
+ pip install -e '.[test]'
114
+
115
+ 3. Run tests in your current interpreter with the command ``pytest`` or ``python -m pytest``.
116
+ 4. Run tests across all supported interpreters with the ``tox`` command. You will need to have the interpreters installed separately. We recommend ``pyenv`` for that. Use ``tox -p auto`` to run the tests in parallel.
117
+ 5. By default certain tests which take a very long time to run are skipped, but they are run in CI.
118
+ These are marked using the ``pytest`` marker ``slow`` and can be run on their own with ``pytest -m slow`` or as part of the full suite with ``pytest -m ''``.
venv/Lib/site-packages/asttokens-3.0.0.dist-info/RECORD ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ asttokens-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ asttokens-3.0.0.dist-info/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
3
+ asttokens-3.0.0.dist-info/METADATA,sha256=cg1yWNJgO6xzqQzaKsQoKJuKZMEfuJAh07iQLAgNv6k,4726
4
+ asttokens-3.0.0.dist-info/RECORD,,
5
+ asttokens-3.0.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
6
+ asttokens-3.0.0.dist-info/top_level.txt,sha256=nJDweSD7_NBhOlR3c8bkKJMKM-pxlAS8Kyh8GcCT2dk,10
7
+ asttokens/__init__.py,sha256=8eONA3X-9s93-v-2gEoz4649fDUpvzBthFB5Ld7dHAg,962
8
+ asttokens/__pycache__/__init__.cpython-312.pyc,,
9
+ asttokens/__pycache__/astroid_compat.cpython-312.pyc,,
10
+ asttokens/__pycache__/asttokens.cpython-312.pyc,,
11
+ asttokens/__pycache__/line_numbers.cpython-312.pyc,,
12
+ asttokens/__pycache__/mark_tokens.cpython-312.pyc,,
13
+ asttokens/__pycache__/util.cpython-312.pyc,,
14
+ asttokens/__pycache__/version.cpython-312.pyc,,
15
+ asttokens/astroid_compat.py,sha256=ilaVBRWcHpQ3ZLBSBs9usUwnLW3Orfn6sM89cMN8zNI,586
16
+ asttokens/asttokens.py,sha256=CQZ0ppXgTzHGbK4dqI4toSLywHIiqNK8jIVqbQClzYI,17760
17
+ asttokens/line_numbers.py,sha256=ODbdlHI4Iht4UnSfsxmOHCIVw4c2XX7j-MdaCa6F8bo,2834
18
+ asttokens/mark_tokens.py,sha256=YKE88IHnYyQiNvlFlxqU-BDhRRWkYYjMEsjxKlF1cqw,21012
19
+ asttokens/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
+ asttokens/util.py,sha256=zkszPUVGR0-UxZJI-I4lTrA7yH2IUOz8IBmwGas-pbs,17286
21
+ asttokens/version.py,sha256=EPmgXOdWKks5S__ZMH7Nu6xpAeVrZpfxaFy4pykuyeI,22
venv/Lib/site-packages/asttokens-3.0.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (75.6.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
venv/Lib/site-packages/asttokens-3.0.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ asttokens
venv/Lib/site-packages/asttokens/__init__.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Grist Labs, Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """
16
+ This module enhances the Python AST tree with token and source code information, sufficent to
17
+ detect the source text of each AST node. This is helpful for tools that make source code
18
+ transformations.
19
+ """
20
+
21
+ from .line_numbers import LineNumbers
22
+ from .asttokens import ASTText, ASTTokens, supports_tokenless
23
+
24
+ __all__ = ['ASTText', 'ASTTokens', 'LineNumbers', 'supports_tokenless']
venv/Lib/site-packages/asttokens/astroid_compat.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ try:
2
+ from astroid import nodes as astroid_node_classes
3
+
4
+ # astroid_node_classes should be whichever module has the NodeNG class
5
+ from astroid.nodes import NodeNG
6
+ from astroid.nodes import BaseContainer
7
+ except Exception:
8
+ try:
9
+ from astroid import node_classes as astroid_node_classes
10
+ from astroid.node_classes import NodeNG
11
+ from astroid.node_classes import _BaseContainer as BaseContainer
12
+ except Exception: # pragma: no cover
13
+ astroid_node_classes = None
14
+ NodeNG = None
15
+ BaseContainer = None
16
+
17
+
18
+ __all__ = ["astroid_node_classes", "NodeNG", "BaseContainer"]
venv/Lib/site-packages/asttokens/asttokens.py ADDED
@@ -0,0 +1,450 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Grist Labs, Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import abc
16
+ import ast
17
+ import bisect
18
+ import sys
19
+ import token
20
+ from ast import Module
21
+ from typing import Iterable, Iterator, List, Optional, Tuple, Any, cast, TYPE_CHECKING
22
+
23
+ from .line_numbers import LineNumbers
24
+ from .util import (
25
+ Token, match_token, is_non_coding_token, patched_generate_tokens, last_stmt,
26
+ annotate_fstring_nodes, generate_tokens, is_module, is_stmt
27
+ )
28
+
29
+ if TYPE_CHECKING: # pragma: no cover
30
+ from .util import AstNode, TokenInfo
31
+
32
+
33
+ class ASTTextBase(metaclass=abc.ABCMeta):
34
+ def __init__(self, source_text: str, filename: str) -> None:
35
+ self._filename = filename
36
+
37
+ # Decode source after parsing to let Python 2 handle coding declarations.
38
+ # (If the encoding was not utf-8 compatible, then even if it parses correctly,
39
+ # we'll fail with a unicode error here.)
40
+ source_text = str(source_text)
41
+
42
+ self._text = source_text
43
+ self._line_numbers = LineNumbers(source_text)
44
+
45
+ @abc.abstractmethod
46
+ def get_text_positions(self, node, padded):
47
+ # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
48
+ """
49
+ Returns two ``(lineno, col_offset)`` tuples for the start and end of the given node.
50
+ If the positions can't be determined, or the nodes don't correspond to any particular text,
51
+ returns ``(1, 0)`` for both.
52
+
53
+ ``padded`` corresponds to the ``padded`` argument to ``ast.get_source_segment()``.
54
+ This means that if ``padded`` is True, the start position will be adjusted to include
55
+ leading whitespace if ``node`` is a multiline statement.
56
+ """
57
+ raise NotImplementedError # pragma: no cover
58
+
59
+ def get_text_range(self, node, padded=True):
60
+ # type: (AstNode, bool) -> Tuple[int, int]
61
+ """
62
+ Returns the (startpos, endpos) positions in source text corresponding to the given node.
63
+ Returns (0, 0) for nodes (like `Load`) that don't correspond to any particular text.
64
+
65
+ See ``get_text_positions()`` for details on the ``padded`` argument.
66
+ """
67
+ start, end = self.get_text_positions(node, padded)
68
+ return (
69
+ self._line_numbers.line_to_offset(*start),
70
+ self._line_numbers.line_to_offset(*end),
71
+ )
72
+
73
+ def get_text(self, node, padded=True):
74
+ # type: (AstNode, bool) -> str
75
+ """
76
+ Returns the text corresponding to the given node.
77
+ Returns '' for nodes (like `Load`) that don't correspond to any particular text.
78
+
79
+ See ``get_text_positions()`` for details on the ``padded`` argument.
80
+ """
81
+ start, end = self.get_text_range(node, padded)
82
+ return self._text[start: end]
83
+
84
+
85
+ class ASTTokens(ASTTextBase):
86
+ """
87
+ ASTTokens maintains the text of Python code in several forms: as a string, as line numbers, and
88
+ as tokens, and is used to mark and access token and position information.
89
+
90
+ ``source_text`` must be a unicode or UTF8-encoded string. If you pass in UTF8 bytes, remember
91
+ that all offsets you'll get are to the unicode text, which is available as the ``.text``
92
+ property.
93
+
94
+ If ``parse`` is set, the ``source_text`` will be parsed with ``ast.parse()``, and the resulting
95
+ tree marked with token info and made available as the ``.tree`` property.
96
+
97
+ If ``tree`` is given, it will be marked and made available as the ``.tree`` property. In
98
+ addition to the trees produced by the ``ast`` module, ASTTokens will also mark trees produced
99
+ using ``astroid`` library <https://www.astroid.org>.
100
+
101
+ If only ``source_text`` is given, you may use ``.mark_tokens(tree)`` to mark the nodes of an AST
102
+ tree created separately.
103
+ """
104
+
105
+ def __init__(self, source_text, parse=False, tree=None, filename='<unknown>', tokens=None):
106
+ # type: (Any, bool, Optional[Module], str, Iterable[TokenInfo]) -> None
107
+ super(ASTTokens, self).__init__(source_text, filename)
108
+
109
+ self._tree = ast.parse(source_text, filename) if parse else tree
110
+
111
+ # Tokenize the code.
112
+ if tokens is None:
113
+ tokens = generate_tokens(self._text)
114
+ self._tokens = list(self._translate_tokens(tokens))
115
+
116
+ # Extract the start positions of all tokens, so that we can quickly map positions to tokens.
117
+ self._token_offsets = [tok.startpos for tok in self._tokens]
118
+
119
+ if self._tree:
120
+ self.mark_tokens(self._tree)
121
+
122
+ def mark_tokens(self, root_node):
123
+ # type: (Module) -> None
124
+ """
125
+ Given the root of the AST or Astroid tree produced from source_text, visits all nodes marking
126
+ them with token and position information by adding ``.first_token`` and
127
+ ``.last_token`` attributes. This is done automatically in the constructor when ``parse`` or
128
+ ``tree`` arguments are set, but may be used manually with a separate AST or Astroid tree.
129
+ """
130
+ # The hard work of this class is done by MarkTokens
131
+ from .mark_tokens import MarkTokens # to avoid import loops
132
+ MarkTokens(self).visit_tree(root_node)
133
+
134
+ def _translate_tokens(self, original_tokens):
135
+ # type: (Iterable[TokenInfo]) -> Iterator[Token]
136
+ """
137
+ Translates the given standard library tokens into our own representation.
138
+ """
139
+ for index, tok in enumerate(patched_generate_tokens(original_tokens)):
140
+ tok_type, tok_str, start, end, line = tok
141
+ yield Token(tok_type, tok_str, start, end, line, index,
142
+ self._line_numbers.line_to_offset(start[0], start[1]),
143
+ self._line_numbers.line_to_offset(end[0], end[1]))
144
+
145
+ @property
146
+ def text(self):
147
+ # type: () -> str
148
+ """The source code passed into the constructor."""
149
+ return self._text
150
+
151
+ @property
152
+ def tokens(self):
153
+ # type: () -> List[Token]
154
+ """The list of tokens corresponding to the source code from the constructor."""
155
+ return self._tokens
156
+
157
+ @property
158
+ def tree(self):
159
+ # type: () -> Optional[Module]
160
+ """The root of the AST tree passed into the constructor or parsed from the source code."""
161
+ return self._tree
162
+
163
+ @property
164
+ def filename(self):
165
+ # type: () -> str
166
+ """The filename that was parsed"""
167
+ return self._filename
168
+
169
+ def get_token_from_offset(self, offset):
170
+ # type: (int) -> Token
171
+ """
172
+ Returns the token containing the given character offset (0-based position in source text),
173
+ or the preceeding token if the position is between tokens.
174
+ """
175
+ return self._tokens[bisect.bisect(self._token_offsets, offset) - 1]
176
+
177
+ def get_token(self, lineno, col_offset):
178
+ # type: (int, int) -> Token
179
+ """
180
+ Returns the token containing the given (lineno, col_offset) position, or the preceeding token
181
+ if the position is between tokens.
182
+ """
183
+ # TODO: add test for multibyte unicode. We need to translate offsets from ast module (which
184
+ # are in utf8) to offsets into the unicode text. tokenize module seems to use unicode offsets
185
+ # but isn't explicit.
186
+ return self.get_token_from_offset(self._line_numbers.line_to_offset(lineno, col_offset))
187
+
188
+ def get_token_from_utf8(self, lineno, col_offset):
189
+ # type: (int, int) -> Token
190
+ """
191
+ Same as get_token(), but interprets col_offset as a UTF8 offset, which is what `ast` uses.
192
+ """
193
+ return self.get_token(lineno, self._line_numbers.from_utf8_col(lineno, col_offset))
194
+
195
+ def next_token(self, tok, include_extra=False):
196
+ # type: (Token, bool) -> Token
197
+ """
198
+ Returns the next token after the given one. If include_extra is True, includes non-coding
199
+ tokens from the tokenize module, such as NL and COMMENT.
200
+ """
201
+ i = tok.index + 1
202
+ if not include_extra:
203
+ while is_non_coding_token(self._tokens[i].type):
204
+ i += 1
205
+ return self._tokens[i]
206
+
207
+ def prev_token(self, tok, include_extra=False):
208
+ # type: (Token, bool) -> Token
209
+ """
210
+ Returns the previous token before the given one. If include_extra is True, includes non-coding
211
+ tokens from the tokenize module, such as NL and COMMENT.
212
+ """
213
+ i = tok.index - 1
214
+ if not include_extra:
215
+ while is_non_coding_token(self._tokens[i].type):
216
+ i -= 1
217
+ return self._tokens[i]
218
+
219
+ def find_token(self, start_token, tok_type, tok_str=None, reverse=False):
220
+ # type: (Token, int, Optional[str], bool) -> Token
221
+ """
222
+ Looks for the first token, starting at start_token, that matches tok_type and, if given, the
223
+ token string. Searches backwards if reverse is True. Returns ENDMARKER token if not found (you
224
+ can check it with `token.ISEOF(t.type)`).
225
+ """
226
+ t = start_token
227
+ advance = self.prev_token if reverse else self.next_token
228
+ while not match_token(t, tok_type, tok_str) and not token.ISEOF(t.type):
229
+ t = advance(t, include_extra=True)
230
+ return t
231
+
232
+ def token_range(self,
233
+ first_token, # type: Token
234
+ last_token, # type: Token
235
+ include_extra=False, # type: bool
236
+ ):
237
+ # type: (...) -> Iterator[Token]
238
+ """
239
+ Yields all tokens in order from first_token through and including last_token. If
240
+ include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT.
241
+ """
242
+ for i in range(first_token.index, last_token.index + 1):
243
+ if include_extra or not is_non_coding_token(self._tokens[i].type):
244
+ yield self._tokens[i]
245
+
246
+ def get_tokens(self, node, include_extra=False):
247
+ # type: (AstNode, bool) -> Iterator[Token]
248
+ """
249
+ Yields all tokens making up the given node. If include_extra is True, includes non-coding
250
+ tokens such as tokenize.NL and .COMMENT.
251
+ """
252
+ return self.token_range(node.first_token, node.last_token, include_extra=include_extra)
253
+
254
+ def get_text_positions(self, node, padded):
255
+ # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
256
+ """
257
+ Returns two ``(lineno, col_offset)`` tuples for the start and end of the given node.
258
+ If the positions can't be determined, or the nodes don't correspond to any particular text,
259
+ returns ``(1, 0)`` for both.
260
+
261
+ ``padded`` corresponds to the ``padded`` argument to ``ast.get_source_segment()``.
262
+ This means that if ``padded`` is True, the start position will be adjusted to include
263
+ leading whitespace if ``node`` is a multiline statement.
264
+ """
265
+ if not hasattr(node, 'first_token'):
266
+ return (1, 0), (1, 0)
267
+
268
+ start = node.first_token.start
269
+ end = node.last_token.end
270
+ if padded and any(match_token(t, token.NEWLINE) for t in self.get_tokens(node)):
271
+ # Set col_offset to 0 to include leading indentation for multiline statements.
272
+ start = (start[0], 0)
273
+
274
+ return start, end
275
+
276
+
277
+ class ASTText(ASTTextBase):
278
+ """
279
+ Supports the same ``get_text*`` methods as ``ASTTokens``,
280
+ but uses the AST to determine the text positions instead of tokens.
281
+ This is faster than ``ASTTokens`` as it requires less setup work.
282
+
283
+ It also (sometimes) supports nodes inside f-strings, which ``ASTTokens`` doesn't.
284
+
285
+ Some node types and/or Python versions are not supported.
286
+ In these cases the ``get_text*`` methods will fall back to using ``ASTTokens``
287
+ which incurs the usual setup cost the first time.
288
+ If you want to avoid this, check ``supports_tokenless(node)`` before calling ``get_text*`` methods.
289
+ """
290
+ def __init__(self, source_text, tree=None, filename='<unknown>'):
291
+ # type: (Any, Optional[Module], str) -> None
292
+ super(ASTText, self).__init__(source_text, filename)
293
+
294
+ self._tree = tree
295
+ if self._tree is not None:
296
+ annotate_fstring_nodes(self._tree)
297
+
298
+ self._asttokens = None # type: Optional[ASTTokens]
299
+
300
+ @property
301
+ def tree(self):
302
+ # type: () -> Module
303
+ if self._tree is None:
304
+ self._tree = ast.parse(self._text, self._filename)
305
+ annotate_fstring_nodes(self._tree)
306
+ return self._tree
307
+
308
+ @property
309
+ def asttokens(self):
310
+ # type: () -> ASTTokens
311
+ if self._asttokens is None:
312
+ self._asttokens = ASTTokens(
313
+ self._text,
314
+ tree=self.tree,
315
+ filename=self._filename,
316
+ )
317
+ return self._asttokens
318
+
319
+ def _get_text_positions_tokenless(self, node, padded):
320
+ # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
321
+ """
322
+ Version of ``get_text_positions()`` that doesn't use tokens.
323
+ """
324
+ if is_module(node):
325
+ # Modules don't have position info, so just return the range of the whole text.
326
+ # The token-using method does something different, but its behavior seems weird and inconsistent.
327
+ # For example, in a file with only comments, it only returns the first line.
328
+ # It's hard to imagine a case when this matters.
329
+ return (1, 0), self._line_numbers.offset_to_line(len(self._text))
330
+
331
+ if getattr(node, 'lineno', None) is None:
332
+ return (1, 0), (1, 0)
333
+
334
+ assert node # tell mypy that node is not None, which we allowed up to here for compatibility
335
+
336
+ decorators = getattr(node, 'decorator_list', [])
337
+ if not decorators:
338
+ # Astroid uses node.decorators.nodes instead of node.decorator_list.
339
+ decorators_node = getattr(node, 'decorators', None)
340
+ decorators = getattr(decorators_node, 'nodes', [])
341
+ if decorators:
342
+ # Function/Class definition nodes are marked by AST as starting at def/class,
343
+ # not the first decorator. This doesn't match the token-using behavior,
344
+ # or inspect.getsource(), and just seems weird.
345
+ start_node = decorators[0]
346
+ else:
347
+ start_node = node
348
+
349
+ start_lineno = start_node.lineno
350
+ end_node = last_stmt(node)
351
+
352
+ # Include leading indentation for multiline statements.
353
+ # This doesn't mean simple statements that happen to be on multiple lines,
354
+ # but compound statements where inner indentation matters.
355
+ # So we don't just compare node.lineno and node.end_lineno,
356
+ # we check for a contained statement starting on a different line.
357
+ if padded and (
358
+ start_lineno != end_node.lineno
359
+ or (
360
+ # Astroid docstrings aren't treated as separate statements.
361
+ # So to handle function/class definitions with a docstring but no other body,
362
+ # we just check that the node is a statement with a docstring
363
+ # and spanning multiple lines in the simple, literal sense.
364
+ start_lineno != node.end_lineno
365
+ and getattr(node, "doc_node", None)
366
+ and is_stmt(node)
367
+ )
368
+ ):
369
+ start_col_offset = 0
370
+ else:
371
+ start_col_offset = self._line_numbers.from_utf8_col(start_lineno, start_node.col_offset)
372
+
373
+ start = (start_lineno, start_col_offset)
374
+
375
+ # To match the token-using behaviour, we exclude trailing semicolons and comments.
376
+ # This means that for blocks containing multiple statements, we have to use the last one
377
+ # instead of the actual node for end_lineno and end_col_offset.
378
+ end_lineno = cast(int, end_node.end_lineno)
379
+ end_col_offset = cast(int, end_node.end_col_offset)
380
+ end_col_offset = self._line_numbers.from_utf8_col(end_lineno, end_col_offset)
381
+ end = (end_lineno, end_col_offset)
382
+
383
+ return start, end
384
+
385
+ def get_text_positions(self, node, padded):
386
+ # type: (AstNode, bool) -> Tuple[Tuple[int, int], Tuple[int, int]]
387
+ """
388
+ Returns two ``(lineno, col_offset)`` tuples for the start and end of the given node.
389
+ If the positions can't be determined, or the nodes don't correspond to any particular text,
390
+ returns ``(1, 0)`` for both.
391
+
392
+ ``padded`` corresponds to the ``padded`` argument to ``ast.get_source_segment()``.
393
+ This means that if ``padded`` is True, the start position will be adjusted to include
394
+ leading whitespace if ``node`` is a multiline statement.
395
+ """
396
+ if getattr(node, "_broken_positions", None):
397
+ # This node was marked in util.annotate_fstring_nodes as having untrustworthy lineno/col_offset.
398
+ return (1, 0), (1, 0)
399
+
400
+ if supports_tokenless(node):
401
+ return self._get_text_positions_tokenless(node, padded)
402
+
403
+ return self.asttokens.get_text_positions(node, padded)
404
+
405
+
406
+ # Node types that _get_text_positions_tokenless doesn't support.
407
+ # These initial values are missing lineno.
408
+ _unsupported_tokenless_types = ("arguments", "Arguments", "withitem") # type: Tuple[str, ...]
409
+ if sys.version_info[:2] == (3, 8):
410
+ # _get_text_positions_tokenless works incorrectly for these types due to bugs in Python 3.8.
411
+ _unsupported_tokenless_types += ("arg", "Starred")
412
+ # no lineno in 3.8
413
+ _unsupported_tokenless_types += ("Slice", "ExtSlice", "Index", "keyword")
414
+
415
+
416
+ def supports_tokenless(node=None):
417
+ # type: (Any) -> bool
418
+ """
419
+ Returns True if the Python version and the node (if given) are supported by
420
+ the ``get_text*`` methods of ``ASTText`` without falling back to ``ASTTokens``.
421
+ See ``ASTText`` for why this matters.
422
+
423
+ The following cases are not supported:
424
+
425
+ - PyPy
426
+ - ``ast.arguments`` / ``astroid.Arguments``
427
+ - ``ast.withitem``
428
+ - ``astroid.Comprehension``
429
+ - ``astroid.AssignName`` inside ``astroid.Arguments`` or ``astroid.ExceptHandler``
430
+ - The following nodes in Python 3.8 only:
431
+ - ``ast.arg``
432
+ - ``ast.Starred``
433
+ - ``ast.Slice``
434
+ - ``ast.ExtSlice``
435
+ - ``ast.Index``
436
+ - ``ast.keyword``
437
+ """
438
+ return (
439
+ type(node).__name__ not in _unsupported_tokenless_types
440
+ and not (
441
+ # astroid nodes
442
+ not isinstance(node, ast.AST) and node is not None and (
443
+ (
444
+ type(node).__name__ == "AssignName"
445
+ and type(node.parent).__name__ in ("Arguments", "ExceptHandler")
446
+ )
447
+ )
448
+ )
449
+ and 'pypy' not in sys.version.lower()
450
+ )
venv/Lib/site-packages/asttokens/line_numbers.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Grist Labs, Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import bisect
16
+ import re
17
+ from typing import Dict, List, Tuple
18
+
19
+ _line_start_re = re.compile(r'^', re.M)
20
+
21
+ class LineNumbers:
22
+ """
23
+ Class to convert between character offsets in a text string, and pairs (line, column) of 1-based
24
+ line and 0-based column numbers, as used by tokens and AST nodes.
25
+
26
+ This class expects unicode for input and stores positions in unicode. But it supports
27
+ translating to and from utf8 offsets, which are used by ast parsing.
28
+ """
29
+ def __init__(self, text):
30
+ # type: (str) -> None
31
+ # A list of character offsets of each line's first character.
32
+ self._line_offsets = [m.start(0) for m in _line_start_re.finditer(text)]
33
+ self._text = text
34
+ self._text_len = len(text)
35
+ self._utf8_offset_cache = {} # type: Dict[int, List[int]] # maps line num to list of char offset for each byte in line
36
+
37
+ def from_utf8_col(self, line, utf8_column):
38
+ # type: (int, int) -> int
39
+ """
40
+ Given a 1-based line number and 0-based utf8 column, returns a 0-based unicode column.
41
+ """
42
+ offsets = self._utf8_offset_cache.get(line)
43
+ if offsets is None:
44
+ end_offset = self._line_offsets[line] if line < len(self._line_offsets) else self._text_len
45
+ line_text = self._text[self._line_offsets[line - 1] : end_offset]
46
+
47
+ offsets = [i for i,c in enumerate(line_text) for byte in c.encode('utf8')]
48
+ offsets.append(len(line_text))
49
+ self._utf8_offset_cache[line] = offsets
50
+
51
+ return offsets[max(0, min(len(offsets)-1, utf8_column))]
52
+
53
+ def line_to_offset(self, line, column):
54
+ # type: (int, int) -> int
55
+ """
56
+ Converts 1-based line number and 0-based column to 0-based character offset into text.
57
+ """
58
+ line -= 1
59
+ if line >= len(self._line_offsets):
60
+ return self._text_len
61
+ elif line < 0:
62
+ return 0
63
+ else:
64
+ return min(self._line_offsets[line] + max(0, column), self._text_len)
65
+
66
+ def offset_to_line(self, offset):
67
+ # type: (int) -> Tuple[int, int]
68
+ """
69
+ Converts 0-based character offset to pair (line, col) of 1-based line and 0-based column
70
+ numbers.
71
+ """
72
+ offset = max(0, min(self._text_len, offset))
73
+ line_index = bisect.bisect_right(self._line_offsets, offset) - 1
74
+ return (line_index + 1, offset - self._line_offsets[line_index])
75
+
76
+
venv/Lib/site-packages/asttokens/mark_tokens.py ADDED
@@ -0,0 +1,467 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Grist Labs, Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import ast
16
+ import numbers
17
+ import sys
18
+ import token
19
+ from ast import Module
20
+ from typing import Callable, List, Union, cast, Optional, Tuple, TYPE_CHECKING
21
+
22
+ from . import util
23
+ from .asttokens import ASTTokens
24
+ from .astroid_compat import astroid_node_classes as nc, BaseContainer as AstroidBaseContainer
25
+
26
+ if TYPE_CHECKING:
27
+ from .util import AstNode
28
+
29
+
30
+ # Mapping of matching braces. To find a token here, look up token[:2].
31
+ _matching_pairs_left = {
32
+ (token.OP, '('): (token.OP, ')'),
33
+ (token.OP, '['): (token.OP, ']'),
34
+ (token.OP, '{'): (token.OP, '}'),
35
+ }
36
+
37
+ _matching_pairs_right = {
38
+ (token.OP, ')'): (token.OP, '('),
39
+ (token.OP, ']'): (token.OP, '['),
40
+ (token.OP, '}'): (token.OP, '{'),
41
+ }
42
+
43
+
44
+ class MarkTokens:
45
+ """
46
+ Helper that visits all nodes in the AST tree and assigns .first_token and .last_token attributes
47
+ to each of them. This is the heart of the token-marking logic.
48
+ """
49
+ def __init__(self, code):
50
+ # type: (ASTTokens) -> None
51
+ self._code = code
52
+ self._methods = util.NodeMethods()
53
+ self._iter_children = None # type: Optional[Callable]
54
+
55
+ def visit_tree(self, node):
56
+ # type: (Module) -> None
57
+ self._iter_children = util.iter_children_func(node)
58
+ util.visit_tree(node, self._visit_before_children, self._visit_after_children)
59
+
60
+ def _visit_before_children(self, node, parent_token):
61
+ # type: (AstNode, Optional[util.Token]) -> Tuple[Optional[util.Token], Optional[util.Token]]
62
+ col = getattr(node, 'col_offset', None)
63
+ token = self._code.get_token_from_utf8(node.lineno, col) if col is not None else None
64
+
65
+ if not token and util.is_module(node):
66
+ # We'll assume that a Module node starts at the start of the source code.
67
+ token = self._code.get_token(1, 0)
68
+
69
+ # Use our own token, or our parent's if we don't have one, to pass to child calls as
70
+ # parent_token argument. The second value becomes the token argument of _visit_after_children.
71
+ return (token or parent_token, token)
72
+
73
  def _visit_after_children(self, node, parent_token, token):
    # type: (AstNode, Optional[util.Token], Optional[util.Token]) -> None
    # This processes the node generically first, after all children have been processed.
    # NOTE: the `token` parameter shadows the stdlib `token` module within this method body.

    # Get the first and last tokens that belong to children. Note how this doesn't assume that we
    # iterate through children in order that corresponds to occurrence in source code. This
    # assumption can fail (e.g. with return annotations).
    first = token
    last = None
    for child in cast(Callable, self._iter_children)(node):
      # astroid slices have especially wrong positions, we don't want them to corrupt their parents.
      if util.is_empty_astroid_slice(child):
        continue
      if not first or child.first_token.index < first.index:
        first = child.first_token
      if not last or child.last_token.index > last.index:
        last = child.last_token

    # If we don't have a first token from _visit_before_children, and there were no children, then
    # use the parent's token as the first token.
    first = first or parent_token

    # If no children, set last token to the first one.
    last = last or first

    # Statements continue to before NEWLINE. This helps cover a few different cases at once.
    if util.is_stmt(node):
      last = self._find_last_in_stmt(cast(util.Token, last))

    # Capture any unmatched brackets.
    first, last = self._expand_to_matching_pairs(cast(util.Token, first), cast(util.Token, last), node)

    # Give a chance to node-specific methods to adjust.
    nfirst, nlast = self._methods.get(self, node.__class__)(node, first, last)

    if (nfirst, nlast) != (first, last):
      # If anything changed, expand again to capture any unmatched brackets.
      nfirst, nlast = self._expand_to_matching_pairs(nfirst, nlast, node)

    # Record the final token range on the node itself.
    node.first_token = nfirst
    node.last_token = nlast
114
+
115
+ def _find_last_in_stmt(self, start_token):
116
+ # type: (util.Token) -> util.Token
117
+ t = start_token
118
+ while (not util.match_token(t, token.NEWLINE) and
119
+ not util.match_token(t, token.OP, ';') and
120
+ not token.ISEOF(t.type)):
121
+ t = self._code.next_token(t, include_extra=True)
122
+ return self._code.prev_token(t)
123
+
124
  def _expand_to_matching_pairs(self, first_token, last_token, node):
    # type: (util.Token, util.Token, AstNode) -> Tuple[util.Token, util.Token]
    """
    Scan tokens in [first_token, last_token] range that are between node's children, and for any
    unmatched brackets, adjust first/last tokens to include the closing pair.
    """
    # We look for opening parens/braces among non-child tokens (i.e. tokens between our actual
    # child nodes). If we find any closing ones, we match them to the opens.
    # NOTE(review): `_matching_pairs_left` / `_matching_pairs_right` are module globals defined
    # outside this view; from their usage here they map an opening bracket to its expected closer
    # and vice versa — confirm against the top of the file.
    to_match_right = []  # type: List[Tuple[int, str]]
    to_match_left = []
    for tok in self._code.token_range(first_token, last_token):
      tok_info = tok[:2]
      if to_match_right and tok_info == to_match_right[-1]:
        # This token closes the most recently recorded unmatched opener.
        to_match_right.pop()
      elif tok_info in _matching_pairs_left:
        to_match_right.append(_matching_pairs_left[tok_info])
      elif tok_info in _matching_pairs_right:
        to_match_left.append(_matching_pairs_right[tok_info])

    # Once done, extend `last_token` to match any unclosed parens/braces.
    for match in reversed(to_match_right):
      last = self._code.next_token(last_token)
      # Allow for trailing commas or colons (allowed in subscripts) before the closing delimiter
      while any(util.match_token(last, token.OP, x) for x in (',', ':')):
        last = self._code.next_token(last)
      # Now check for the actual closing delimiter.
      if util.match_token(last, *match):
        last_token = last

    # And extend `first_token` to match any unclosed opening parens/braces.
    for match in to_match_left:
      first = self._code.prev_token(first_token)
      if util.match_token(first, *match):
        first_token = first

    return (first_token, last_token)
160
+
161
+ #----------------------------------------------------------------------
162
+ # Node visitors. Each takes a preliminary first and last tokens, and returns the adjusted pair
163
+ # that will actually be assigned.
164
+
165
+ def visit_default(self, node, first_token, last_token):
166
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
167
+ # pylint: disable=no-self-use
168
+ # By default, we don't need to adjust the token we computed earlier.
169
+ return (first_token, last_token)
170
+
171
+ def handle_comp(self, open_brace, node, first_token, last_token):
172
+ # type: (str, AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
173
+ # For list/set/dict comprehensions, we only get the token of the first child, so adjust it to
174
+ # include the opening brace (the closing brace will be matched automatically).
175
+ before = self._code.prev_token(first_token)
176
+ util.expect_token(before, token.OP, open_brace)
177
+ return (before, last_token)
178
+
179
+ def visit_comprehension(self,
180
+ node, # type: AstNode
181
+ first_token, # type: util.Token
182
+ last_token, # type: util.Token
183
+ ):
184
+ # type: (...) -> Tuple[util.Token, util.Token]
185
+ # The 'comprehension' node starts with 'for' but we only get first child; we search backwards
186
+ # to find the 'for' keyword.
187
+ first = self._code.find_token(first_token, token.NAME, 'for', reverse=True)
188
+ return (first, last_token)
189
+
190
  def visit_if(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # (Type comment corrected: first parameter is the node, not a token.)
    # Walk backwards until we reach the 'if'/'elif' keyword, since the preliminary first
    # token may point inside the condition rather than at the keyword itself.
    while first_token.string not in ('if', 'elif'):
      first_token = self._code.prev_token(first_token)
    return first_token, last_token
195
+
196
  def handle_attr(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    """Extend an attribute-access node to cover the trailing '.attr' tokens."""
    # Attribute node has ".attr" (2 tokens) after the last child.
    dot = self._code.find_token(last_token, token.OP, '.')
    name = self._code.next_token(dot)
    util.expect_token(name, token.NAME)
    return (first_token, name)

  visit_attribute = handle_attr
  visit_assignattr = handle_attr
  visit_delattr = handle_attr
207
+
208
  def handle_def(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    """Adjust class/function definitions: cover a docstring-only astroid body and any
    leading decorator '@'."""
    # With astroid, nodes that start with a doc-string can have an empty body, in which case we
    # need to adjust the last token to include the doc string.
    if not node.body and (getattr(node, 'doc_node', None) or getattr(node, 'doc', None)):  # type: ignore[union-attr]
      last_token = self._code.find_token(last_token, token.STRING)

    # Include @ from decorator
    if first_token.index > 0:
      prev = self._code.prev_token(first_token)
      if util.match_token(prev, token.OP, '@'):
        first_token = prev
    return (first_token, last_token)

  visit_classdef = handle_def
  visit_functiondef = handle_def
224
+
225
  def handle_following_brackets(self, node, last_token, opening_bracket):
    # type: (AstNode, util.Token, str) -> util.Token
    """Extend `last_token` past a trailing bracket pair that may contain no child nodes,
    e.g. the `()` of a call or the `[:]` of a subscript."""
    # This is for calls and subscripts, which have a pair of brackets
    # at the end which may contain no nodes, e.g. foo() or bar[:].
    # We look for the opening bracket and then let the matching pair be found automatically
    # Remember that last_token is at the end of all children,
    # so we are not worried about encountering a bracket that belongs to a child.
    first_child = next(cast(Callable, self._iter_children)(node))
    call_start = self._code.find_token(first_child.last_token, token.OP, opening_bracket)
    if call_start.index > last_token.index:
      last_token = call_start
    return last_token
237
+
238
  def visit_call(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # (Type comment corrected: first parameter is the node, not a token.)
    """Extend a Call node past its trailing '(...)' and past a leading decorator '@'."""
    last_token = self.handle_following_brackets(node, last_token, '(')

    # Handling a python bug with decorators with empty parens, e.g.
    # @deco()
    # def ...
    if util.match_token(first_token, token.OP, '@'):
      first_token = self._code.next_token(first_token)
    return (first_token, last_token)
248
+
249
  def visit_matchclass(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    # (Type comment corrected: first parameter is the node, not a token.)
    # A MatchClass pattern ends with a call-like '(...)' pair; extend past it.
    last_token = self.handle_following_brackets(node, last_token, '(')
    return (first_token, last_token)
253
+
254
+ def visit_subscript(self,
255
+ node, # type: AstNode
256
+ first_token, # type: util.Token
257
+ last_token, # type: util.Token
258
+ ):
259
+ # type: (...) -> Tuple[util.Token, util.Token]
260
+ last_token = self.handle_following_brackets(node, last_token, '[')
261
+ return (first_token, last_token)
262
+
263
+ def visit_slice(self, node, first_token, last_token):
264
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
265
+ # consume `:` tokens to the left and right. In Python 3.9, Slice nodes are
266
+ # given a col_offset, (and end_col_offset), so this will always start inside
267
+ # the slice, even if it is the empty slice. However, in 3.8 and below, this
268
+ # will only expand to the full slice if the slice contains a node with a
269
+ # col_offset. So x[:] will only get the correct tokens in 3.9, but x[1:] and
270
+ # x[:1] will even on earlier versions of Python.
271
+ while True:
272
+ prev = self._code.prev_token(first_token)
273
+ if prev.string != ':':
274
+ break
275
+ first_token = prev
276
+ while True:
277
+ next_ = self._code.next_token(last_token)
278
+ if next_.string != ':':
279
+ break
280
+ last_token = next_
281
+ return (first_token, last_token)
282
+
283
+ def handle_bare_tuple(self, node, first_token, last_token):
284
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
285
+ # A bare tuple doesn't include parens; if there is a trailing comma, make it part of the tuple.
286
+ maybe_comma = self._code.next_token(last_token)
287
+ if util.match_token(maybe_comma, token.OP, ','):
288
+ last_token = maybe_comma
289
+ return (first_token, last_token)
290
+
291
  # In Python3.8 parsed tuples include parentheses when present.
  def handle_tuple_nonempty(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    """Detect whether a non-empty tuple is bare (unparenthesized) and, if so, let
    handle_bare_tuple absorb a trailing comma."""
    assert isinstance(node, ast.Tuple) or isinstance(node, AstroidBaseContainer)
    # It's a bare tuple if the first token belongs to the first child. The first child may
    # include extraneous parentheses (which don't create new nodes), so account for those too.
    child = node.elts[0]
    if TYPE_CHECKING:
      child = cast(AstNode, child)
    child_first, child_last = self._gobble_parens(child.first_token, child.last_token, True)
    if first_token == child_first:
      return self.handle_bare_tuple(node, first_token, last_token)
    return (first_token, last_token)
304
+
305
+ def visit_tuple(self, node, first_token, last_token):
306
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
307
+ assert isinstance(node, ast.Tuple) or isinstance(node, AstroidBaseContainer)
308
+ if not node.elts:
309
+ # An empty tuple is just "()", and we need no further info.
310
+ return (first_token, last_token)
311
+ return self.handle_tuple_nonempty(node, first_token, last_token)
312
+
313
+ def _gobble_parens(self, first_token, last_token, include_all=False):
314
+ # type: (util.Token, util.Token, bool) -> Tuple[util.Token, util.Token]
315
+ # Expands a range of tokens to include one or all pairs of surrounding parentheses, and
316
+ # returns (first, last) tokens that include these parens.
317
+ while first_token.index > 0:
318
+ prev = self._code.prev_token(first_token)
319
+ next = self._code.next_token(last_token)
320
+ if util.match_token(prev, token.OP, '(') and util.match_token(next, token.OP, ')'):
321
+ first_token, last_token = prev, next
322
+ if include_all:
323
+ continue
324
+ break
325
+ return (first_token, last_token)
326
+
327
+ def visit_str(self, node, first_token, last_token):
328
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
329
+ return self.handle_str(first_token, last_token)
330
+
331
  def visit_joinedstr(self,
                      node,  # type: AstNode
                      first_token,  # type: util.Token
                      last_token,  # type: util.Token
                      ):
    # type: (...) -> Tuple[util.Token, util.Token]
    """Cover an f-string (JoinedStr), including adjacent string concatenation and, on
    Python 3.12+, balanced FSTRING_START/FSTRING_END token pairs for nested f-strings."""
    if sys.version_info < (3, 12):
      # Older versions don't tokenize the contents of f-strings
      return self.handle_str(first_token, last_token)

    last = first_token
    while True:
      if util.match_token(last, getattr(token, "FSTRING_START")):
        # Python 3.12+ has tokens for the start (e.g. `f"`) and end (`"`)
        # of the f-string. We can't just look for the next FSTRING_END
        # because f-strings can be nested, e.g. f"{f'{x}'}", so we need
        # to treat this like matching balanced parentheses.
        count = 1
        while count > 0:
          last = self._code.next_token(last)
          # mypy complains about token.FSTRING_START and token.FSTRING_END.
          if util.match_token(last, getattr(token, "FSTRING_START")):
            count += 1
          elif util.match_token(last, getattr(token, "FSTRING_END")):
            count -= 1
        last_token = last
        last = self._code.next_token(last_token)
      elif util.match_token(last, token.STRING):
        # Similar to handle_str, we also need to handle adjacent strings.
        last_token = last
        last = self._code.next_token(last_token)
      else:
        break
    return (first_token, last_token)
365
+
366
+ def visit_bytes(self, node, first_token, last_token):
367
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
368
+ return self.handle_str(first_token, last_token)
369
+
370
+ def handle_str(self, first_token, last_token):
371
+ # type: (util.Token, util.Token) -> Tuple[util.Token, util.Token]
372
+ # Multiple adjacent STRING tokens form a single string.
373
+ last = self._code.next_token(last_token)
374
+ while util.match_token(last, token.STRING):
375
+ last_token = last
376
+ last = self._code.next_token(last_token)
377
+ return (first_token, last_token)
378
+
379
  def handle_num(self,
                 node,  # type: AstNode
                 value,  # type: Union[complex, int, numbers.Number]
                 first_token,  # type: util.Token
                 last_token,  # type: util.Token
                 ):
    # type: (...) -> Tuple[util.Token, util.Token]
    """Adjust token range for numeric constants, handling a leading unary '-'."""
    # A constant like '-1' gets turned into two tokens; this will skip the '-'.
    while util.match_token(last_token, token.OP):
      last_token = self._code.next_token(last_token)

    if isinstance(value, complex):
      # A complex number like -2j cannot be compared directly to 0
      # A complex number like 1-2j is expressed as a binary operation
      # so we don't need to worry about it
      value = value.imag

    # This makes sure that the - is included
    if value < 0 and first_token.type == token.NUMBER:  # type: ignore[operator]
      first_token = self._code.prev_token(first_token)
    return (first_token, last_token)
400
+
401
+ def visit_num(self, node, first_token, last_token):
402
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
403
+ return self.handle_num(node, cast(ast.Num, node).n, first_token, last_token)
404
+
405
  def visit_const(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    """Dispatch a Constant/Const node to the numeric or string handler based on the
    type of its value; other constant values need no adjustment."""
    assert isinstance(node, ast.Constant) or isinstance(node, nc.Const)
    if isinstance(node.value, numbers.Number):
      return self.handle_num(node, node.value, first_token, last_token)
    elif isinstance(node.value, (str, bytes)):
      return self.visit_str(node, first_token, last_token)
    return (first_token, last_token)

  visit_constant = visit_const
415
+
416
  def visit_keyword(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    """Extend keyword arguments to include the 'name=' part when line info is missing."""
    # Until python 3.9 (https://bugs.python.org/issue40141),
    # ast.keyword nodes didn't have line info. Astroid has lineno None.
    assert isinstance(node, ast.keyword) or isinstance(node, nc.Keyword)
    if node.arg is not None and getattr(node, 'lineno', None) is None:
      # Scan back from the value to the '=' and the argument name before it.
      equals = self._code.find_token(first_token, token.OP, '=', reverse=True)
      name = self._code.prev_token(equals)
      util.expect_token(name, token.NAME, node.arg)
      first_token = name
    return (first_token, last_token)
427
+
428
+ def visit_starred(self, node, first_token, last_token):
429
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
430
+ # Astroid has 'Starred' nodes (for "foo(*bar)" type args), but they need to be adjusted.
431
+ if not util.match_token(first_token, token.OP, '*'):
432
+ star = self._code.prev_token(first_token)
433
+ if util.match_token(star, token.OP, '*'):
434
+ first_token = star
435
+ return (first_token, last_token)
436
+
437
  def visit_assignname(self, node, first_token, last_token):
    # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
    """Fix up astroid AssignName nodes produced for 'except ... as <name>:' clauses."""
    # Astroid may turn 'except' clause into AssignName, but we need to adjust it.
    if util.match_token(first_token, token.NAME, 'except'):
      # Point both ends at the bound name, i.e. the token just before the colon.
      colon = self._code.find_token(last_token, token.OP, ':')
      first_token = last_token = self._code.prev_token(colon)
    return (first_token, last_token)
444
+
445
+ # Async nodes should typically start with the word 'async'
446
+ # but Python < 3.7 doesn't put the col_offset there
447
+ # AsyncFunctionDef is slightly different because it might have
448
+ # decorators before that, which visit_functiondef handles
449
+ def handle_async(self, node, first_token, last_token):
450
+ # type: (AstNode, util.Token, util.Token) -> Tuple[util.Token, util.Token]
451
+ if not first_token.string == 'async':
452
+ first_token = self._code.prev_token(first_token)
453
+ return (first_token, last_token)
454
+
455
+ visit_asyncfor = handle_async
456
+ visit_asyncwith = handle_async
457
+
458
  def visit_asyncfunctiondef(self,
                             node,  # type: AstNode
                             first_token,  # type: util.Token
                             last_token,  # type: util.Token
                             ):
    # type: (...) -> Tuple[util.Token, util.Token]
    """Include the leading 'async' keyword, then apply the regular function-def handling
    (which also accounts for decorators)."""
    if util.match_token(first_token, token.NAME, 'def'):
      # Include the 'async' token
      first_token = self._code.prev_token(first_token)
    return self.visit_functiondef(node, first_token, last_token)
venv/Lib/site-packages/asttokens/py.typed ADDED
File without changes
venv/Lib/site-packages/asttokens/util.py ADDED
@@ -0,0 +1,485 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Grist Labs, Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ import ast
16
+ import collections
17
+ import io
18
+ import sys
19
+ import token
20
+ import tokenize
21
+ from abc import ABCMeta
22
+ from ast import Module, expr, AST
23
+ from functools import lru_cache
24
+ from typing import (
25
+ Callable,
26
+ Dict,
27
+ Iterable,
28
+ Iterator,
29
+ List,
30
+ Optional,
31
+ Tuple,
32
+ Union,
33
+ cast,
34
+ Any,
35
+ TYPE_CHECKING,
36
+ Type,
37
+ )
38
+
39
if TYPE_CHECKING:  # pragma: no cover
  from .astroid_compat import NodeNG

  # Type class used to expand out the definition of AST to include fields added by this library
  # It's not actually used for anything other than type checking though!
  class EnhancedAST(AST):
    # Additional attributes set by mark_tokens
    first_token = None  # type: Token
    last_token = None  # type: Token
    lineno = 0  # type: int

  # A node from either the stdlib ast module or astroid.
  AstNode = Union[EnhancedAST, NodeNG]

  # Alias of the stdlib tokenize.TokenInfo, re-exported for annotations.
  TokenInfo = tokenize.TokenInfo
53
+
54
+
55
def token_repr(tok_type, string):
  # type: (int, Optional[str]) -> str
  """Returns a human-friendly "TYPE:'string'" representation of a token."""
  # Python 2's repr() used to prefix unicode literals with 'u'; strip it for consistent output.
  name = token.tok_name[tok_type]
  text = repr(string).lstrip('u')
  return '%s:%s' % (name, text)
60
+
61
+
62
class Token(collections.namedtuple('Token', 'type string start end line index startpos endpos')):
  """
  An 8-tuple token record: the same 5 fields produced by the stdlib tokenize module,
  plus 3 extra fields used by this library:

  - [0] .type      Token type (see token.py)
  - [1] .string    Token text (a string)
  - [2] .start     Starting (row, column) indices of the token (a 2-tuple of ints)
  - [3] .end       Ending (row, column) indices of the token (a 2-tuple of ints)
  - [4] .line      Original source line (string)
  - [5] .index     Index of the token in the list of tokens that it belongs to.
  - [6] .startpos  Starting character offset into the input text.
  - [7] .endpos    Ending character offset into the input text.
  """
  def __str__(self):
    # type: () -> str
    # Delegate to token_repr for a "TYPE:'string'" rendering.
    return token_repr(self.type, self.string)
79
+
80
+
81
def match_token(token, tok_type, tok_str=None):
  # type: (Token, int, Optional[str]) -> bool
  """Returns true if token is of the given type and, if a string is given, has that string."""
  if token.type != tok_type:
    return False
  return tok_str is None or token.string == tok_str
85
+
86
+
87
def expect_token(token, tok_type, tok_str=None):
  # type: (Token, int, Optional[str]) -> None
  """
  Verifies that the given token is of the expected type. If tok_str is given, the token string
  is verified too. If the token doesn't match, raises an informative ValueError.
  """
  if match_token(token, tok_type, tok_str):
    return
  raise ValueError("Expected token %s, got %s on line %s col %s" % (
    token_repr(tok_type, tok_str), str(token),
    token.start[0], token.start[1] + 1))
97
+
98
+
99
def is_non_coding_token(token_type):
  # type: (int) -> bool
  """
  These are considered non-coding tokens, as they don't affect the syntax tree.
  """
  return token_type in {token.NL, token.COMMENT, token.ENCODING}
105
+
106
+
107
def generate_tokens(text):
  # type: (str) -> Iterator[TokenInfo]
  """
  Generates standard library tokens for the given source code string.
  """
  # tokenize.generate_tokens is technically an undocumented API for Python3, but allows us to
  # use the same API as for Python2. See http://stackoverflow.com/a/4952291/328565.
  readline = io.StringIO(text).readline
  # FIXME: Remove cast once https://github.com/python/typeshed/issues/7003 gets fixed
  return tokenize.generate_tokens(cast(Callable[[], str], readline))
116
+
117
+
118
def iter_children_func(node):
  # type: (AST) -> Callable
  """
  Returns the child-iteration function appropriate for `node`: the astroid variant when the
  node exposes `get_children`, otherwise the stdlib `ast` variant. Both skip singleton nodes.
  """
  if hasattr(node, 'get_children'):
    return iter_children_astroid
  return iter_children_ast
126
+
127
+
128
def iter_children_astroid(node, include_joined_str=False):
  # type: (NodeNG, bool) -> Union[Iterator, List]
  """Yields direct children of an astroid node, skipping JoinedStr contents unless requested."""
  if is_joined_str(node) and not include_joined_str:
    return []
  return node.get_children()
134
+
135
+
136
# AST classes whose instances are singletons (operators, expression contexts, etc.): they carry
# no position information of their own, so child iteration skips them.
SINGLETONS = {c for n, c in ast.__dict__.items() if isinstance(c, type) and
              issubclass(c, (ast.expr_context, ast.boolop, ast.operator, ast.unaryop, ast.cmpop))}
138
+
139
+
140
def iter_children_ast(node, include_joined_str=False):
  # type: (AST, bool) -> Iterator[Union[AST, expr]]
  """Yields direct children of a stdlib ast node, skipping singleton nodes (and JoinedStr
  contents unless include_joined_str is set). Dict children come out in source order."""
  if is_joined_str(node) and not include_joined_str:
    return

  if isinstance(node, ast.Dict):
    # Override the iteration order: instead of <all keys>, <all values>, yield keys and
    # values interleaved in source order (key1, value1, key2, value2, ...).
    for key, value in zip(node.keys, node.values):
      if key is not None:
        yield key
      yield value
    return

  for child in ast.iter_child_nodes(node):
    # Skip singleton children; they don't reflect particular positions in the code and break
    # the assumptions about the tree consisting of distinct nodes. A set-membership test on
    # the class is faster than isinstance each time.
    if child.__class__ not in SINGLETONS:
      yield child
160
+
161
+
162
# Names of all stdlib statement classes, and of all expression classes (plus the astroid node
# names that correspond to expressions). Comparing class names, rather than using isinstance,
# lets the predicates below treat ast and astroid nodes uniformly without importing astroid.
stmt_class_names = {name for name, cls in ast.__dict__.items()
                    if isinstance(cls, type) and issubclass(cls, ast.stmt)}
expr_class_names = {name for name, cls in ast.__dict__.items()
                    if isinstance(cls, type) and issubclass(cls, ast.expr)}
expr_class_names |= {'AssignName', 'DelName', 'Const', 'AssignAttr', 'DelAttr'}


def is_expr(node):
  # type: (AstNode) -> bool
  """Returns whether node is an expression node."""
  return node.__class__.__name__ in expr_class_names


def is_stmt(node):
  # type: (AstNode) -> bool
  """Returns whether node is a statement node."""
  return node.__class__.__name__ in stmt_class_names


def is_module(node):
  # type: (AstNode) -> bool
  """Returns whether node is a module node."""
  return node.__class__.__name__ == 'Module'
184
+
185
def is_joined_str(node):
  # type: (AstNode) -> bool
  """Returns whether node is a JoinedStr node, used to represent f-strings."""
  # At the moment, nodes below JoinedStr have wrong line/col info, and trying to process them
  # only leads to errors.
  return node.__class__.__name__ == 'JoinedStr'
191
+
192
+
193
def is_expr_stmt(node):
  # type: (AstNode) -> bool
  """Returns whether node is an `Expr` node, which is a statement that is an expression."""
  return node.__class__.__name__ == 'Expr'
197
+
198
+
199
+
200
# Classes whose instances count as "constant" nodes: stdlib ast.Constant, plus astroid's Const
# when astroid is installed.
CONSTANT_CLASSES: Tuple[Type, ...] = (ast.Constant,)
try:
  from astroid import Const
  CONSTANT_CLASSES += (Const,)
except ImportError:  # pragma: no cover
  # astroid is not available
  pass


def is_constant(node):
  # type: (AstNode) -> bool
  """Returns whether node is a Constant node."""
  return isinstance(node, CONSTANT_CLASSES)


def is_ellipsis(node):
  # type: (AstNode) -> bool
  """Returns whether node is a constant whose value is the Ellipsis singleton."""
  return is_constant(node) and node.value is Ellipsis  # type: ignore
218
+
219
+
220
def is_starred(node):
  # type: (AstNode) -> bool
  """Returns whether node is a starred expression node."""
  return node.__class__.__name__ == 'Starred'
224
+
225
+
226
def is_slice(node):
  # type: (AstNode) -> bool
  """Returns whether node represents a slice, e.g. `1:2` in `x[1:2]`"""
  # Before 3.9, a tuple containing a slice is an ExtSlice,
  # but this was removed in https://bugs.python.org/issue34822
  name = node.__class__.__name__
  if name in ('Slice', 'ExtSlice'):
    return True
  return name == 'Tuple' and any(is_slice(elt) for elt in cast(ast.Tuple, node).elts)
238
+
239
+
240
def is_empty_astroid_slice(node):
  # type: (AstNode) -> bool
  """Returns True for an astroid Slice node with no lower/upper/step; stdlib ast nodes
  never match (they are excluded by the isinstance check)."""
  if node.__class__.__name__ != "Slice" or isinstance(node, ast.AST):
    return False
  return node.lower is None and node.upper is None and node.step is None
247
+
248
+
249
# Sentinel value used by visit_tree().
_PREVISIT = object()

def visit_tree(node, previsit, postvisit):
  # type: (Module, Callable[[AstNode, Optional[Token]], Tuple[Optional[Token], Optional[Token]]], Optional[Callable[[AstNode, Optional[Token], Optional[Token]], None]]) -> None
  """
  Scans the tree under the node depth-first using an explicit stack. It avoids implicit recursion
  via the function call stack to avoid hitting 'maximum recursion depth exceeded' error.

  It calls ``previsit()`` and ``postvisit()`` as follows:

  * ``previsit(node, par_value)`` - should return ``(par_value, value)``
        ``par_value`` is as returned from ``previsit()`` of the parent.

  * ``postvisit(node, par_value, value)`` - should return ``value``
        ``par_value`` is as returned from ``previsit()`` of the parent, and ``value`` is as
        returned from ``previsit()`` of this node itself. The return ``value`` is ignored except
        the one for the root node, which is returned from the overall ``visit_tree()`` call.

  For the initial node, ``par_value`` is None. ``postvisit`` may be None.
  """
  if not postvisit:
    postvisit = lambda node, pvalue, value: None

  iter_children = iter_children_func(node)
  done = set()
  ret = None
  stack = [(node, None, _PREVISIT)] # type: List[Tuple[AstNode, Optional[Token], Union[Optional[Token], object]]]
  while stack:
    current, par_value, value = stack.pop()
    if value is _PREVISIT:
      assert current not in done    # protect against infinite loop in case of a bad tree.
      done.add(current)

      pvalue, post_value = previsit(current, par_value)
      # Re-push the node so it is post-visited after all its children are processed.
      stack.append((current, par_value, post_value))

      # Insert all children in reverse order (so that first child ends up on top of the stack).
      ins = len(stack)
      for n in iter_children(current):
        stack.insert(ins, (n, pvalue, _PREVISIT))
    else:
      ret = postvisit(current, par_value, cast(Optional[Token], value))
  return ret
293
+
294
+
295
def walk(node, include_joined_str=False):
  # type: (AST, bool) -> Iterator[Union[Module, AstNode]]
  """
  Recursively yield all descendant nodes in the tree starting at ``node`` (including ``node``
  itself), using depth-first pre-order traversal (yielding parents before their children).

  This is similar to ``ast.walk()``, but with a different order, and it works for both ``ast`` and
  ``astroid`` trees. Also, as ``iter_children()``, it skips singleton nodes generated by ``ast``.

  By default, ``JoinedStr`` (f-string) nodes and their contents are skipped
  because they previously couldn't be handled. Set ``include_joined_str`` to True to include them.
  """
  iter_children = iter_children_func(node)
  done = set()
  stack = [node]
  while stack:
    current = stack.pop()
    assert current not in done    # protect against infinite loop in case of a bad tree.
    done.add(current)

    yield current

    # Insert all children in reverse order (so that first child ends up on top of the stack).
    # This is faster than building a list and reversing it.
    ins = len(stack)
    for c in iter_children(current, include_joined_str):
      stack.insert(ins, c)
322
+
323
+
324
def replace(text, replacements):
  # type: (str, List[Tuple[int, int, str]]) -> str
  """
  Replaces multiple slices of text with new values. This is a convenience method for making code
  modifications of ranges e.g. as identified by ``ASTTokens.get_text_range(node)``. Replacements is
  an iterable of ``(start, end, new_text)`` tuples.

  For example, ``replace("this is a test", [(0, 4, "X"), (8, 9, "THE")])`` produces
  ``"X is THE test"``.
  """
  pieces = []
  pos = 0
  for start, end, new_text in sorted(replacements):
    pieces.append(text[pos:start])
    pieces.append(new_text)
    pos = end
  pieces.append(text[pos:])
  return ''.join(pieces)
342
+
343
+
344
class NodeMethods:
  """
  Helper to look up `visit_{node_type}` methods on a visitor object by node class, caching
  the result per class.
  """
  def __init__(self):
    # type: () -> None
    # Maps node class -> bound visitor method.
    self._cache = {} # type: Dict[Union[ABCMeta, type], Callable[[AstNode, Token, Token], Tuple[Token, Token]]]

  def get(self, obj, cls):
    # type: (Any, Union[ABCMeta, type]) -> Callable
    """
    Using the lowercase name of the class as node_type, returns `obj.visit_{node_type}`,
    or `obj.visit_default` if the type-specific method is not found.
    """
    try:
      return self._cache[cls]
    except KeyError:
      method = getattr(obj, "visit_" + cls.__name__.lower(), obj.visit_default)
      self._cache[cls] = method
      return method
364
+
365
+
366
def patched_generate_tokens(original_tokens):
  # type: (Iterable[TokenInfo]) -> Iterator[TokenInfo]
  """
  Fixes tokens yielded by `tokenize.generate_tokens` to handle more non-ASCII characters in identifiers.
  Workaround for https://github.com/python/cpython/issues/68382.
  Should only be used when tokenizing a string that is known to be valid syntax,
  because it assumes that error tokens are not actually errors.
  Combines groups of consecutive NAME, NUMBER, and/or ERRORTOKEN tokens into a single NAME token.
  """
  # Running group of adjacent candidate tokens waiting to be merged.
  group = []  # type: List[tokenize.TokenInfo]
  for tok in original_tokens:
    if (
        tok.type in (tokenize.NAME, tokenize.ERRORTOKEN, tokenize.NUMBER)
        # Only combine tokens if they have no whitespace in between
        and (not group or group[-1].end == tok.start)
    ):
      group.append(tok)
    else:
      # A non-combinable token ends the current run: flush the merged group
      # (via the sibling combine_tokens helper) before yielding this token.
      for combined_token in combine_tokens(group):
        yield combined_token
      group = []
      yield tok
  # Flush any trailing run once the input is exhausted.
  for combined_token in combine_tokens(group):
    yield combined_token
390
+
391
def combine_tokens(group):
  # type: (List[tokenize.TokenInfo]) -> List[tokenize.TokenInfo]
  """
  Merge a run of adjacent tokens into one NAME token, but only when the run
  actually contains an ERRORTOKEN and sits entirely on a single source line;
  otherwise return the group unchanged.
  """
  contains_error = any(t.type == tokenize.ERRORTOKEN for t in group)
  on_one_line = len({t.line for t in group}) == 1
  if not (contains_error and on_one_line):
    return group
  merged = tokenize.TokenInfo(
      type=tokenize.NAME,
      string="".join(t.string for t in group),
      start=group[0].start,
      end=group[-1].end,
      line=group[0].line,
  )
  return [merged]
404
+
405
+
406
def last_stmt(node):
  # type: (ast.AST) -> ast.AST
  """
  If the given AST node contains multiple statements, return the last one.
  Otherwise, just return the node.
  """
  # Collect statement children, also treating statement-like containers
  # (exception handlers, match cases, old Try* nodes) as descendable; the
  # lowercase/CamelCase pairs cover both ast and astroid spellings.
  child_stmts = [
    child for child in iter_children_func(node)(node)
    if is_stmt(child) or type(child).__name__ in (
      "excepthandler",
      "ExceptHandler",
      "match_case",
      "MatchCase",
      "TryExcept",
      "TryFinally",
    )
  ]
  if child_stmts:
    # Recurse: the last child statement may itself contain nested statements.
    return last_stmt(child_stmts[-1])
  return node
426
+
427
+
428
+
429
@lru_cache(maxsize=None)
def fstring_positions_work():
  # type: () -> bool
  """
  The positions attached to nodes inside f-string FormattedValues have some bugs
  that were fixed in Python 3.9.7 in https://github.com/python/cpython/pull/27729.
  This checks for those bugs more concretely without relying on the Python version.
  Specifically this checks:
  - Values with a format spec or conversion
  - Repeated (i.e. identical-looking) expressions
  - f-strings implicitly concatenated over multiple lines.
  - Multiline, triple-quoted f-strings.
  """
  # A probe snippet exercising every known-buggy f-string shape at once.
  source = """(
    f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
    f"a {b}{b} c {d!r} e {f:g} h {i:{j}} k {l:{m:n}}"
    f"{x + y + z} {x} {y} {z} {z} {z!a} {z:z}"
    f'''
    {s} {t}
    {u} {v}
    '''
  )"""
  tree = ast.parse(source)
  name_nodes = [n for n in ast.walk(tree) if isinstance(n, ast.Name)]
  # Buggy interpreters give several Name nodes the same (lineno, col_offset).
  seen = set()
  for n in name_nodes:
    position = (n.lineno, n.col_offset)
    if position in seen:
      return False
    seen.add(position)
  # Positions are unique; also require that each Name's recorded span really
  # covers its own identifier text in the source.
  return all(
    ast.get_source_segment(source, n) == n.id
    for n in name_nodes
  )
460
+
461
def annotate_fstring_nodes(tree):
  # type: (ast.AST) -> None
  """
  Add a special attribute `_broken_positions` to nodes inside f-strings
  if the lineno/col_offset cannot be trusted.
  """
  if sys.version_info >= (3, 12):
    # f-strings were weirdly implemented until https://peps.python.org/pep-0701/
    # In Python 3.12, inner nodes have sensible positions.
    return
  # walk() normally skips JoinedStr nodes; ask for them explicitly here.
  for joinedstr in walk(tree, include_joined_str=True):
    if not isinstance(joinedstr, ast.JoinedStr):
      continue
    for part in joinedstr.values:
      # The ast positions of the FormattedValues/Constant nodes span the full f-string, which is weird.
      setattr(part, '_broken_positions', True)  # use setattr for mypy

      if isinstance(part, ast.FormattedValue):
        # On interpreters with the pre-3.9.7 bugs, positions of nodes inside
        # the interpolated expression are unreliable too.
        if not fstring_positions_work():
          for child in walk(part.value):
            setattr(child, '_broken_positions', True)

        if part.format_spec:  # this is another JoinedStr
          # Again, the standard positions span the full f-string.
          setattr(part.format_spec, '_broken_positions', True)
venv/Lib/site-packages/asttokens/version.py ADDED
@@ -0,0 +1 @@
 
 
1
# Single source of truth for the asttokens package version; kept in its own
# module so packaging tools can read it without importing the package.
__version__ = "3.0.0"
venv/Lib/site-packages/async_lru-2.0.5.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
venv/Lib/site-packages/async_lru-2.0.5.dist-info/LICENSE ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The MIT License
2
+
3
+ Copyright (c) 2018 aio-libs team https://github.com/aio-libs/
4
+ Copyright (c) 2017 Ocean S. A. https://ocean.io/
5
+ Copyright (c) 2016-2017 WikiBusiness Corporation http://wikibusiness.org/
6
+
7
+ Permission is hereby granted, free of charge, to any person obtaining a copy
8
+ of this software and associated documentation files (the "Software"), to deal
9
+ in the Software without restriction, including without limitation the rights
10
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11
+ copies of the Software, and to permit persons to whom the Software is
12
+ furnished to do so, subject to the following conditions:
13
+
14
+ The above copyright notice and this permission notice shall be included in
15
+ all copies or substantial portions of the Software.
16
+
17
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23
+ THE SOFTWARE.
venv/Lib/site-packages/async_lru-2.0.5.dist-info/METADATA ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.2
2
+ Name: async-lru
3
+ Version: 2.0.5
4
+ Summary: Simple LRU cache for asyncio
5
+ Home-page: https://github.com/aio-libs/async-lru
6
+ Maintainer: aiohttp team <[email protected]>
7
+ Maintainer-email: [email protected]
8
+ License: MIT License
9
+ Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
10
+ Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
11
+ Project-URL: CI: GitHub Actions, https://github.com/aio-libs/async-lru/actions
12
+ Project-URL: GitHub: repo, https://github.com/aio-libs/async-lru
13
+ Keywords: asyncio,lru,lru_cache
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Intended Audience :: Developers
16
+ Classifier: Programming Language :: Python
17
+ Classifier: Programming Language :: Python :: 3
18
+ Classifier: Programming Language :: Python :: 3 :: Only
19
+ Classifier: Programming Language :: Python :: 3.9
20
+ Classifier: Programming Language :: Python :: 3.10
21
+ Classifier: Programming Language :: Python :: 3.11
22
+ Classifier: Programming Language :: Python :: 3.12
23
+ Classifier: Programming Language :: Python :: 3.13
24
+ Classifier: Development Status :: 5 - Production/Stable
25
+ Classifier: Framework :: AsyncIO
26
+ Requires-Python: >=3.9
27
+ Description-Content-Type: text/x-rst
28
+ License-File: LICENSE
29
+ Requires-Dist: typing_extensions>=4.0.0; python_version < "3.11"
30
+
31
+ async-lru
32
+ =========
33
+
34
+ :info: Simple lru cache for asyncio
35
+
36
+ .. image:: https://github.com/aio-libs/async-lru/actions/workflows/ci-cd.yml/badge.svg?event=push
37
+ :target: https://github.com/aio-libs/async-lru/actions/workflows/ci-cd.yml?query=event:push
38
+ :alt: GitHub Actions CI/CD workflows status
39
+
40
+ .. image:: https://img.shields.io/pypi/v/async-lru.svg?logo=Python&logoColor=white
41
+ :target: https://pypi.org/project/async-lru
42
+ :alt: async-lru @ PyPI
43
+
44
+ .. image:: https://codecov.io/gh/aio-libs/async-lru/branch/master/graph/badge.svg
45
+ :target: https://codecov.io/gh/aio-libs/async-lru
46
+
47
+ .. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
48
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
49
+ :alt: Matrix Room — #aio-libs:matrix.org
50
+
51
+ .. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
52
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
53
+ :alt: Matrix Space — #aio-libs-space:matrix.org
54
+
55
+ Installation
56
+ ------------
57
+
58
+ .. code-block:: shell
59
+
60
+ pip install async-lru
61
+
62
+ Usage
63
+ -----
64
+
65
+ This package is a port of Python's built-in `functools.lru_cache <https://docs.python.org/3/library/functools.html#functools.lru_cache>`_ function for `asyncio <https://docs.python.org/3/library/asyncio.html>`_. To better handle async behaviour, it also ensures multiple concurrent calls will only result in 1 call to the wrapped function, with all ``await``\s receiving the result of that call when it completes.
66
+
67
+ .. code-block:: python
68
+
69
+ import asyncio
70
+
71
+ import aiohttp
72
+ from async_lru import alru_cache
73
+
74
+
75
+ @alru_cache(maxsize=32)
76
+ async def get_pep(num):
77
+ resource = 'http://www.python.org/dev/peps/pep-%04d/' % num
78
+ async with aiohttp.ClientSession() as session:
79
+ try:
80
+ async with session.get(resource) as s:
81
+ return await s.read()
82
+ except aiohttp.ClientError:
83
+ return 'Not Found'
84
+
85
+
86
+ async def main():
87
+ for n in 8, 290, 308, 320, 8, 218, 320, 279, 289, 320, 9991:
88
+ pep = await get_pep(n)
89
+ print(n, len(pep))
90
+
91
+ print(get_pep.cache_info())
92
+ # CacheInfo(hits=3, misses=8, maxsize=32, currsize=8)
93
+
94
+ # closing is optional, but highly recommended
95
+ await get_pep.cache_close()
96
+
97
+
98
+ asyncio.run(main())
99
+
100
+
101
+ TTL (time-to-live in seconds, expiration on timeout) is supported by accepting `ttl` configuration
102
+ parameter (off by default):
103
+
104
+ .. code-block:: python
105
+
106
+ @alru_cache(ttl=5)
107
+ async def func(arg):
108
+ return arg * 2
109
+
110
+
111
+ The library supports explicit invalidation for specific function call by
112
+ `cache_invalidate()`:
113
+
114
+ .. code-block:: python
115
+
116
+ @alru_cache(ttl=5)
117
+ async def func(arg1, arg2):
118
+ return arg1 + arg2
119
+
120
+ func.cache_invalidate(1, arg2=2)
121
+
122
+ The method returns `True` if corresponding arguments set was cached already, `False`
123
+ otherwise.
124
+
125
+ Thanks
126
+ ------
127
+
128
+ The library was donated by `Ocean S.A. <https://ocean.io/>`_
129
+
130
+ Thanks to the company for contribution.
venv/Lib/site-packages/async_lru-2.0.5.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ async_lru-2.0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
2
+ async_lru-2.0.5.dist-info/LICENSE,sha256=6mWXVnm8IJO-kl1SA1jkEJa9lv3e3uPpKRYKX6rc9XM,1226
3
+ async_lru-2.0.5.dist-info/METADATA,sha256=8xQLHb4_Zr7AvNfM1kofHWX5JjNubmBnfJeicqq790I,4485
4
+ async_lru-2.0.5.dist-info/RECORD,,
5
+ async_lru-2.0.5.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
6
+ async_lru-2.0.5.dist-info/top_level.txt,sha256=nUy-F2tq_gf0YsQKIGqHmkS_XJxU_dQlINuXZIAHTsk,10
7
+ async_lru/__init__.py,sha256=ebHg3Yib8ILqq0nNPRGG633b3kVb1ZaJFcZemYiSlEg,9425
8
+ async_lru/__pycache__/__init__.cpython-312.pyc,,
9
+ async_lru/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
venv/Lib/site-packages/async_lru-2.0.5.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (76.0.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
venv/Lib/site-packages/async_lru-2.0.5.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ async_lru
venv/Lib/site-packages/async_lru/__init__.py ADDED
@@ -0,0 +1,346 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import dataclasses
3
+ import sys
4
+ from asyncio.coroutines import _is_coroutine # type: ignore[attr-defined]
5
+ from functools import _CacheInfo, _make_key, partial, partialmethod
6
+ from typing import (
7
+ Any,
8
+ Callable,
9
+ Coroutine,
10
+ Generic,
11
+ Hashable,
12
+ Optional,
13
+ OrderedDict,
14
+ Set,
15
+ Type,
16
+ TypedDict,
17
+ TypeVar,
18
+ Union,
19
+ cast,
20
+ final,
21
+ overload,
22
+ )
23
+
24
+
25
+ if sys.version_info >= (3, 11):
26
+ from typing import Self
27
+ else:
28
+ from typing_extensions import Self
29
+
30
+
31
__version__ = "2.0.5"

# Only the decorator itself is public API.
__all__ = ("alru_cache",)


_T = TypeVar("_T")
_R = TypeVar("_R")
# Shorthand aliases: a coroutine producing _R, a callable returning such a
# coroutine, and that callable possibly wrapped in partial/partialmethod.
_Coro = Coroutine[Any, Any, _R]
_CB = Callable[..., _Coro[_R]]
_CBP = Union[_CB[_R], "partial[_Coro[_R]]", "partialmethod[_Coro[_R]]"]
41
+
42
+
43
@final
class _CacheParameters(TypedDict):
    # Snapshot of a wrapper's configuration and runtime state, as returned by
    # _LRUCacheWrapper.cache_parameters().
    typed: bool
    maxsize: Optional[int]
    tasks: int
    closed: bool
49
+
50
+
51
@final
@dataclasses.dataclass
class _CacheItem(Generic[_R]):
    # fut resolves to the cached result (or its exception); later_call is the
    # scheduled TTL-expiry handle, or None when no expiry is pending.
    fut: "asyncio.Future[_R]"
    later_call: Optional[asyncio.Handle]

    def cancel(self) -> None:
        # Cancel a pending TTL-expiry callback, if any, and forget the handle.
        if self.later_call is not None:
            self.later_call.cancel()
            self.later_call = None
61
+
62
+
63
@final
class _LRUCacheWrapper(Generic[_R]):
    """
    The callable returned by ``alru_cache``: an async LRU cache around a
    single coroutine function, with optional TTL-based entry expiry.
    Concurrent calls with the same key share one in-flight task.
    """

    def __init__(
        self,
        fn: _CB[_R],
        maxsize: Optional[int],
        typed: bool,
        ttl: Optional[float],
    ) -> None:
        # Copy wrapper metadata from the wrapped function, tolerating missing
        # attributes (like functools.update_wrapper, but attribute-by-attribute).
        try:
            self.__module__ = fn.__module__
        except AttributeError:
            pass
        try:
            self.__name__ = fn.__name__
        except AttributeError:
            pass
        try:
            self.__qualname__ = fn.__qualname__
        except AttributeError:
            pass
        try:
            self.__doc__ = fn.__doc__
        except AttributeError:
            pass
        try:
            self.__annotations__ = fn.__annotations__
        except AttributeError:
            pass
        try:
            self.__dict__.update(fn.__dict__)
        except AttributeError:
            pass
        # set __wrapped__ last so we don't inadvertently copy it
        # from the wrapped function when updating __dict__
        self._is_coroutine = _is_coroutine
        self.__wrapped__ = fn
        self.__maxsize = maxsize
        self.__typed = typed
        self.__ttl = ttl
        # Insertion-ordered mapping: least-recently-used entries at the front.
        self.__cache: OrderedDict[Hashable, _CacheItem[_R]] = OrderedDict()
        self.__closed = False
        self.__hits = 0
        self.__misses = 0
        # In-flight tasks, tracked so cache_close() can cancel or await them.
        self.__tasks: Set["asyncio.Task[_R]"] = set()

    def cache_invalidate(self, /, *args: Hashable, **kwargs: Any) -> bool:
        # Drop the entry for these call arguments; True if one existed.
        key = _make_key(args, kwargs, self.__typed)

        cache_item = self.__cache.pop(key, None)
        if cache_item is None:
            return False
        else:
            cache_item.cancel()
            return True

    def cache_clear(self) -> None:
        # Reset statistics and drop every entry plus any pending TTL callbacks.
        self.__hits = 0
        self.__misses = 0

        for c in self.__cache.values():
            if c.later_call:
                c.later_call.cancel()
        self.__cache.clear()
        self.__tasks.clear()

    async def cache_close(self, *, wait: bool = False) -> None:
        # Refuse new calls; cancel in-flight tasks (or, with wait=True, let
        # them finish) and await their completion either way.
        self.__closed = True

        tasks = list(self.__tasks)
        if not tasks:
            return

        if not wait:
            for task in tasks:
                if not task.done():
                    task.cancel()

        await asyncio.gather(*tasks, return_exceptions=True)

    def cache_info(self) -> _CacheInfo:
        # Mirror functools.lru_cache's CacheInfo tuple.
        return _CacheInfo(
            self.__hits,
            self.__misses,
            self.__maxsize,
            len(self.__cache),
        )

    def cache_parameters(self) -> _CacheParameters:
        return _CacheParameters(
            maxsize=self.__maxsize,
            typed=self.__typed,
            tasks=len(self.__tasks),
            closed=self.__closed,
        )

    def _cache_hit(self, key: Hashable) -> None:
        self.__hits += 1
        # Promote to the most-recently-used end so it is evicted last.
        self.__cache.move_to_end(key)

    def _cache_miss(self, key: Hashable) -> None:
        self.__misses += 1

    def _task_done_callback(
        self, fut: "asyncio.Future[_R]", key: Hashable, task: "asyncio.Task[_R]"
    ) -> None:
        # Runs when the wrapped coroutine's task finishes: propagate its
        # outcome to the shared future and schedule TTL expiry on success.
        self.__tasks.discard(task)

        if task.cancelled():
            fut.cancel()
            self.__cache.pop(key, None)
            return

        exc = task.exception()
        if exc is not None:
            # Failures are not cached: drop the entry so later calls retry.
            fut.set_exception(exc)
            self.__cache.pop(key, None)
            return

        cache_item = self.__cache.get(key)
        if self.__ttl is not None and cache_item is not None:
            loop = asyncio.get_running_loop()
            # Expire the entry after ttl seconds by popping it from the cache.
            cache_item.later_call = loop.call_later(
                self.__ttl, self.__cache.pop, key, None
            )

        fut.set_result(task.result())

    async def __call__(self, /, *fn_args: Any, **fn_kwargs: Any) -> _R:
        # Return the cached result, joining a shared in-flight computation if
        # one exists, or start a new task on a miss.
        if self.__closed:
            raise RuntimeError(f"alru_cache is closed for {self}")

        loop = asyncio.get_running_loop()

        key = _make_key(fn_args, fn_kwargs, self.__typed)

        cache_item = self.__cache.get(key)

        if cache_item is not None:
            self._cache_hit(key)
            if not cache_item.fut.done():
                # Shield so one caller's cancellation doesn't cancel the
                # future shared by all concurrent callers.
                return await asyncio.shield(cache_item.fut)

            return cache_item.fut.result()

        fut = loop.create_future()
        coro = self.__wrapped__(*fn_args, **fn_kwargs)
        task: asyncio.Task[_R] = loop.create_task(coro)
        self.__tasks.add(task)
        task.add_done_callback(partial(self._task_done_callback, fut, key))

        self.__cache[key] = _CacheItem(fut, None)

        if self.__maxsize is not None and len(self.__cache) > self.__maxsize:
            # Evict the least-recently-used entry (front of the OrderedDict).
            dropped_key, cache_item = self.__cache.popitem(last=False)
            cache_item.cancel()

        self._cache_miss(key)
        return await asyncio.shield(fut)

    def __get__(
        self, instance: _T, owner: Optional[Type[_T]]
    ) -> Union[Self, "_LRUCacheWrapperInstanceMethod[_R, _T]"]:
        # Descriptor protocol: when accessed through an instance, return a
        # bound view so decorated methods cache per call arguments + instance.
        if owner is None:
            return self
        else:
            return _LRUCacheWrapperInstanceMethod(self, instance)
230
+
231
+
232
@final
class _LRUCacheWrapperInstanceMethod(Generic[_R, _T]):
    """
    Bound-method view of an ``_LRUCacheWrapper``: prepends the instance to
    every call and delegates all cache management to the shared wrapper.
    """

    def __init__(
        self,
        wrapper: _LRUCacheWrapper[_R],
        instance: _T,
    ) -> None:
        # Copy wrapper metadata, tolerating missing attributes, mirroring
        # _LRUCacheWrapper.__init__.
        try:
            self.__module__ = wrapper.__module__
        except AttributeError:
            pass
        try:
            self.__name__ = wrapper.__name__
        except AttributeError:
            pass
        try:
            self.__qualname__ = wrapper.__qualname__
        except AttributeError:
            pass
        try:
            self.__doc__ = wrapper.__doc__
        except AttributeError:
            pass
        try:
            self.__annotations__ = wrapper.__annotations__
        except AttributeError:
            pass
        try:
            self.__dict__.update(wrapper.__dict__)
        except AttributeError:
            pass
        # set __wrapped__ last so we don't inadvertently copy it
        # from the wrapped function when updating __dict__
        self._is_coroutine = _is_coroutine
        self.__wrapped__ = wrapper.__wrapped__
        self.__instance = instance
        self.__wrapper = wrapper

    def cache_invalidate(self, /, *args: Hashable, **kwargs: Any) -> bool:
        # The bound instance is part of the cache key.
        return self.__wrapper.cache_invalidate(self.__instance, *args, **kwargs)

    def cache_clear(self) -> None:
        self.__wrapper.cache_clear()

    async def cache_close(
        self, *, cancel: bool = False, return_exceptions: bool = True
    ) -> None:
        # NOTE(review): the cancel/return_exceptions parameters are accepted
        # but not forwarded — the shared wrapper is closed with its defaults.
        await self.__wrapper.cache_close()

    def cache_info(self) -> _CacheInfo:
        return self.__wrapper.cache_info()

    def cache_parameters(self) -> _CacheParameters:
        return self.__wrapper.cache_parameters()

    async def __call__(self, /, *fn_args: Any, **fn_kwargs: Any) -> _R:
        # Delegate with the instance injected as the first positional argument.
        return await self.__wrapper(self.__instance, *fn_args, **fn_kwargs)
289
+
290
+
291
def _make_wrapper(
    maxsize: Optional[int],
    typed: bool,
    ttl: Optional[float] = None,
) -> Callable[[_CBP[_R]], _LRUCacheWrapper[_R]]:
    # Build the decorator that wraps a coroutine function (possibly behind
    # partial/partialmethod layers) in an _LRUCacheWrapper.
    def wrapper(fn: _CBP[_R]) -> _LRUCacheWrapper[_R]:
        origin = fn

        # Unwrap partial/partialmethod layers to find the underlying callable.
        while isinstance(origin, (partial, partialmethod)):
            origin = origin.func

        if not asyncio.iscoroutinefunction(origin):
            raise RuntimeError(f"Coroutine function is required, got {fn!r}")

        # functools.partialmethod support
        if hasattr(fn, "_make_unbound_method"):
            fn = fn._make_unbound_method()

        return _LRUCacheWrapper(cast(_CB[_R], fn), maxsize, typed, ttl)

    return wrapper
312
+
313
+
314
@overload
def alru_cache(
    maxsize: Optional[int] = 128,
    typed: bool = False,
    *,
    ttl: Optional[float] = None,
) -> Callable[[_CBP[_R]], _LRUCacheWrapper[_R]]:
    ...


@overload
def alru_cache(
    maxsize: _CBP[_R],
    /,
) -> _LRUCacheWrapper[_R]:
    ...


def alru_cache(
    maxsize: Union[Optional[int], _CBP[_R]] = 128,
    typed: bool = False,
    *,
    ttl: Optional[float] = None,
) -> Union[Callable[[_CBP[_R]], _LRUCacheWrapper[_R]], _LRUCacheWrapper[_R]]:
    """
    LRU cache decorator for coroutine functions.

    Supports both ``@alru_cache`` (bare) and ``@alru_cache(maxsize=..., ...)``
    usage, mirroring ``functools.lru_cache``.
    """
    if maxsize is None or isinstance(maxsize, int):
        # Called with configuration arguments: return the real decorator.
        return _make_wrapper(maxsize, typed, ttl)
    else:
        # Bare-decorator usage: ``maxsize`` is actually the function itself.
        fn = cast(_CB[_R], maxsize)

        if callable(fn) or hasattr(fn, "_make_unbound_method"):
            # Apply the defaults (maxsize=128, typed=False, no TTL).
            return _make_wrapper(128, False, None)(fn)

        raise NotImplementedError(f"{fn!r} decorating is not supported")
venv/Lib/site-packages/async_lru/py.typed ADDED
File without changes
venv/Lib/site-packages/attr/__init__.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ Classes Without Boilerplate
5
+ """
6
+
7
+ from functools import partial
8
+ from typing import Callable, Literal, Protocol
9
+
10
+ from . import converters, exceptions, filters, setters, validators
11
+ from ._cmp import cmp_using
12
+ from ._config import get_run_validators, set_run_validators
13
+ from ._funcs import asdict, assoc, astuple, has, resolve_types
14
+ from ._make import (
15
+ NOTHING,
16
+ Attribute,
17
+ Converter,
18
+ Factory,
19
+ _Nothing,
20
+ attrib,
21
+ attrs,
22
+ evolve,
23
+ fields,
24
+ fields_dict,
25
+ make_class,
26
+ validate,
27
+ )
28
+ from ._next_gen import define, field, frozen, mutable
29
+ from ._version_info import VersionInfo
30
+
31
+
32
# Short historical aliases for the primary decorator / field definer.
s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)


class AttrsInstance(Protocol):
    """Runtime marker protocol for attrs-decorated instances; detail lives in the stubs."""
    pass


# Convenience alias for the type of the NOTHING sentinel.
NothingType = Literal[_Nothing.NOTHING]

__all__ = [
    "NOTHING",
    "Attribute",
    "AttrsInstance",
    "Converter",
    "Factory",
    "NothingType",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "cmp_using",
    "converters",
    "define",
    "evolve",
    "exceptions",
    "field",
    "fields",
    "fields_dict",
    "filters",
    "frozen",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "mutable",
    "resolve_types",
    "s",
    "set_run_validators",
    "setters",
    "validate",
    "validators",
]
79
+
80
+
81
def _make_getattr(mod_name: str) -> Callable:
    """
    Create a module-level ``__getattr__`` that resolves ``__version__`` and
    ``__version_info__`` lazily from installed package metadata, raising
    ``AttributeError`` (mentioning *mod_name*) for anything else.
    """

    def __getattr__(name: str) -> str:
        if name in ("__version__", "__version_info__"):
            # Resolve lazily so importing attr doesn't pay the metadata cost.
            from importlib.metadata import metadata

            meta = metadata("attrs")

            if name == "__version_info__":
                return VersionInfo._from_version_string(meta["version"])

            return meta["version"]

        msg = f"module {mod_name} has no attribute {name}"
        raise AttributeError(msg)

    return __getattr__


__getattr__ = _make_getattr(__name__)
venv/Lib/site-packages/attr/__init__.pyi ADDED
@@ -0,0 +1,389 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import enum
2
+ import sys
3
+
4
+ from typing import (
5
+ Any,
6
+ Callable,
7
+ Generic,
8
+ Literal,
9
+ Mapping,
10
+ Protocol,
11
+ Sequence,
12
+ TypeVar,
13
+ overload,
14
+ )
15
+
16
+ # `import X as X` is required to make these public
17
+ from . import converters as converters
18
+ from . import exceptions as exceptions
19
+ from . import filters as filters
20
+ from . import setters as setters
21
+ from . import validators as validators
22
+ from ._cmp import cmp_using as cmp_using
23
+ from ._typing_compat import AttrsInstance_
24
+ from ._version_info import VersionInfo
25
+ from attrs import (
26
+ define as define,
27
+ field as field,
28
+ mutable as mutable,
29
+ frozen as frozen,
30
+ _EqOrderType,
31
+ _ValidatorType,
32
+ _ConverterType,
33
+ _ReprArgType,
34
+ _OnSetAttrType,
35
+ _OnSetAttrArgType,
36
+ _FieldTransformer,
37
+ _ValidatorArgType,
38
+ )
39
+
40
+ if sys.version_info >= (3, 10):
41
+ from typing import TypeGuard, TypeAlias
42
+ else:
43
+ from typing_extensions import TypeGuard, TypeAlias
44
+
45
+ if sys.version_info >= (3, 11):
46
+ from typing import dataclass_transform
47
+ else:
48
+ from typing_extensions import dataclass_transform
49
+
# Package metadata attributes; resolved lazily at runtime via module __getattr__.
__version__: str
__version_info__: VersionInfo
__title__: str
__description__: str
__url__: str
__uri__: str
__author__: str
__email__: str
__license__: str
__copyright__: str

_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)

# Predicate used when filtering fields: receives the attribute and its value.
_FilterType = Callable[["Attribute[_T]", _T], bool]

# We subclass this here to keep the protocol's qualified name clean.
class AttrsInstance(AttrsInstance_, Protocol):
    pass

_A = TypeVar("_A", bound=type[AttrsInstance])

class _Nothing(enum.Enum):
    NOTHING = enum.auto()

NOTHING = _Nothing.NOTHING
NothingType: TypeAlias = Literal[_Nothing.NOTHING]

# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.

@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
    factory: Callable[[Any], _T],
    takes_self: Literal[True],
) -> _T: ...
@overload
def Factory(
    factory: Callable[[], _T],
    takes_self: Literal[False],
) -> _T: ...

In = TypeVar("In")
Out = TypeVar("Out")

class Converter(Generic[In, Out]):
    # Overloads encode which extra arguments (the instance and/or the
    # Attribute) the converter callable is declared to accept.
    @overload
    def __init__(self, converter: Callable[[In], Out]) -> None: ...
    @overload
    def __init__(
        self,
        converter: Callable[[In, AttrsInstance, Attribute], Out],
        *,
        takes_self: Literal[True],
        takes_field: Literal[True],
    ) -> None: ...
    @overload
    def __init__(
        self,
        converter: Callable[[In, Attribute], Out],
        *,
        takes_field: Literal[True],
    ) -> None: ...
    @overload
    def __init__(
        self,
        converter: Callable[[In, AttrsInstance], Out],
        *,
        takes_self: Literal[True],
    ) -> None: ...

class Attribute(Generic[_T]):
    # Static description of a single attrs field.
    name: str
    default: _T | None
    validator: _ValidatorType[_T] | None
    repr: _ReprArgType
    cmp: _EqOrderType
    eq: _EqOrderType
    order: _EqOrderType
    hash: bool | None
    init: bool
    converter: Converter | None
    metadata: dict[Any, Any]
    type: type[_T] | None
    kw_only: bool
    on_setattr: _OnSetAttrType
    alias: str | None

    def evolve(self, **changes: Any) -> "Attribute[Any]": ...

# NOTE: We had several choices for the annotation to use for type arg:
# 1) Type[_T]
# - Pros: Handles simple cases correctly
# - Cons: Might produce less informative errors in the case of conflicting
# TypeVars e.g. `attr.ib(default='bad', type=int)`
# 2) Callable[..., _T]
# - Pros: Better error messages than #1 for conflicting TypeVars
# - Cons: Terrible error messages for validator checks.
# e.g. attr.ib(type=int, validator=validate_str)
# -> error: Cannot infer function type argument
# 3) type (and do all of the work in the mypy plugin)
# - Pros: Simple here, and we could customize the plugin with our own errors.
# - Cons: Would need to write mypy plugin code to handle all the cases.
# We chose option #1.

# `attr` lies about its return type to make the following possible:
# attr() -> Any
# attr(8) -> int
# attr(validator=<some callable>) -> Whatever the callable expects.
# This makes this type of assignments possible:
# x: int = attr(8)
#
# This form catches explicit None or no default but with no other arguments
# returns Any.
@overload
def attrib(
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: None = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def attrib(
    default: None = ...,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: type[_T] | None = ...,
    converter: _ConverterType
    | list[_ConverterType]
    | tuple[_ConverterType]
    | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def attrib(
    default: _T,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: type[_T] | None = ...,
    converter: _ConverterType
    | list[_ConverterType]
    | tuple[_ConverterType]
    | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> _T: ...
+
233
+ # This form covers type=non-Type: e.g. forward references (str), Any
234
+ @overload
235
+ def attrib(
236
+ default: _T | None = ...,
237
+ validator: _ValidatorArgType[_T] | None = ...,
238
+ repr: _ReprArgType = ...,
239
+ cmp: _EqOrderType | None = ...,
240
+ hash: bool | None = ...,
241
+ init: bool = ...,
242
+ metadata: Mapping[Any, Any] | None = ...,
243
+ type: object = ...,
244
+ converter: _ConverterType
245
+ | list[_ConverterType]
246
+ | tuple[_ConverterType]
247
+ | None = ...,
248
+ factory: Callable[[], _T] | None = ...,
249
+ kw_only: bool = ...,
250
+ eq: _EqOrderType | None = ...,
251
+ order: _EqOrderType | None = ...,
252
+ on_setattr: _OnSetAttrArgType | None = ...,
253
+ alias: str | None = ...,
254
+ ) -> Any: ...
255
# Applied directly to a class (``@attrs`` without parentheses): the class
# itself is returned.
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
    maybe_cls: _C,
    these: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
    unsafe_hash: bool | None = ...,
) -> _C: ...

# Called with keyword arguments (``@attrs(...)``): returns a class decorator.
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
    maybe_cls: None = ...,
    these: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
    unsafe_hash: bool | None = ...,
) -> Callable[[_C], _C]: ...
311
def fields(cls: type[AttrsInstance]) -> Any: ...
def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
    cls: _A,
    globalns: dict[str, Any] | None = ...,
    localns: dict[str, Any] | None = ...,
    attribs: list[Attribute[Any]] | None = ...,
    include_extras: bool = ...,
) -> _A: ...

# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
    name: str,
    attrs: list[str] | tuple[str, ...] | dict[str, Any],
    bases: tuple[type, ...] = ...,
    class_body: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    collect_by_mro: bool = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
) -> type: ...

# _funcs --

# TODO: add support for returning TypedDict from the mypy plugin
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: _FilterType[Any] | None = ...,
    dict_factory: type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
    tuple_keys: bool | None = ...,
) -> dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: _FilterType[Any] | None = ...,
    tuple_factory: type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> tuple[Any, ...]: ...
def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

# _config --

def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...

# aliases --

s = attributes = attrs
ib = attr = attrib
dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
venv/Lib/site-packages/attr/_cmp.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+
4
+ import functools
5
+ import types
6
+
7
+ from ._make import __ne__
8
+
9
+
10
+ _operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
11
+
12
+
13
def cmp_using(
    eq=None,
    lt=None,
    le=None,
    gt=None,
    ge=None,
    require_same_type=True,
    class_name="Comparable",
):
    """
    Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
    and ``cmp`` arguments to customize field comparison.

    The resulting class will have a full set of ordering methods if at least
    one of ``{lt, le, gt, ge}`` and ``eq`` are provided.

    Args:
        eq (typing.Callable | None):
            Callable used to evaluate equality of two objects.

        lt (typing.Callable | None):
            Callable used to evaluate whether one object is less than another
            object.

        le (typing.Callable | None):
            Callable used to evaluate whether one object is less than or equal
            to another object.

        gt (typing.Callable | None):
            Callable used to evaluate whether one object is greater than
            another object.

        ge (typing.Callable | None):
            Callable used to evaluate whether one object is greater than or
            equal to another object.

        require_same_type (bool):
            When `True`, equality and ordering methods will return
            `NotImplemented` if objects are not of the same type.

        class_name (str | None): Name of class. Defaults to "Comparable".

    Raises:
        ValueError: If an ordering callable is given without *eq* -- ordering
            cannot be completed without equality.

    See `comparison` for more details.

    .. versionadded:: 21.1.0
    """

    body = {
        "__slots__": ["value"],
        "__init__": _make_init(),
        "_requirements": [],
        "_is_comparable_to": _is_comparable_to,
    }

    # Add operations.
    num_order_functions = 0
    has_eq_function = False

    if eq is not None:
        has_eq_function = True
        body["__eq__"] = _make_operator("eq", eq)
        body["__ne__"] = __ne__

    if lt is not None:
        num_order_functions += 1
        body["__lt__"] = _make_operator("lt", lt)

    if le is not None:
        num_order_functions += 1
        body["__le__"] = _make_operator("le", le)

    if gt is not None:
        num_order_functions += 1
        body["__gt__"] = _make_operator("gt", gt)

    if ge is not None:
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = types.new_class(
        class_name, (object,), {}, lambda ns: ns.update(body)
    )

    # Add same type requirement.
    if require_same_type:
        type_._requirements.append(_check_same_type)

    # Add total ordering if at least one (but not all four) ordering
    # operations were defined -- functools.total_ordering fills in the rest.
    if 0 < num_order_functions < 4:
        if not has_eq_function:
            # functools.total_ordering requires __eq__ to be defined,
            # so raise early error here to keep a nice stack.
            # (Fixed garbled wording of the original message:
            # "eq must be define is order to ...".)
            msg = (
                "eq must be defined in order to complete ordering from "
                "lt, le, gt, ge."
            )
            raise ValueError(msg)
        type_ = functools.total_ordering(type_)

    return type_
110
+
111
+
112
+ def _make_init():
113
+ """
114
+ Create __init__ method.
115
+ """
116
+
117
+ def __init__(self, value):
118
+ """
119
+ Initialize object with *value*.
120
+ """
121
+ self.value = value
122
+
123
+ return __init__
124
+
125
+
126
def _make_operator(name, func):
    """
    Build a rich-comparison method named ``__<name>__`` that delegates to
    *func*, comparing the wrapped ``.value`` attributes of both operands.
    """

    def method(self, other):
        # Respect the registered requirements (e.g. same-type check) first.
        if not self._is_comparable_to(other):
            return NotImplemented

        outcome = func(self.value, other.value)
        # Propagate NotImplemented from the user-supplied callable as well.
        return NotImplemented if outcome is NotImplemented else outcome

    method.__name__ = f"__{name}__"
    method.__doc__ = (
        f"Return a {_operation_names[name]} b. Computed by attrs."
    )

    return method
147
+
148
+
149
+ def _is_comparable_to(self, other):
150
+ """
151
+ Check whether `other` is comparable to `self`.
152
+ """
153
+ return all(func(self, other) for func in self._requirements)
154
+
155
+
156
+ def _check_same_type(self, other):
157
+ """
158
+ Return True if *self* and *other* are of the same type, False otherwise.
159
+ """
160
+ return other.value.__class__ is self.value.__class__
venv/Lib/site-packages/attr/_cmp.pyi ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from typing import Any, Callable

# Shape shared by all comparison callables: (a, b) -> bool.
_CompareWithType = Callable[[Any, Any], bool]

def cmp_using(
    eq: _CompareWithType | None = ...,
    lt: _CompareWithType | None = ...,
    le: _CompareWithType | None = ...,
    gt: _CompareWithType | None = ...,
    ge: _CompareWithType | None = ...,
    require_same_type: bool = ...,
    class_name: str = ...,
) -> type: ...
venv/Lib/site-packages/attr/_compat.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ import inspect
4
+ import platform
5
+ import sys
6
+ import threading
7
+
8
+ from collections.abc import Mapping, Sequence # noqa: F401
9
+ from typing import _GenericAlias
10
+
11
+
12
+ PYPY = platform.python_implementation() == "PyPy"
13
+ PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
14
+ PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
15
+ PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
16
+ PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
17
+ PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
18
+ PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
19
+
20
+
21
if PY_3_14_PLUS:  # pragma: no cover
    import annotationlib

    # Python 3.14+ ships annotationlib (PEP 649/749); use its canonical
    # accessor instead of reading __annotations__ out of the class dict.
    _get_annotations = annotationlib.get_annotations

else:

    def _get_annotations(cls):
        """
        Get annotations for *cls*.

        Looks only at *cls* itself (via its ``__dict__``), deliberately
        ignoring inherited annotations; returns ``{}`` when none exist.
        """
        return cls.__dict__.get("__annotations__", {})
33
+
34
+
35
+ class _AnnotationExtractor:
36
+ """
37
+ Extract type annotations from a callable, returning None whenever there
38
+ is none.
39
+ """
40
+
41
+ __slots__ = ["sig"]
42
+
43
+ def __init__(self, callable):
44
+ try:
45
+ self.sig = inspect.signature(callable)
46
+ except (ValueError, TypeError): # inspect failed
47
+ self.sig = None
48
+
49
+ def get_first_param_type(self):
50
+ """
51
+ Return the type annotation of the first argument if it's not empty.
52
+ """
53
+ if not self.sig:
54
+ return None
55
+
56
+ params = list(self.sig.parameters.values())
57
+ if params and params[0].annotation is not inspect.Parameter.empty:
58
+ return params[0].annotation
59
+
60
+ return None
61
+
62
+ def get_return_type(self):
63
+ """
64
+ Return the return type if it's not empty.
65
+ """
66
+ if (
67
+ self.sig
68
+ and self.sig.return_annotation is not inspect.Signature.empty
69
+ ):
70
+ return self.sig.return_annotation
71
+
72
+ return None
73
+
74
+
75
+ # Thread-local global to track attrs instances which are already being repr'd.
76
+ # This is needed because there is no other (thread-safe) way to pass info
77
+ # about the instances that are already being repr'd through the call stack
78
+ # in order to ensure we don't perform infinite recursion.
79
+ #
80
+ # For instance, if an instance contains a dict which contains that instance,
81
+ # we need to know that we're already repr'ing the outside instance from within
82
+ # the dict's repr() call.
83
+ #
84
+ # This lives here rather than in _make.py so that the functions in _make.py
85
+ # don't have a direct reference to the thread-local in their globals dict.
86
+ # If they have such a reference, it breaks cloudpickle.
87
+ repr_context = threading.local()
88
+
89
+
90
def get_generic_base(cl):
    """If this is a generic class (A[str]), return the generic base for it."""
    # Only typing._GenericAlias instances (exact class match) qualify.
    if cl.__class__ is not _GenericAlias:
        return None
    return cl.__origin__
venv/Lib/site-packages/attr/_config.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ __all__ = ["get_run_validators", "set_run_validators"]
4
+
5
+ _run_validators = True
6
+
7
+
8
def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
       moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
       instead.
    """
    global _run_validators

    # Reject truthy non-bools explicitly so callers get a clear error.
    if not isinstance(run, bool):
        msg = "'run' must be bool."
        raise TypeError(msg)

    _run_validators = run
21
+
22
+
23
def get_run_validators():
    """
    Return whether or not validators are run.

    Reads the module-level ``_run_validators`` flag toggled by
    `set_run_validators`.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
       moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
       instead.
    """
    return _run_validators
venv/Lib/site-packages/attr/_funcs.py ADDED
@@ -0,0 +1,468 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+
4
+ import copy
5
+
6
+ from ._compat import PY_3_9_PLUS, get_generic_base
7
+ from ._make import _OBJ_SETATTR, NOTHING, fields
8
+ from .exceptions import AttrsAttributeNotFoundError
9
+
10
+
11
def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
    value_serializer=None,
):
    """
    Return the *attrs* attribute values of *inst* as a dict.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool): Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        dict_factory (~typing.Callable):
            A callable to produce dictionaries from. For example, to produce
            ordered dictionaries instead of normal Python dictionaries, pass in
            ``collections.OrderedDict``.

        retain_collection_types (bool):
            Do not convert to `list` when encountering an attribute whose type
            is `tuple` or `set`. Only meaningful if *recurse* is `True`.

        value_serializer (typing.Callable | None):
            A hook that is called for every attribute or dict key/value. It
            receives the current instance, field and value and must return the
            (updated) value. The hook is run *after* the optional *filter* has
            been applied.

    Returns:
        Return type of *dict_factory*.

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    .. versionadded:: 21.3.0
       If a dict has a collection for a key, it is serialized as a tuple.
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        # The filter runs before the serializer: dropped attributes never
        # reach the value_serializer hook.
        if filter is not None and not filter(a, v):
            continue

        if value_serializer is not None:
            v = value_serializer(inst, a, v)

        if recurse is True:
            if has(v.__class__):
                # Nested attrs instance: recurse with the same options.
                rv[a.name] = asdict(
                    v,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain_collection_types is True else list
                items = [
                    _asdict_anything(
                        i,
                        is_key=False,
                        filter=filter,
                        dict_factory=dict_factory,
                        retain_collection_types=retain_collection_types,
                        value_serializer=value_serializer,
                    )
                    for i in v
                ]
                try:
                    rv[a.name] = cf(items)
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (as happens for a namedtuple, whose
                    # constructor takes positional fields, not an iterable)
                    rv[a.name] = cf(*items)
            elif isinstance(v, dict):
                df = dict_factory
                rv[a.name] = df(
                    (
                        # Keys are serialized with is_key=True so that
                        # collections become (hashable) tuples.
                        _asdict_anything(
                            kk,
                            is_key=True,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            is_key=False,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                    )
                    for kk, vv in v.items()
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv
132
+
133
+
134
def _asdict_anything(
    val,
    is_key,
    filter,
    dict_factory,
    retain_collection_types,
    value_serializer,
):
    """
    ``asdict`` only works on attrs instances, this works on anything.

    *is_key* is True when *val* is used as a dict key, in which case
    collections are converted to (hashable) tuples instead of lists.
    The remaining parameters have the same meaning as in `asdict`.
    """
    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(
            val,
            recurse=True,
            filter=filter,
            dict_factory=dict_factory,
            retain_collection_types=retain_collection_types,
            value_serializer=value_serializer,
        )
    elif isinstance(val, (tuple, list, set, frozenset)):
        if retain_collection_types is True:
            cf = val.__class__
        elif is_key:
            # Dict keys must be hashable, so force a tuple.
            cf = tuple
        else:
            cf = list

        rv = cf(
            [
                _asdict_anything(
                    i,
                    is_key=False,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
                for i in val
            ]
        )
    elif isinstance(val, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(
                    kk,
                    is_key=True,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
                _asdict_anything(
                    vv,
                    is_key=False,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
            )
            for kk, vv in val.items()
        )
    else:
        rv = val
        # Scalars still pass through the serializer hook; there is no
        # instance/field context here, hence the None arguments.
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)

    return rv
205
+
206
+
207
def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the *attrs* attribute values of *inst* as a tuple.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool):
            Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        tuple_factory (~typing.Callable):
            A callable to produce tuples from. For example, to produce lists
            instead of tuples.

        retain_collection_types (bool):
            Do not convert to `list` or `dict` when encountering an attribute
            which type is `tuple`, `dict` or `set`. Only meaningful if
            *recurse* is `True`.

    Returns:
        Return type of *tuple_factory*

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                # Nested attrs instance: recurse with the same options.
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain is True else list
                items = [
                    (
                        astuple(
                            j,
                            recurse=True,
                            filter=filter,
                            tuple_factory=tuple_factory,
                            retain_collection_types=retain,
                        )
                        if has(j.__class__)
                        else j
                    )
                    for j in v
                ]
                try:
                    rv.append(cf(items))
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (as happens for a namedtuple, whose
                    # constructor takes positional fields, not an iterable)
                    rv.append(cf(*items))
            elif isinstance(v, dict):
                df = v.__class__ if retain is True else dict
                rv.append(
                    df(
                        (
                            (
                                astuple(
                                    kk,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(kk.__class__)
                                else kk
                            ),
                            (
                                astuple(
                                    vv,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(vv.__class__)
                                else vv
                            ),
                        )
                        for kk, vv in v.items()
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)

    # Avoid a pointless copy when the caller asked for a list anyway.
    return rv if tuple_factory is list else tuple_factory(rv)
324
+
325
+
326
def has(cls):
    """
    Check whether *cls* is a class with *attrs* attributes.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

    Returns:
        bool:
    """
    if getattr(cls, "__attrs_attrs__", None) is not None:
        return True

    # Not directly an attrs class -- maybe a parametrized generic (A[str])?
    generic_base = get_generic_base(cls)
    if generic_base is None:
        return False

    generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
    if generic_attrs is not None:
        # Cache the lookup on the alias so the next call takes the fast path.
        cls.__attrs_attrs__ = generic_attrs

    return generic_attrs is not None
352
+
353
+
354
def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    This is different from `evolve` that applies the changes to the arguments
    that create the new instance.

    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
    doesn't work. Therefore `assoc` is deprecated, but will not be removed.

    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251

    Args:
        inst: Instance of a class with *attrs* attributes.

        changes: Keyword changes in the new copy.

    Returns:
        A copy of inst with *changes* incorporated.

    Raises:
        attrs.exceptions.AttrsAttributeNotFoundError:
            If *attr_name* couldn't be found on *cls*.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. deprecated:: 17.1.0
        Use `attrs.evolve` instead if you can. This function will not be
        removed due to the slightly different approach compared to
        `attrs.evolve`, though.
    """
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for name, value in changes.items():
        a = getattr(attrs, name, NOTHING)
        if a is NOTHING:
            msg = f"{name} is not an attrs attribute on {new.__class__}."
            raise AttrsAttributeNotFoundError(msg)
        # Bypass __setattr__ so frozen classes can be updated too.
        _OBJ_SETATTR(new, name, value)
    return new
395
+
396
+
397
def resolve_types(
    cls, globalns=None, localns=None, attribs=None, include_extras=True
):
    """
    Resolve any strings and forward annotations in type annotations.

    This is only required if you need concrete types in :class:`Attribute`'s
    *type* field. In other words, you don't need to resolve your types if you
    only use them for static type checking.

    With no arguments, names will be looked up in the module in which the class
    was created. If this is not what you want, for example, if the name only
    exists inside a method, you may pass *globalns* or *localns* to specify
    other dictionaries in which to look up these names. See the docs of
    `typing.get_type_hints` for more details.

    Args:
        cls (type): Class to resolve.

        globalns (dict | None): Dictionary containing global variables.

        localns (dict | None): Dictionary containing local variables.

        attribs (list | None):
            List of attribs for the given class. This is necessary when calling
            from inside a ``field_transformer`` since *cls* is not an *attrs*
            class yet.

        include_extras (bool):
            Resolve more accurately, if possible. Pass ``include_extras`` to
            ``typing.get_hints``, if supported by the typing module. On
            supported Python versions (3.9+), this resolves the types more
            accurately.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class and you didn't pass any attribs.

        NameError: If types cannot be resolved because of missing variables.

    Returns:
        *cls* so you can use this function also as a class decorator. Please
        note that you have to apply it **after** `attrs.define`. That means the
        decorator has to come in the line **before** `attrs.define`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*
    .. versionadded:: 23.1.0 *include_extras*
    """
    # Since calling get_type_hints is expensive we cache whether we've
    # done it already.
    if getattr(cls, "__attrs_types_resolved__", None) != cls:
        # Imported lazily: typing is only needed on this slow path.
        import typing

        kwargs = {"globalns": globalns, "localns": localns}

        if PY_3_9_PLUS:
            kwargs["include_extras"] = include_extras

        hints = typing.get_type_hints(cls, **kwargs)
        for field in fields(cls) if attribs is None else attribs:
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _OBJ_SETATTR(field, "type", hints[field.name])
        # We store the class we resolved so that subclasses know they haven't
        # been resolved.
        cls.__attrs_types_resolved__ = cls

    # Return the class so you can use it as a decorator too.
    return cls
venv/Lib/site-packages/attr/_make.py ADDED
@@ -0,0 +1,3123 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ from __future__ import annotations
4
+
5
+ import abc
6
+ import contextlib
7
+ import copy
8
+ import enum
9
+ import inspect
10
+ import itertools
11
+ import linecache
12
+ import sys
13
+ import types
14
+ import unicodedata
15
+
16
+ from collections.abc import Callable, Mapping
17
+ from functools import cached_property
18
+ from typing import Any, NamedTuple, TypeVar
19
+
20
+ # We need to import _compat itself in addition to the _compat members to avoid
21
+ # having the thread-local in the globals here.
22
+ from . import _compat, _config, setters
23
+ from ._compat import (
24
+ PY_3_10_PLUS,
25
+ PY_3_11_PLUS,
26
+ PY_3_13_PLUS,
27
+ _AnnotationExtractor,
28
+ _get_annotations,
29
+ get_generic_base,
30
+ )
31
+ from .exceptions import (
32
+ DefaultAlreadySetError,
33
+ FrozenInstanceError,
34
+ NotAnAttrsClassError,
35
+ UnannotatedAttributeError,
36
+ )
37
+
38
+
39
+ # This is used at least twice, so cache it here.
40
+ _OBJ_SETATTR = object.__setattr__
41
+ _INIT_FACTORY_PAT = "__attr_factory_%s"
42
+ _CLASSVAR_PREFIXES = (
43
+ "typing.ClassVar",
44
+ "t.ClassVar",
45
+ "ClassVar",
46
+ "typing_extensions.ClassVar",
47
+ )
48
+ # we don't use a double-underscore prefix because that triggers
49
+ # name mangling when trying to create a slot for the field
50
+ # (when slots=True)
51
+ _HASH_CACHE_FIELD = "_attrs_cached_hash"
52
+
53
+ _EMPTY_METADATA_SINGLETON = types.MappingProxyType({})
54
+
55
+ # Unique object for unequivocal getattr() defaults.
56
+ _SENTINEL = object()
57
+
58
+ _DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate)
59
+
60
+
61
+ class _Nothing(enum.Enum):
62
+ """
63
+ Sentinel to indicate the lack of a value when `None` is ambiguous.
64
+
65
+ If extending attrs, you can use ``typing.Literal[NOTHING]`` to show
66
+ that a value may be ``NOTHING``.
67
+
68
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
69
+ .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant.
70
+ """
71
+
72
+ NOTHING = enum.auto()
73
+
74
+ def __repr__(self):
75
+ return "NOTHING"
76
+
77
+ def __bool__(self):
78
+ return False
79
+
80
+
81
+ NOTHING = _Nothing.NOTHING
82
+ """
83
+ Sentinel to indicate the lack of a value when `None` is ambiguous.
84
+
85
+ When using in 3rd party code, use `attrs.NothingType` for type annotations.
86
+ """
87
+
88
+
89
+ class _CacheHashWrapper(int):
90
+ """
91
+ An integer subclass that pickles / copies as None
92
+
93
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
94
+ serializing a potentially (even likely) invalid hash value. Since `None`
95
+ is the default value for uncalculated hashes, whenever this is copied,
96
+ the copy's value for the hash should automatically reset.
97
+
98
+ See GH #613 for more details.
99
+ """
100
+
101
+ def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008
102
+ return _none_constructor, _args
103
+
104
+
105
+ def attrib(
106
+ default=NOTHING,
107
+ validator=None,
108
+ repr=True,
109
+ cmp=None,
110
+ hash=None,
111
+ init=True,
112
+ metadata=None,
113
+ type=None,
114
+ converter=None,
115
+ factory=None,
116
+ kw_only=False,
117
+ eq=None,
118
+ order=None,
119
+ on_setattr=None,
120
+ alias=None,
121
+ ):
122
+ """
123
+ Create a new field / attribute on a class.
124
+
125
+ Identical to `attrs.field`, except it's not keyword-only.
126
+
127
+ Consider using `attrs.field` in new code (``attr.ib`` will *never* go away,
128
+ though).
129
+
130
+ .. warning::
131
+
132
+ Does **nothing** unless the class is also decorated with
133
+ `attr.s` (or similar)!
134
+
135
+
136
+ .. versionadded:: 15.2.0 *convert*
137
+ .. versionadded:: 16.3.0 *metadata*
138
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
139
+ .. versionchanged:: 17.1.0
140
+ *hash* is `None` and therefore mirrors *eq* by default.
141
+ .. versionadded:: 17.3.0 *type*
142
+ .. deprecated:: 17.4.0 *convert*
143
+ .. versionadded:: 17.4.0
144
+ *converter* as a replacement for the deprecated *convert* to achieve
145
+ consistency with other noun-based arguments.
146
+ .. versionadded:: 18.1.0
147
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
148
+ .. versionadded:: 18.2.0 *kw_only*
149
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
150
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
151
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
152
+ .. versionadded:: 19.2.0 *eq* and *order*
153
+ .. versionadded:: 20.1.0 *on_setattr*
154
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
155
+ .. versionchanged:: 21.1.0
156
+ *eq*, *order*, and *cmp* also accept a custom callable
157
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
158
+ .. versionadded:: 22.2.0 *alias*
159
+ """
160
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
161
+ cmp, eq, order, True
162
+ )
163
+
164
+ if hash is not None and hash is not True and hash is not False:
165
+ msg = "Invalid value for hash. Must be True, False, or None."
166
+ raise TypeError(msg)
167
+
168
+ if factory is not None:
169
+ if default is not NOTHING:
170
+ msg = (
171
+ "The `default` and `factory` arguments are mutually exclusive."
172
+ )
173
+ raise ValueError(msg)
174
+ if not callable(factory):
175
+ msg = "The `factory` argument must be a callable."
176
+ raise ValueError(msg)
177
+ default = Factory(factory)
178
+
179
+ if metadata is None:
180
+ metadata = {}
181
+
182
+ # Apply syntactic sugar by auto-wrapping.
183
+ if isinstance(on_setattr, (list, tuple)):
184
+ on_setattr = setters.pipe(*on_setattr)
185
+
186
+ if validator and isinstance(validator, (list, tuple)):
187
+ validator = and_(*validator)
188
+
189
+ if converter and isinstance(converter, (list, tuple)):
190
+ converter = pipe(*converter)
191
+
192
+ return _CountingAttr(
193
+ default=default,
194
+ validator=validator,
195
+ repr=repr,
196
+ cmp=None,
197
+ hash=hash,
198
+ init=init,
199
+ converter=converter,
200
+ metadata=metadata,
201
+ type=type,
202
+ kw_only=kw_only,
203
+ eq=eq,
204
+ eq_key=eq_key,
205
+ order=order,
206
+ order_key=order_key,
207
+ on_setattr=on_setattr,
208
+ alias=alias,
209
+ )
210
+
211
+
212
+ def _compile_and_eval(
213
+ script: str,
214
+ globs: dict[str, Any] | None,
215
+ locs: Mapping[str, object] | None = None,
216
+ filename: str = "",
217
+ ) -> None:
218
+ """
219
+ Evaluate the script with the given global (globs) and local (locs)
220
+ variables.
221
+ """
222
+ bytecode = compile(script, filename, "exec")
223
+ eval(bytecode, globs, locs)
224
+
225
+
226
+ def _linecache_and_compile(
227
+ script: str,
228
+ filename: str,
229
+ globs: dict[str, Any] | None,
230
+ locals: Mapping[str, object] | None = None,
231
+ ) -> dict[str, Any]:
232
+ """
233
+ Cache the script with _linecache_, compile it and return the _locals_.
234
+ """
235
+
236
+ locs = {} if locals is None else locals
237
+
238
+ # In order of debuggers like PDB being able to step through the code,
239
+ # we add a fake linecache entry.
240
+ count = 1
241
+ base_filename = filename
242
+ while True:
243
+ linecache_tuple = (
244
+ len(script),
245
+ None,
246
+ script.splitlines(True),
247
+ filename,
248
+ )
249
+ old_val = linecache.cache.setdefault(filename, linecache_tuple)
250
+ if old_val == linecache_tuple:
251
+ break
252
+
253
+ filename = f"{base_filename[:-1]}-{count}>"
254
+ count += 1
255
+
256
+ _compile_and_eval(script, globs, locs, filename)
257
+
258
+ return locs
259
+
260
+
261
+ def _make_attr_tuple_class(cls_name: str, attr_names: list[str]) -> type:
262
+ """
263
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
264
+
265
+ The subclass is a bare tuple with properties for names.
266
+
267
+ class MyClassAttributes(tuple):
268
+ __slots__ = ()
269
+ x = property(itemgetter(0))
270
+ """
271
+ attr_class_name = f"{cls_name}Attributes"
272
+ body = {}
273
+ for i, attr_name in enumerate(attr_names):
274
+
275
+ def getter(self, i=i):
276
+ return self[i]
277
+
278
+ body[attr_name] = property(getter)
279
+ return type(attr_class_name, (tuple,), body)
280
+
281
+
282
+ # Tuple class for extracted attributes from a class definition.
283
+ # `base_attrs` is a subset of `attrs`.
284
+ class _Attributes(NamedTuple):
285
+ attrs: type
286
+ base_attrs: list[Attribute]
287
+ base_attrs_map: dict[str, type]
288
+
289
+
290
+ def _is_class_var(annot):
291
+ """
292
+ Check whether *annot* is a typing.ClassVar.
293
+
294
+ The string comparison hack is used to avoid evaluating all string
295
+ annotations which would put attrs-based classes at a performance
296
+ disadvantage compared to plain old classes.
297
+ """
298
+ annot = str(annot)
299
+
300
+ # Annotation can be quoted.
301
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
302
+ annot = annot[1:-1]
303
+
304
+ return annot.startswith(_CLASSVAR_PREFIXES)
305
+
306
+
307
+ def _has_own_attribute(cls, attrib_name):
308
+ """
309
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
310
+ """
311
+ return attrib_name in cls.__dict__
312
+
313
+
314
+ def _collect_base_attrs(
315
+ cls, taken_attr_names
316
+ ) -> tuple[list[Attribute], dict[str, type]]:
317
+ """
318
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
319
+ """
320
+ base_attrs = []
321
+ base_attr_map = {} # A dictionary of base attrs to their classes.
322
+
323
+ # Traverse the MRO and collect attributes.
324
+ for base_cls in reversed(cls.__mro__[1:-1]):
325
+ for a in getattr(base_cls, "__attrs_attrs__", []):
326
+ if a.inherited or a.name in taken_attr_names:
327
+ continue
328
+
329
+ a = a.evolve(inherited=True) # noqa: PLW2901
330
+ base_attrs.append(a)
331
+ base_attr_map[a.name] = base_cls
332
+
333
+ # For each name, only keep the freshest definition i.e. the furthest at the
334
+ # back. base_attr_map is fine because it gets overwritten with every new
335
+ # instance.
336
+ filtered = []
337
+ seen = set()
338
+ for a in reversed(base_attrs):
339
+ if a.name in seen:
340
+ continue
341
+ filtered.insert(0, a)
342
+ seen.add(a.name)
343
+
344
+ return filtered, base_attr_map
345
+
346
+
347
+ def _collect_base_attrs_broken(cls, taken_attr_names):
348
+ """
349
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
350
+
351
+ N.B. *taken_attr_names* will be mutated.
352
+
353
+ Adhere to the old incorrect behavior.
354
+
355
+ Notably it collects from the front and considers inherited attributes which
356
+ leads to the buggy behavior reported in #428.
357
+ """
358
+ base_attrs = []
359
+ base_attr_map = {} # A dictionary of base attrs to their classes.
360
+
361
+ # Traverse the MRO and collect attributes.
362
+ for base_cls in cls.__mro__[1:-1]:
363
+ for a in getattr(base_cls, "__attrs_attrs__", []):
364
+ if a.name in taken_attr_names:
365
+ continue
366
+
367
+ a = a.evolve(inherited=True) # noqa: PLW2901
368
+ taken_attr_names.add(a.name)
369
+ base_attrs.append(a)
370
+ base_attr_map[a.name] = base_cls
371
+
372
+ return base_attrs, base_attr_map
373
+
374
+
375
+ def _transform_attrs(
376
+ cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
377
+ ) -> _Attributes:
378
+ """
379
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
380
+
381
+ If *these* is passed, use that and don't look for them on the class.
382
+
383
+ If *collect_by_mro* is True, collect them in the correct MRO order,
384
+ otherwise use the old -- incorrect -- order. See #428.
385
+
386
+ Return an `_Attributes`.
387
+ """
388
+ cd = cls.__dict__
389
+ anns = _get_annotations(cls)
390
+
391
+ if these is not None:
392
+ ca_list = list(these.items())
393
+ elif auto_attribs is True:
394
+ ca_names = {
395
+ name
396
+ for name, attr in cd.items()
397
+ if attr.__class__ is _CountingAttr
398
+ }
399
+ ca_list = []
400
+ annot_names = set()
401
+ for attr_name, type in anns.items():
402
+ if _is_class_var(type):
403
+ continue
404
+ annot_names.add(attr_name)
405
+ a = cd.get(attr_name, NOTHING)
406
+
407
+ if a.__class__ is not _CountingAttr:
408
+ a = attrib(a)
409
+ ca_list.append((attr_name, a))
410
+
411
+ unannotated = ca_names - annot_names
412
+ if unannotated:
413
+ raise UnannotatedAttributeError(
414
+ "The following `attr.ib`s lack a type annotation: "
415
+ + ", ".join(
416
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
417
+ )
418
+ + "."
419
+ )
420
+ else:
421
+ ca_list = sorted(
422
+ (
423
+ (name, attr)
424
+ for name, attr in cd.items()
425
+ if attr.__class__ is _CountingAttr
426
+ ),
427
+ key=lambda e: e[1].counter,
428
+ )
429
+
430
+ fca = Attribute.from_counting_attr
431
+ own_attrs = [
432
+ fca(attr_name, ca, anns.get(attr_name)) for attr_name, ca in ca_list
433
+ ]
434
+
435
+ if collect_by_mro:
436
+ base_attrs, base_attr_map = _collect_base_attrs(
437
+ cls, {a.name for a in own_attrs}
438
+ )
439
+ else:
440
+ base_attrs, base_attr_map = _collect_base_attrs_broken(
441
+ cls, {a.name for a in own_attrs}
442
+ )
443
+
444
+ if kw_only:
445
+ own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
446
+ base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
447
+
448
+ attrs = base_attrs + own_attrs
449
+
450
+ if field_transformer is not None:
451
+ attrs = tuple(field_transformer(cls, attrs))
452
+
453
+ # Check attr order after executing the field_transformer.
454
+ # Mandatory vs non-mandatory attr order only matters when they are part of
455
+ # the __init__ signature and when they aren't kw_only (which are moved to
456
+ # the end and can be mandatory or non-mandatory in any order, as they will
457
+ # be specified as keyword args anyway). Check the order of those attrs:
458
+ had_default = False
459
+ for a in (a for a in attrs if a.init is not False and a.kw_only is False):
460
+ if had_default is True and a.default is NOTHING:
461
+ msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}"
462
+ raise ValueError(msg)
463
+
464
+ if had_default is False and a.default is not NOTHING:
465
+ had_default = True
466
+
467
+ # Resolve default field alias after executing field_transformer.
468
+ # This allows field_transformer to differentiate between explicit vs
469
+ # default aliases and supply their own defaults.
470
+ for a in attrs:
471
+ if not a.alias:
472
+ # Evolve is very slow, so we hold our nose and do it dirty.
473
+ _OBJ_SETATTR.__get__(a)("alias", _default_init_alias_for(a.name))
474
+
475
+ # Create AttrsClass *after* applying the field_transformer since it may
476
+ # add or remove attributes!
477
+ attr_names = [a.name for a in attrs]
478
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
479
+
480
+ return _Attributes(AttrsClass(attrs), base_attrs, base_attr_map)
481
+
482
+
483
+ def _make_cached_property_getattr(cached_properties, original_getattr, cls):
484
+ lines = [
485
+ # Wrapped to get `__class__` into closure cell for super()
486
+ # (It will be replaced with the newly constructed class after construction).
487
+ "def wrapper(_cls):",
488
+ " __class__ = _cls",
489
+ " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):",
490
+ " func = cached_properties.get(item)",
491
+ " if func is not None:",
492
+ " result = func(self)",
493
+ " _setter = _cached_setattr_get(self)",
494
+ " _setter(item, result)",
495
+ " return result",
496
+ ]
497
+ if original_getattr is not None:
498
+ lines.append(
499
+ " return original_getattr(self, item)",
500
+ )
501
+ else:
502
+ lines.extend(
503
+ [
504
+ " try:",
505
+ " return super().__getattribute__(item)",
506
+ " except AttributeError:",
507
+ " if not hasattr(super(), '__getattr__'):",
508
+ " raise",
509
+ " return super().__getattr__(item)",
510
+ " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"",
511
+ " raise AttributeError(original_error)",
512
+ ]
513
+ )
514
+
515
+ lines.extend(
516
+ [
517
+ " return __getattr__",
518
+ "__getattr__ = wrapper(_cls)",
519
+ ]
520
+ )
521
+
522
+ unique_filename = _generate_unique_filename(cls, "getattr")
523
+
524
+ glob = {
525
+ "cached_properties": cached_properties,
526
+ "_cached_setattr_get": _OBJ_SETATTR.__get__,
527
+ "original_getattr": original_getattr,
528
+ }
529
+
530
+ return _linecache_and_compile(
531
+ "\n".join(lines), unique_filename, glob, locals={"_cls": cls}
532
+ )["__getattr__"]
533
+
534
+
535
+ def _frozen_setattrs(self, name, value):
536
+ """
537
+ Attached to frozen classes as __setattr__.
538
+ """
539
+ if isinstance(self, BaseException) and name in (
540
+ "__cause__",
541
+ "__context__",
542
+ "__traceback__",
543
+ "__suppress_context__",
544
+ "__notes__",
545
+ ):
546
+ BaseException.__setattr__(self, name, value)
547
+ return
548
+
549
+ raise FrozenInstanceError
550
+
551
+
552
+ def _frozen_delattrs(self, name):
553
+ """
554
+ Attached to frozen classes as __delattr__.
555
+ """
556
+ if isinstance(self, BaseException) and name in ("__notes__",):
557
+ BaseException.__delattr__(self, name)
558
+ return
559
+
560
+ raise FrozenInstanceError
561
+
562
+
563
+ def evolve(*args, **changes):
564
+ """
565
+ Create a new instance, based on the first positional argument with
566
+ *changes* applied.
567
+
568
+ .. tip::
569
+
570
+ On Python 3.13 and later, you can also use `copy.replace` instead.
571
+
572
+ Args:
573
+
574
+ inst:
575
+ Instance of a class with *attrs* attributes. *inst* must be passed
576
+ as a positional argument.
577
+
578
+ changes:
579
+ Keyword changes in the new copy.
580
+
581
+ Returns:
582
+ A copy of inst with *changes* incorporated.
583
+
584
+ Raises:
585
+ TypeError:
586
+ If *attr_name* couldn't be found in the class ``__init__``.
587
+
588
+ attrs.exceptions.NotAnAttrsClassError:
589
+ If *cls* is not an *attrs* class.
590
+
591
+ .. versionadded:: 17.1.0
592
+ .. deprecated:: 23.1.0
593
+ It is now deprecated to pass the instance using the keyword argument
594
+ *inst*. It will raise a warning until at least April 2024, after which
595
+ it will become an error. Always pass the instance as a positional
596
+ argument.
597
+ .. versionchanged:: 24.1.0
598
+ *inst* can't be passed as a keyword argument anymore.
599
+ """
600
+ try:
601
+ (inst,) = args
602
+ except ValueError:
603
+ msg = (
604
+ f"evolve() takes 1 positional argument, but {len(args)} were given"
605
+ )
606
+ raise TypeError(msg) from None
607
+
608
+ cls = inst.__class__
609
+ attrs = fields(cls)
610
+ for a in attrs:
611
+ if not a.init:
612
+ continue
613
+ attr_name = a.name # To deal with private attributes.
614
+ init_name = a.alias
615
+ if init_name not in changes:
616
+ changes[init_name] = getattr(inst, attr_name)
617
+
618
+ return cls(**changes)
619
+
620
+
621
+ class _ClassBuilder:
622
+ """
623
+ Iteratively build *one* class.
624
+ """
625
+
626
+ __slots__ = (
627
+ "_add_method_dunders",
628
+ "_attr_names",
629
+ "_attrs",
630
+ "_base_attr_map",
631
+ "_base_names",
632
+ "_cache_hash",
633
+ "_cls",
634
+ "_cls_dict",
635
+ "_delete_attribs",
636
+ "_frozen",
637
+ "_has_custom_setattr",
638
+ "_has_post_init",
639
+ "_has_pre_init",
640
+ "_is_exc",
641
+ "_on_setattr",
642
+ "_pre_init_has_args",
643
+ "_repr_added",
644
+ "_script_snippets",
645
+ "_slots",
646
+ "_weakref_slot",
647
+ "_wrote_own_setattr",
648
+ )
649
+
650
+ def __init__(
651
+ self,
652
+ cls: type,
653
+ these,
654
+ slots,
655
+ frozen,
656
+ weakref_slot,
657
+ getstate_setstate,
658
+ auto_attribs,
659
+ kw_only,
660
+ cache_hash,
661
+ is_exc,
662
+ collect_by_mro,
663
+ on_setattr,
664
+ has_custom_setattr,
665
+ field_transformer,
666
+ ):
667
+ attrs, base_attrs, base_map = _transform_attrs(
668
+ cls,
669
+ these,
670
+ auto_attribs,
671
+ kw_only,
672
+ collect_by_mro,
673
+ field_transformer,
674
+ )
675
+
676
+ self._cls = cls
677
+ self._cls_dict = dict(cls.__dict__) if slots else {}
678
+ self._attrs = attrs
679
+ self._base_names = {a.name for a in base_attrs}
680
+ self._base_attr_map = base_map
681
+ self._attr_names = tuple(a.name for a in attrs)
682
+ self._slots = slots
683
+ self._frozen = frozen
684
+ self._weakref_slot = weakref_slot
685
+ self._cache_hash = cache_hash
686
+ self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
687
+ self._pre_init_has_args = False
688
+ if self._has_pre_init:
689
+ # Check if the pre init method has more arguments than just `self`
690
+ # We want to pass arguments if pre init expects arguments
691
+ pre_init_func = cls.__attrs_pre_init__
692
+ pre_init_signature = inspect.signature(pre_init_func)
693
+ self._pre_init_has_args = len(pre_init_signature.parameters) > 1
694
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
695
+ self._delete_attribs = not bool(these)
696
+ self._is_exc = is_exc
697
+ self._on_setattr = on_setattr
698
+
699
+ self._has_custom_setattr = has_custom_setattr
700
+ self._wrote_own_setattr = False
701
+
702
+ self._cls_dict["__attrs_attrs__"] = self._attrs
703
+
704
+ if frozen:
705
+ self._cls_dict["__setattr__"] = _frozen_setattrs
706
+ self._cls_dict["__delattr__"] = _frozen_delattrs
707
+
708
+ self._wrote_own_setattr = True
709
+ elif on_setattr in (
710
+ _DEFAULT_ON_SETATTR,
711
+ setters.validate,
712
+ setters.convert,
713
+ ):
714
+ has_validator = has_converter = False
715
+ for a in attrs:
716
+ if a.validator is not None:
717
+ has_validator = True
718
+ if a.converter is not None:
719
+ has_converter = True
720
+
721
+ if has_validator and has_converter:
722
+ break
723
+ if (
724
+ (
725
+ on_setattr == _DEFAULT_ON_SETATTR
726
+ and not (has_validator or has_converter)
727
+ )
728
+ or (on_setattr == setters.validate and not has_validator)
729
+ or (on_setattr == setters.convert and not has_converter)
730
+ ):
731
+ # If class-level on_setattr is set to convert + validate, but
732
+ # there's no field to convert or validate, pretend like there's
733
+ # no on_setattr.
734
+ self._on_setattr = None
735
+
736
+ if getstate_setstate:
737
+ (
738
+ self._cls_dict["__getstate__"],
739
+ self._cls_dict["__setstate__"],
740
+ ) = self._make_getstate_setstate()
741
+
742
+ # tuples of script, globs, hook
743
+ self._script_snippets: list[
744
+ tuple[str, dict, Callable[[dict, dict], Any]]
745
+ ] = []
746
+ self._repr_added = False
747
+
748
+ # We want to only do this check once; in 99.9% of cases these
749
+ # exist.
750
+ if not hasattr(self._cls, "__module__") or not hasattr(
751
+ self._cls, "__qualname__"
752
+ ):
753
+ self._add_method_dunders = self._add_method_dunders_safe
754
+ else:
755
+ self._add_method_dunders = self._add_method_dunders_unsafe
756
+
757
+ def __repr__(self):
758
+ return f"<_ClassBuilder(cls={self._cls.__name__})>"
759
+
760
+ def _eval_snippets(self) -> None:
761
+ """
762
+ Evaluate any registered snippets in one go.
763
+ """
764
+ script = "\n".join([snippet[0] for snippet in self._script_snippets])
765
+ globs = {}
766
+ for _, snippet_globs, _ in self._script_snippets:
767
+ globs.update(snippet_globs)
768
+
769
+ locs = _linecache_and_compile(
770
+ script,
771
+ _generate_unique_filename(self._cls, "methods"),
772
+ globs,
773
+ )
774
+
775
+ for _, _, hook in self._script_snippets:
776
+ hook(self._cls_dict, locs)
777
+
778
+ def build_class(self):
779
+ """
780
+ Finalize class based on the accumulated configuration.
781
+
782
+ Builder cannot be used after calling this method.
783
+ """
784
+ self._eval_snippets()
785
+ if self._slots is True:
786
+ cls = self._create_slots_class()
787
+ else:
788
+ cls = self._patch_original_class()
789
+ if PY_3_10_PLUS:
790
+ cls = abc.update_abstractmethods(cls)
791
+
792
+ # The method gets only called if it's not inherited from a base class.
793
+ # _has_own_attribute does NOT work properly for classmethods.
794
+ if (
795
+ getattr(cls, "__attrs_init_subclass__", None)
796
+ and "__attrs_init_subclass__" not in cls.__dict__
797
+ ):
798
+ cls.__attrs_init_subclass__()
799
+
800
+ return cls
801
+
802
+ def _patch_original_class(self):
803
+ """
804
+ Apply accumulated methods and return the class.
805
+ """
806
+ cls = self._cls
807
+ base_names = self._base_names
808
+
809
+ # Clean class of attribute definitions (`attr.ib()`s).
810
+ if self._delete_attribs:
811
+ for name in self._attr_names:
812
+ if (
813
+ name not in base_names
814
+ and getattr(cls, name, _SENTINEL) is not _SENTINEL
815
+ ):
816
+ # An AttributeError can happen if a base class defines a
817
+ # class variable and we want to set an attribute with the
818
+ # same name by using only a type annotation.
819
+ with contextlib.suppress(AttributeError):
820
+ delattr(cls, name)
821
+
822
+ # Attach our dunder methods.
823
+ for name, value in self._cls_dict.items():
824
+ setattr(cls, name, value)
825
+
826
+ # If we've inherited an attrs __setattr__ and don't write our own,
827
+ # reset it to object's.
828
+ if not self._wrote_own_setattr and getattr(
829
+ cls, "__attrs_own_setattr__", False
830
+ ):
831
+ cls.__attrs_own_setattr__ = False
832
+
833
+ if not self._has_custom_setattr:
834
+ cls.__setattr__ = _OBJ_SETATTR
835
+
836
+ return cls
837
+
838
def _create_slots_class(self):
    """
    Build and return a replacement class that stores its attributes in
    ``__slots__`` instead of an instance ``__dict__``.
    """
    # Start from the collected class dict, minus attribute defaults and
    # the instance-dict machinery.
    cd = {
        k: v
        for k, v in self._cls_dict.items()
        if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
    }

    # If our class doesn't have its own implementation of __setattr__
    # (either from the user or by us), check the bases, if one of them has
    # an attrs-made __setattr__, that needs to be reset. We don't walk the
    # MRO because we only care about our immediate base classes.
    # XXX: This can be confused by subclassing a slotted attrs class with
    # XXX: a non-attrs class and subclass the resulting class with an attrs
    # XXX: class. See `test_slotted_confused` for details. For now that's
    # XXX: OK with us.
    if not self._wrote_own_setattr:
        cd["__attrs_own_setattr__"] = False

        if not self._has_custom_setattr:
            for parent in self._cls.__bases__:
                if parent.__dict__.get("__attrs_own_setattr__", False):
                    cd["__setattr__"] = _OBJ_SETATTR
                    break

    # Traverse the MRO to collect slot descriptors that ancestors already
    # define, and note whether __weakref__ is already available.
    inherited_slot_descriptors = {}
    weakref_inherited = False
    for ancestor in self._cls.__mro__[1:-1]:
        if ancestor.__dict__.get("__weakref__", None) is not None:
            weakref_inherited = True
        inherited_slot_descriptors.update(
            {
                name: getattr(ancestor, name)
                for name in getattr(ancestor, "__slots__", [])
            }
        )

    base_names = set(self._base_names)

    names = self._attr_names
    if (
        self._weakref_slot
        and "__weakref__" not in getattr(self._cls, "__slots__", ())
        and "__weakref__" not in names
        and not weakref_inherited
    ):
        names += ("__weakref__",)

    cached_properties = {
        name: prop.func
        for name, prop in cd.items()
        if isinstance(prop, cached_property)
    }

    # Functions whose closures reference `__class__` and must therefore be
    # re-pointed at the clone created below.
    extra_closure_functions = []
    if cached_properties:
        class_annotations = _get_annotations(self._cls)
        for name, func in cached_properties.items():
            # The cached value lives in a slot of the same name ...
            names += (name,)
            # ... so the descriptor must not shadow it.
            del cd[name]
            extra_closure_functions.append(func)
            annotation = inspect.signature(func).return_annotation
            if annotation is not inspect.Parameter.empty:
                class_annotations[name] = annotation

        original_getattr = cd.get("__getattr__")
        if original_getattr is not None:
            extra_closure_functions.append(original_getattr)

        cd["__getattr__"] = _make_cached_property_getattr(
            cached_properties, original_getattr, self._cls
        )

    # Only slot attributes that aren't inherited; duplicating inherited
    # slots wastes memory.
    slot_names = [name for name in names if name not in base_names]

    # Keep descriptors for slots that ancestors already define, so a child
    # class can still override them.
    reused_slots = {
        slot: descriptor
        for slot, descriptor in inherited_slot_descriptors.items()
        if slot in slot_names
    }
    slot_names = [name for name in slot_names if name not in reused_slots]
    cd.update(reused_slots)
    if self._cache_hash:
        slot_names.append(_HASH_CACHE_FIELD)

    cd["__slots__"] = tuple(slot_names)

    cd["__qualname__"] = self._cls.__qualname__

    # Build the clone with the same metaclass, name, and bases.
    cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)

    # Fix for <https://github.com/python-attrs/attrs/issues/102>: methods
    # that use zero-argument super() (or mention `__class__`) carry a
    # closure cell pointing at the old class; re-point those cells at the
    # clone so they keep working.
    for item in itertools.chain(
        cls.__dict__.values(), extra_closure_functions
    ):
        if isinstance(item, (classmethod, staticmethod)):
            # Class- and staticmethods hide their functions inside.
            cells = getattr(item.__func__, "__closure__", None)
        elif isinstance(item, property):
            # Best effort for properties; other descriptors are opaque.
            cells = getattr(item.fget, "__closure__", None)
        else:
            cells = getattr(item, "__closure__", None)

        if not cells:  # Catch None or the empty list.
            continue
        for cell in cells:
            try:
                match = cell.cell_contents is self._cls
            except ValueError:  # noqa: PERF203
                # ValueError: Cell is empty
                pass
            else:
                if match:
                    cell.cell_contents = cls
    return cls
+
977
def add_repr(self, ns):
    """
    Queue generation of ``__repr__`` (*ns* is the optional repr
    namespace for nested classes) and record that a repr exists.
    """
    script, globs = _make_repr_script(self._attrs, ns)

    def attach(cls_dict, globs):
        cls_dict["__repr__"] = self._add_method_dunders(globs["__repr__"])

    self._script_snippets.append((script, globs, attach))
    self._repr_added = True
    return self
+
987
def add_str(self):
    """
    Install a ``__str__`` that delegates to ``__repr__``.

    Raises:
        ValueError: If no ``__repr__`` has been generated first.
    """
    if not self._repr_added:
        msg = "__str__ can only be generated if a __repr__ exists."
        raise ValueError(msg)

    def __str__(self):
        return self.__repr__()

    self._cls_dict["__str__"] = self._add_method_dunders(__str__)
    return self
+
998
+ def _make_getstate_setstate(self):
999
+ """
1000
+ Create custom __setstate__ and __getstate__ methods.
1001
+ """
1002
+ # __weakref__ is not writable.
1003
+ state_attr_names = tuple(
1004
+ an for an in self._attr_names if an != "__weakref__"
1005
+ )
1006
+
1007
+ def slots_getstate(self):
1008
+ """
1009
+ Automatically created by attrs.
1010
+ """
1011
+ return {name: getattr(self, name) for name in state_attr_names}
1012
+
1013
+ hash_caching_enabled = self._cache_hash
1014
+
1015
+ def slots_setstate(self, state):
1016
+ """
1017
+ Automatically created by attrs.
1018
+ """
1019
+ __bound_setattr = _OBJ_SETATTR.__get__(self)
1020
+ if isinstance(state, tuple):
1021
+ # Backward compatibility with attrs instances pickled with
1022
+ # attrs versions before v22.2.0 which stored tuples.
1023
+ for name, value in zip(state_attr_names, state):
1024
+ __bound_setattr(name, value)
1025
+ else:
1026
+ for name in state_attr_names:
1027
+ if name in state:
1028
+ __bound_setattr(name, state[name])
1029
+
1030
+ # The hash code cache is not included when the object is
1031
+ # serialized, but it still needs to be initialized to None to
1032
+ # indicate that the first call to __hash__ should be a cache
1033
+ # miss.
1034
+ if hash_caching_enabled:
1035
+ __bound_setattr(_HASH_CACHE_FIELD, None)
1036
+
1037
+ return slots_getstate, slots_setstate
1038
+
1039
def make_unhashable(self):
    """
    Mark the class unhashable by setting ``__hash__`` to None.
    """
    self._cls_dict["__hash__"] = None
    return self
+
1043
def add_hash(self):
    """
    Queue generation of ``__hash__`` from the collected attributes.
    """
    script, globs = _make_hash_script(
        self._cls,
        self._attrs,
        frozen=self._frozen,
        cache_hash=self._cache_hash,
    )

    def attach_hash(cls_dict: dict, locs: dict) -> None:
        cls_dict["__hash__"] = self._add_method_dunders(locs["__hash__"])

    self._script_snippets.append((script, globs, attach_hash))

    return self
+
1058
def add_init(self):
    """
    Queue generation of ``__init__`` from the collected attributes.
    """
    script, globs, annotations = _make_init_script(
        self._cls,
        self._attrs,
        self._has_pre_init,
        self._pre_init_has_args,
        self._has_post_init,
        self._frozen,
        self._slots,
        self._cache_hash,
        self._base_attr_map,
        self._is_exc,
        self._on_setattr,
        attrs_init=False,
    )

    def attach(cls_dict, globs):
        init = globs["__init__"]
        # Annotations are attached after evaluation; the generated source
        # itself is un-annotated.
        init.__annotations__ = annotations
        cls_dict["__init__"] = self._add_method_dunders(init)

    self._script_snippets.append((script, globs, attach))

    return self
+
1083
def add_replace(self):
    """
    Install ``__replace__`` (``copy.replace`` support, Python 3.13+),
    implemented via `evolve`.
    """
    self._cls_dict["__replace__"] = self._add_method_dunders(
        lambda self, **changes: evolve(self, **changes)
    )
    return self
+
1089
def add_match_args(self):
    """
    Install ``__match_args__`` (:pep:`634` structural pattern matching),
    listing the names of all attributes that are positional ``__init__``
    arguments (``init=True`` and not ``kw_only``).

    Returns `self` so calls can be chained, consistent with the other
    ``add_*`` builder methods (the original returned ``None``; returning
    ``self`` is backward-compatible).
    """
    self._cls_dict["__match_args__"] = tuple(
        field.name
        for field in self._attrs
        if field.init and not field.kw_only
    )
    return self
+
1096
def add_attrs_init(self):
    """
    Queue generation of ``__attrs_init__`` -- the same initializer as
    ``__init__`` but under a name that doesn't clash with a user-provided
    ``__init__``.
    """
    script, globs, annotations = _make_init_script(
        self._cls,
        self._attrs,
        self._has_pre_init,
        self._pre_init_has_args,
        self._has_post_init,
        self._frozen,
        self._slots,
        self._cache_hash,
        self._base_attr_map,
        self._is_exc,
        self._on_setattr,
        attrs_init=True,
    )

    def attach(cls_dict, globs):
        init = globs["__attrs_init__"]
        init.__annotations__ = annotations
        cls_dict["__attrs_init__"] = self._add_method_dunders(init)

    self._script_snippets.append((script, globs, attach))

    return self
+
1121
def add_eq(self):
    """
    Queue generation of ``__eq__`` and install the shared ``__ne__``.
    """
    cd = self._cls_dict

    script, globs = _make_eq_script(self._attrs)

    def attach(cls_dict, globs):
        cls_dict["__eq__"] = self._add_method_dunders(globs["__eq__"])

    self._script_snippets.append((script, globs, attach))

    # __ne__ is one shared plain function; no code generation needed.
    cd["__ne__"] = __ne__

    return self
+
1135
def add_order(self):
    """
    Install ``__lt__``/``__le__``/``__gt__``/``__ge__``.
    """
    cd = self._cls_dict

    cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
        self._add_method_dunders(meth)
        for meth in _make_order(self._cls, self._attrs)
    )

    return self
+
1145
def add_setattr(self):
    """
    Install a ``__setattr__`` that runs the applicable *on_setattr* hook
    for each attribute before storing the value.

    No-op when no attribute has an effective hook.

    Raises:
        ValueError: If the user already defined a custom ``__setattr__``.
    """
    hooked = {}
    for a in self._attrs:
        on_setattr = a.on_setattr or self._on_setattr
        if on_setattr and on_setattr is not setters.NO_OP:
            hooked[a.name] = a, on_setattr

    if not hooked:
        return self

    if self._has_custom_setattr:
        # We need to write a __setattr__ but there already is one!
        msg = "Can't combine custom __setattr__ with on_setattr hooks."
        raise ValueError(msg)

    # docstring comes from _add_method_dunders
    def __setattr__(self, name, val):
        try:
            a, hook = hooked[name]
        except KeyError:
            # Attribute without a hook: store as-is.
            nval = val
        else:
            nval = hook(self, a, val)

        _OBJ_SETATTR(self, name, nval)

    self._cls_dict["__attrs_own_setattr__"] = True
    self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
    self._wrote_own_setattr = True

    return self
+
1177
+ def _add_method_dunders_unsafe(self, method: Callable) -> Callable:
1178
+ """
1179
+ Add __module__ and __qualname__ to a *method*.
1180
+ """
1181
+ method.__module__ = self._cls.__module__
1182
+
1183
+ method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"
1184
+
1185
+ method.__doc__ = (
1186
+ f"Method generated by attrs for class {self._cls.__qualname__}."
1187
+ )
1188
+
1189
+ return method
1190
+
1191
+ def _add_method_dunders_safe(self, method: Callable) -> Callable:
1192
+ """
1193
+ Add __module__ and __qualname__ to a *method* if possible.
1194
+ """
1195
+ with contextlib.suppress(AttributeError):
1196
+ method.__module__ = self._cls.__module__
1197
+
1198
+ with contextlib.suppress(AttributeError):
1199
+ method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"
1200
+
1201
+ with contextlib.suppress(AttributeError):
1202
+ method.__doc__ = f"Method generated by attrs for class {self._cls.__qualname__}."
1203
+
1204
+ return method
1205
+
1206
+
1207
+ def _determine_attrs_eq_order(cmp, eq, order, default_eq):
1208
+ """
1209
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
1210
+ values of eq and order. If *eq* is None, set it to *default_eq*.
1211
+ """
1212
+ if cmp is not None and any((eq is not None, order is not None)):
1213
+ msg = "Don't mix `cmp` with `eq' and `order`."
1214
+ raise ValueError(msg)
1215
+
1216
+ # cmp takes precedence due to bw-compatibility.
1217
+ if cmp is not None:
1218
+ return cmp, cmp
1219
+
1220
+ # If left None, equality is set to the specified default and ordering
1221
+ # mirrors equality.
1222
+ if eq is None:
1223
+ eq = default_eq
1224
+
1225
+ if order is None:
1226
+ order = eq
1227
+
1228
+ if eq is False and order is True:
1229
+ msg = "`order` can only be True if `eq` is True too."
1230
+ raise ValueError(msg)
1231
+
1232
+ return eq, order
1233
+
1234
+
1235
+ def _determine_attrib_eq_order(cmp, eq, order, default_eq):
1236
+ """
1237
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
1238
+ values of eq and order. If *eq* is None, set it to *default_eq*.
1239
+ """
1240
+ if cmp is not None and any((eq is not None, order is not None)):
1241
+ msg = "Don't mix `cmp` with `eq' and `order`."
1242
+ raise ValueError(msg)
1243
+
1244
+ def decide_callable_or_boolean(value):
1245
+ """
1246
+ Decide whether a key function is used.
1247
+ """
1248
+ if callable(value):
1249
+ value, key = True, value
1250
+ else:
1251
+ key = None
1252
+ return value, key
1253
+
1254
+ # cmp takes precedence due to bw-compatibility.
1255
+ if cmp is not None:
1256
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
1257
+ return cmp, cmp_key, cmp, cmp_key
1258
+
1259
+ # If left None, equality is set to the specified default and ordering
1260
+ # mirrors equality.
1261
+ if eq is None:
1262
+ eq, eq_key = default_eq, None
1263
+ else:
1264
+ eq, eq_key = decide_callable_or_boolean(eq)
1265
+
1266
+ if order is None:
1267
+ order, order_key = eq, eq_key
1268
+ else:
1269
+ order, order_key = decide_callable_or_boolean(order)
1270
+
1271
+ if eq is False and order is True:
1272
+ msg = "`order` can only be True if `eq` is True too."
1273
+ raise ValueError(msg)
1274
+
1275
+ return eq, eq_key, order, order_key
1276
+
1277
+
1278
+ def _determine_whether_to_implement(
1279
+ cls, flag, auto_detect, dunders, default=True
1280
+ ):
1281
+ """
1282
+ Check whether we should implement a set of methods for *cls*.
1283
+
1284
+ *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
1285
+ same as passed into @attr.s and *dunders* is a tuple of attribute names
1286
+ whose presence signal that the user has implemented it themselves.
1287
+
1288
+ Return *default* if no reason for either for or against is found.
1289
+ """
1290
+ if flag is True or flag is False:
1291
+ return flag
1292
+
1293
+ if flag is None and auto_detect is False:
1294
+ return default
1295
+
1296
+ # Logically, flag is None and auto_detect is True here.
1297
+ for dunder in dunders:
1298
+ if _has_own_attribute(cls, dunder):
1299
+ return False
1300
+
1301
+ return default
1302
+
1303
+
1304
def attrs(
    maybe_cls=None,
    these=None,
    repr_ns=None,
    repr=None,
    cmp=None,
    hash=None,
    init=None,
    slots=False,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=False,
    kw_only=False,
    cache_hash=False,
    auto_exc=False,
    eq=None,
    order=None,
    auto_detect=False,
    collect_by_mro=False,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
    unsafe_hash=None,
):
    r"""
    A class decorator that adds :term:`dunder methods` according to the
    specified attributes using `attr.ib` or the *these* argument.

    Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s``
    will *never* go away, though).

    Args:
        repr_ns (str):
            Custom namespace prefix for the generated ``repr`` of nested
            classes (a Python 2 workaround).  Pointless in Python 3 and
            therefore deprecated.

    .. caution::
        Refer to `attrs.define` for the rest of the parameters, but note
        that they can have different defaults.

        Notably, leaving *on_setattr* as `None` will **not** add any
        hooks.

    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01
       (undeprecated again in 21.1.0).
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    .. deprecated:: 24.1.0 *repr_ns*
    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
    .. versionadded:: 24.1.0
       If a class has an *inherited* classmethod called
       ``__attrs_init_subclass__``, it is executed after the class is
       created.
    """
    if repr_ns is not None:
        import warnings

        warnings.warn(
            DeprecationWarning(
                "The `repr_ns` argument is deprecated and will be removed in or after August 2025."
            ),
            stacklevel=2,
        )

    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)

    # unsafe_hash takes precedence due to PEP 681.
    if unsafe_hash is not None:
        hash = unsafe_hash

    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    def wrap(cls):
        is_frozen = frozen or _has_frozen_base_class(cls)
        is_exc = auto_exc is True and issubclass(cls, BaseException)
        has_own_setattr = auto_detect and _has_own_attribute(
            cls, "__setattr__"
        )

        if has_own_setattr and is_frozen:
            msg = "Can't freeze a class with a custom __setattr__."
            raise ValueError(msg)

        builder = _ClassBuilder(
            cls,
            these,
            slots,
            is_frozen,
            weakref_slot,
            _determine_whether_to_implement(
                cls,
                getstate_setstate,
                auto_detect,
                ("__getstate__", "__setstate__"),
                default=slots,
            ),
            auto_attribs,
            kw_only,
            cache_hash,
            is_exc,
            collect_by_mro,
            on_setattr,
            has_own_setattr,
            field_transformer,
        )

        if _determine_whether_to_implement(
            cls, repr, auto_detect, ("__repr__",)
        ):
            builder.add_repr(repr_ns)

        if str is True:
            builder.add_str()

        eq = _determine_whether_to_implement(
            cls, eq_, auto_detect, ("__eq__", "__ne__")
        )
        if not is_exc and eq is True:
            builder.add_eq()
        if not is_exc and _determine_whether_to_implement(
            cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
        ):
            builder.add_order()

        if not frozen:
            builder.add_setattr()

        nonlocal hash
        # auto_detect: a user-written __hash__ disables generation.
        if (
            hash is None
            and auto_detect is True
            and _has_own_attribute(cls, "__hash__")
        ):
            hash = False

        if hash is not True and hash is not False and hash is not None:
            # Can't use `hash in` because 1 == True for example.
            msg = "Invalid value for hash. Must be True, False, or None."
            raise TypeError(msg)

        if hash is False or (hash is None and eq is False) or is_exc:
            # Don't do anything. Should fall back to __object__'s __hash__
            # which is by id.
            if cache_hash:
                msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
                raise TypeError(msg)
        elif hash is True or (
            hash is None and eq is True and is_frozen is True
        ):
            # Build a __hash__ if told so, or if it's safe.
            builder.add_hash()
        else:
            # Raise TypeError on attempts to hash.
            if cache_hash:
                msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
                raise TypeError(msg)
            builder.make_unhashable()

        if _determine_whether_to_implement(
            cls, init, auto_detect, ("__init__",)
        ):
            builder.add_init()
        else:
            builder.add_attrs_init()
            if cache_hash:
                msg = "Invalid value for cache_hash. To use hash caching, init must be True."
                raise TypeError(msg)

        if PY_3_13_PLUS and not _has_own_attribute(cls, "__replace__"):
            builder.add_replace()

        if (
            PY_3_10_PLUS
            and match_args
            and not _has_own_attribute(cls, "__match_args__")
        ):
            builder.add_match_args()

        return builder.build_class()

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but `None` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap

    return wrap(maybe_cls)
+
1530
+
1531
+ _attrs = attrs
1532
+ """
1533
+ Internal alias so we can use it in functions that take an argument called
1534
+ *attrs*.
1535
+ """
1536
+
1537
+
1538
def _has_frozen_base_class(cls):
    """
    Return True if *cls* inherited attrs' frozen ``__setattr__`` from an
    ancestor (i.e. has a frozen base class).
    """
    return cls.__setattr__ is _frozen_setattrs
+
1545
+
1546
+ def _generate_unique_filename(cls: type, func_name: str) -> str:
1547
+ """
1548
+ Create a "filename" suitable for a function being generated.
1549
+ """
1550
+ return (
1551
+ f"<attrs generated {func_name} {cls.__module__}."
1552
+ f"{getattr(cls, '__qualname__', cls.__name__)}>"
1553
+ )
1554
+
1555
+
1556
def _make_hash_script(
    cls: type, attrs: list[Attribute], frozen: bool, cache_hash: bool
) -> tuple[str, dict]:
    """
    Build the source of a ``__hash__`` for *cls* plus the globals it
    needs.

    Only attributes with ``hash=True`` (or ``hash=None`` and ``eq=True``)
    participate.  When *cache_hash* is set, the generated method stores
    the result in ``_HASH_CACHE_FIELD``; with *frozen* it must go through
    ``object.__setattr__`` to do so.

    FIX: the ``_cache_wrapper`` keyword-default lines must only run in
    the caching branch -- executed unconditionally (as the previous text
    read) they would append ``, _cache_wrapper=...):`` after the already
    closed ``def __hash__(self):`` header in the non-caching path,
    producing invalid source.
    """
    attrs = tuple(
        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
    )

    tab = "        "

    type_hash = hash(_generate_unique_filename(cls, "hash"))
    # Custom eq_key functions must be reachable from the generated code.
    globs = {}

    hash_def = "def __hash__(self"
    hash_func = "hash(("
    closing_braces = "))"
    if not cache_hash:
        hash_def += "):"
    else:
        # Smuggle the cache wrapper in as a keyword-only default so the
        # generated code can reach it without polluting globs.
        hash_def += ", *"
        hash_def += (
            ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
        )
        hash_func = "_cache_wrapper(" + hash_func
        closing_braces += ")"

    method_lines = [hash_def]

    def append_hash_computation_lines(prefix, indent):
        """
        Generate the code for actually computing the hash code.
        Below this will either be returned directly or used to compute
        a value which is then cached, depending on the value of cache_hash
        """
        method_lines.extend(
            [
                indent + prefix + hash_func,
                indent + f"    {type_hash},",
            ]
        )

        for a in attrs:
            if a.eq_key:
                cmp_name = f"_{a.name}_key"
                globs[cmp_name] = a.eq_key
                method_lines.append(
                    indent + f"    {cmp_name}(self.{a.name}),"
                )
            else:
                method_lines.append(indent + f"    self.{a.name},")

        method_lines.append(indent + "    " + closing_braces)

    if cache_hash:
        method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:")
        if frozen:
            # Frozen classes can't assign normally.
            append_hash_computation_lines(
                f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2
            )
            method_lines.append(tab * 2 + ")")  # close __setattr__
        else:
            append_hash_computation_lines(
                f"self.{_HASH_CACHE_FIELD} = ", tab * 2
            )
        method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}")
    else:
        append_hash_computation_lines("return ", tab)

    script = "\n".join(method_lines)
    return script, globs
+
1627
+
1628
def _add_hash(cls: type, attrs: list[Attribute]):
    """
    Add a hash method to *cls*.
    """
    script, globs = _make_hash_script(
        cls, attrs, frozen=False, cache_hash=False
    )
    _compile_and_eval(
        script, globs, filename=_generate_unique_filename(cls, "__hash__")
    )
    cls.__hash__ = globs["__hash__"]
    return cls
+
1641
+
1642
+ def __ne__(self, other):
1643
+ """
1644
+ Check equality and either forward a NotImplemented or
1645
+ return the result negated.
1646
+ """
1647
+ result = self.__eq__(other)
1648
+ if result is NotImplemented:
1649
+ return NotImplemented
1650
+
1651
+ return not result
1652
+
1653
+
1654
+ def _make_eq_script(attrs: list) -> tuple[str, dict]:
1655
+ """
1656
+ Create __eq__ method for *cls* with *attrs*.
1657
+ """
1658
+ attrs = [a for a in attrs if a.eq]
1659
+
1660
+ lines = [
1661
+ "def __eq__(self, other):",
1662
+ " if other.__class__ is not self.__class__:",
1663
+ " return NotImplemented",
1664
+ ]
1665
+
1666
+ globs = {}
1667
+ if attrs:
1668
+ lines.append(" return (")
1669
+ for a in attrs:
1670
+ if a.eq_key:
1671
+ cmp_name = f"_{a.name}_key"
1672
+ # Add the key function to the global namespace
1673
+ # of the evaluated function.
1674
+ globs[cmp_name] = a.eq_key
1675
+ lines.append(
1676
+ f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})"
1677
+ )
1678
+ else:
1679
+ lines.append(f" self.{a.name} == other.{a.name}")
1680
+ if a is not attrs[-1]:
1681
+ lines[-1] = f"{lines[-1]} and"
1682
+ lines.append(" )")
1683
+ else:
1684
+ lines.append(" return True")
1685
+
1686
+ script = "\n".join(lines)
1687
+
1688
+ return script, globs
1689
+
1690
+
1691
+ def _make_order(cls, attrs):
1692
+ """
1693
+ Create ordering methods for *cls* with *attrs*.
1694
+ """
1695
+ attrs = [a for a in attrs if a.order]
1696
+
1697
+ def attrs_to_tuple(obj):
1698
+ """
1699
+ Save us some typing.
1700
+ """
1701
+ return tuple(
1702
+ key(value) if key else value
1703
+ for value, key in (
1704
+ (getattr(obj, a.name), a.order_key) for a in attrs
1705
+ )
1706
+ )
1707
+
1708
+ def __lt__(self, other):
1709
+ """
1710
+ Automatically created by attrs.
1711
+ """
1712
+ if other.__class__ is self.__class__:
1713
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
1714
+
1715
+ return NotImplemented
1716
+
1717
+ def __le__(self, other):
1718
+ """
1719
+ Automatically created by attrs.
1720
+ """
1721
+ if other.__class__ is self.__class__:
1722
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
1723
+
1724
+ return NotImplemented
1725
+
1726
+ def __gt__(self, other):
1727
+ """
1728
+ Automatically created by attrs.
1729
+ """
1730
+ if other.__class__ is self.__class__:
1731
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
1732
+
1733
+ return NotImplemented
1734
+
1735
+ def __ge__(self, other):
1736
+ """
1737
+ Automatically created by attrs.
1738
+ """
1739
+ if other.__class__ is self.__class__:
1740
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
1741
+
1742
+ return NotImplemented
1743
+
1744
+ return __lt__, __le__, __gt__, __ge__
1745
+
1746
+
1747
def _add_eq(cls, attrs=None):
    """
    Add equality methods to *cls* with *attrs*.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    script, globs = _make_eq_script(attrs)
    _compile_and_eval(
        script, globs, filename=_generate_unique_filename(cls, "__eq__")
    )
    cls.__eq__ = globs["__eq__"]
    # __ne__ is the shared plain function; no generation needed.
    cls.__ne__ = __ne__

    return cls
+
1763
+
1764
def _make_repr_script(attrs, ns) -> tuple[str, dict]:
    """
    Create the source and globs for a __repr__ and return it.
    """
    # Figure out which attributes to include, and which function to use to
    # format them. The a.repr value can be either bool or a custom
    # callable.
    attr_names_with_reprs = tuple(
        (a.name, (repr if a.repr is True else a.repr), a.init)
        for a in attrs
        if a.repr is not False
    )
    globs = {
        name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr
    }
    globs["_compat"] = _compat
    globs["AttributeError"] = AttributeError
    globs["NOTHING"] = NOTHING

    attribute_fragments = []
    for name, r, i in attr_names_with_reprs:
        # Attributes excluded from __init__ may be unset; fall back to
        # NOTHING instead of raising.
        accessor = (
            "self." + name if i else 'getattr(self, "' + name + '", NOTHING)'
        )
        fragment = (
            "%s={%s!r}" % (name, accessor)
            if r == repr
            else "%s={%s_repr(%s)}" % (name, name, accessor)
        )
        attribute_fragments.append(fragment)
    repr_fragment = ", ".join(attribute_fragments)

    if ns is None:
        cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
    else:
        cls_name_fragment = ns + ".{self.__class__.__name__}"

    # The generated repr guards against infinite recursion on cyclic
    # structures via a thread-local set of ids being repr'd.
    lines = [
        "def __repr__(self):",
        "    try:",
        "        already_repring = _compat.repr_context.already_repring",
        "    except AttributeError:",
        "        already_repring = {id(self),}",
        "        _compat.repr_context.already_repring = already_repring",
        "    else:",
        "        if id(self) in already_repring:",
        "            return '...'",
        "        else:",
        "            already_repring.add(id(self))",
        "    try:",
        f"        return f'{cls_name_fragment}({repr_fragment})'",
        "    finally:",
        "        already_repring.remove(id(self))",
    ]

    return "\n".join(lines), globs
+
1820
+
1821
def _add_repr(cls, ns=None, attrs=None):
    """
    Add a repr method to *cls*.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    script, globs = _make_repr_script(attrs, ns)
    _compile_and_eval(
        script, globs, filename=_generate_unique_filename(cls, "__repr__")
    )
    cls.__repr__ = globs["__repr__"]
    return cls
+
1835
+
1836
def fields(cls):
    """
    Return the tuple of *attrs* attributes for a class.

    The tuple also allows accessing the fields by their names (see below
    for examples).

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        tuple (with name accessors) of `attrs.Attribute`

    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
       by name.
    .. versionchanged:: 23.1.0 Add support for generic classes.
    """
    generic_base = get_generic_base(cls)

    if generic_base is None and not isinstance(cls, type):
        msg = "Passed object must be a class."
        raise TypeError(msg)

    attrs = getattr(cls, "__attrs_attrs__", None)

    if attrs is not None:
        return attrs

    # Generic aliases don't carry the attribute themselves; look it up on
    # the origin class instead.
    if generic_base is not None:
        attrs = getattr(generic_base, "__attrs_attrs__", None)
        if attrs is not None:
            # Even though this is global state, stick it on here to speed
            # it up. We rely on `cls` being cached for this to be
            # efficient.
            cls.__attrs_attrs__ = attrs
            return attrs

    msg = f"{cls!r} is not an attrs-decorated class."
    raise NotAnAttrsClassError(msg)
+
1881
+
1882
def fields_dict(cls):
    """
    Return an ordered dictionary of *attrs* attributes for a class, whose
    keys are the attribute names.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        dict[str, attrs.Attribute]: Dict of attribute name to definition

    .. versionadded:: 18.1.0
    """
    if not isinstance(cls, type):
        msg = "Passed object must be a class."
        raise TypeError(msg)
    attribute_list = getattr(cls, "__attrs_attrs__", None)
    if attribute_list is None:
        msg = f"{cls!r} is not an attrs-decorated class."
        raise NotAnAttrsClassError(msg)
    return {a.name: a for a in attribute_list}
+
1910
+
1911
def validate(inst):
    """
    Validate all attributes on *inst* that have a validator.

    Leaves all exceptions through.

    Args:
        inst: Instance of a class with *attrs* attributes.
    """
    # Honor the global kill switch for validators.
    if _config._run_validators is False:
        return

    for field in fields(inst.__class__):
        check = field.validator
        if check is not None:
            check(inst, field, getattr(inst, field.name))
1927
+
1928
+
1929
+ def _is_slot_attr(a_name, base_attr_map):
1930
+ """
1931
+ Check if the attribute name comes from a slot class.
1932
+ """
1933
+ cls = base_attr_map.get(a_name)
1934
+ return cls and "__slots__" in cls.__dict__
1935
+
1936
+
1937
def _make_init_script(
    cls,
    attrs,
    pre_init,
    pre_init_has_args,
    post_init,
    frozen,
    slots,
    cache_hash,
    base_attr_map,
    is_exc,
    cls_on_setattr,
    attrs_init,
) -> tuple[str, dict, dict]:
    """
    Build the source for a class's ``__init__`` / ``__attrs_init__``.

    Filters out attributes that take no part in initialization, decides
    whether the cached ``object.__setattr__`` shortcut is needed, and
    delegates the actual code generation to ``_attrs_to_init_script``.

    Returns the script text, the globals dict it must be exec'd with, and
    the annotations for the generated method.
    """
    has_cls_on_setattr = (
        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
    )

    # A class-wide setattr hook is incompatible with frozen classes.
    if frozen and has_cls_on_setattr:
        msg = "Frozen classes can't use on_setattr."
        raise ValueError(msg)

    # Frozen or hash-caching classes must bypass __setattr__ via the cached
    # object.__setattr__ bound method.
    needs_cached_setattr = cache_hash or frozen
    filtered_attrs = []
    attr_dict = {}
    for a in attrs:
        # Attributes with init=False and no default never appear in __init__.
        if not a.init and a.default is NOTHING:
            continue

        filtered_attrs.append(a)
        attr_dict[a.name] = a

        if a.on_setattr is not None:
            if frozen is True:
                msg = "Frozen classes can't use on_setattr."
                raise ValueError(msg)

            needs_cached_setattr = True
        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
            needs_cached_setattr = True

    script, globs, annotations = _attrs_to_init_script(
        filtered_attrs,
        frozen,
        slots,
        pre_init,
        pre_init_has_args,
        post_init,
        cache_hash,
        base_attr_map,
        is_exc,
        needs_cached_setattr,
        has_cls_on_setattr,
        "__attrs_init__" if attrs_init else "__init__",
    )
    if cls.__module__ in sys.modules:
        # This makes typing.get_type_hints(CLS.__init__) resolve string types.
        globs.update(sys.modules[cls.__module__].__dict__)

    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})

    if needs_cached_setattr:
        # Save the lookup overhead in __init__ if we need to circumvent
        # setattr hooks.
        globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__

    return script, globs, annotations
2004
+
2005
+
2006
+ def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str:
2007
+ """
2008
+ Use the cached object.setattr to set *attr_name* to *value_var*.
2009
+ """
2010
+ return f"_setattr('{attr_name}', {value_var})"
2011
+
2012
+
2013
+ def _setattr_with_converter(
2014
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
2015
+ ) -> str:
2016
+ """
2017
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
2018
+ its converter first.
2019
+ """
2020
+ return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})"
2021
+
2022
+
2023
+ def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str:
2024
+ """
2025
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
2026
+ relegate to _setattr.
2027
+ """
2028
+ if has_on_setattr:
2029
+ return _setattr(attr_name, value, True)
2030
+
2031
+ return f"self.{attr_name} = {value}"
2032
+
2033
+
2034
+ def _assign_with_converter(
2035
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
2036
+ ) -> str:
2037
+ """
2038
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
2039
+ conversion. Otherwise relegate to _setattr_with_converter.
2040
+ """
2041
+ if has_on_setattr:
2042
+ return _setattr_with_converter(attr_name, value_var, True, converter)
2043
+
2044
+ return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}"
2045
+
2046
+
2047
def _determine_setters(
    frozen: bool, slots: bool, base_attr_map: dict[str, type]
):
    """
    Determine the correct setter functions based on whether a class is frozen
    and/or slotted.

    Returns a triple of (extra setup lines for the generated __init__,
    plain setter formatter, converter-aware setter formatter).
    """
    if frozen is True:
        if slots is True:
            # Slotted frozen classes must always go through the cached
            # object.__setattr__.
            return (), _setattr, _setattr_with_converter

        # Dict frozen classes assign directly to __dict__.
        # But only if the attribute doesn't come from an ancestor slot
        # class.
        # Note _inst_dict will be used again below if cache_hash is True

        def fmt_setter(
            attr_name: str, value_var: str, has_on_setattr: bool
        ) -> str:
            if _is_slot_attr(attr_name, base_attr_map):
                # Inherited slot descriptors can't be written via __dict__.
                return _setattr(attr_name, value_var, has_on_setattr)

            return f"_inst_dict['{attr_name}'] = {value_var}"

        def fmt_setter_with_converter(
            attr_name: str,
            value_var: str,
            has_on_setattr: bool,
            converter: Converter,
        ) -> str:
            if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
                return _setattr_with_converter(
                    attr_name, value_var, has_on_setattr, converter
                )

            return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}"

        return (
            # The generated __init__ needs a local alias of self.__dict__.
            ("_inst_dict = self.__dict__",),
            fmt_setter,
            fmt_setter_with_converter,
        )

    # Not frozen -- we can just assign directly.
    return (), _assign, _assign_with_converter
2092
+
2093
+
2094
def _attrs_to_init_script(
    attrs: list[Attribute],
    is_frozen: bool,
    is_slotted: bool,
    call_pre_init: bool,
    pre_init_has_args: bool,
    call_post_init: bool,
    does_cache_hash: bool,
    base_attr_map: dict[str, type],
    is_exc: bool,
    needs_cached_setattr: bool,
    has_cls_on_setattr: bool,
    method_name: str,
) -> tuple[str, dict, dict]:
    """
    Return a script of an initializer for *attrs*, a dict of globals, and
    annotations for the initializer.

    The globals are required by the generated script.
    """
    lines = ["self.__attrs_pre_init__()"] if call_pre_init else []

    if needs_cached_setattr:
        lines.append(
            # Circumvent the __setattr__ descriptor to save one lookup per
            # assignment. Note _setattr will be used again below if
            # does_cache_hash is True.
            "_setattr = _cached_setattr_get(self)"
        )

    extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters(
        is_frozen, is_slotted, base_attr_map
    )
    lines.extend(extra_lines)

    args = []
    kw_only_args = []
    attrs_to_validate = []

    # This is a dictionary of names to validator and converter callables.
    # Injecting this into __init__ globals lets us avoid lookups.
    names_for_globals = {}
    annotations = {"return": None}

    for a in attrs:
        if a.validator:
            attrs_to_validate.append(a)

        attr_name = a.name
        has_on_setattr = a.on_setattr is not None or (
            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
        )
        # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not
        # explicitly provided
        arg_name = a.alias

        has_factory = isinstance(a.default, Factory)
        maybe_self = "self" if has_factory and a.default.takes_self else ""

        # Normalize plain callables into Converter instances so the
        # rendering helpers can rely on one interface.
        if a.converter is not None and not isinstance(a.converter, Converter):
            converter = Converter(a.converter)
        else:
            converter = a.converter

        if a.init is False:
            # Not an __init__ argument: value comes from a factory or the
            # stored default.
            if has_factory:
                init_factory_name = _INIT_FACTORY_PAT % (a.name,)
                if converter is not None:
                    lines.append(
                        fmt_setter_with_converter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                            converter,
                        )
                    )
                    names_for_globals[converter._get_global_name(a.name)] = (
                        converter.converter
                    )
                else:
                    lines.append(
                        fmt_setter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                        )
                    )
                names_for_globals[init_factory_name] = a.default.factory
            elif converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    fmt_setter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                    )
                )
        elif a.default is not NOTHING and not has_factory:
            # Plain default: expose it as the parameter default via the
            # attr_dict lookup so mutable defaults aren't baked in.
            arg = f"{arg_name}=attr_dict['{attr_name}'].default"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        elif has_factory:
            # Factory default: use NOTHING as the sentinel so an explicit
            # argument can be told apart from "use the factory".
            arg = f"{arg_name}=NOTHING"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)
            lines.append(f"if {arg_name} is not NOTHING:")

            init_factory_name = _INIT_FACTORY_PAT % (a.name,)
            if converter is not None:
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                    )
                )
            names_for_globals[init_factory_name] = a.default.factory
        else:
            # Mandatory argument without default.
            if a.kw_only:
                kw_only_args.append(arg_name)
            else:
                args.append(arg_name)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        if a.init is True:
            if a.type is not None and converter is None:
                annotations[arg_name] = a.type
            elif converter is not None and converter._first_param_type:
                # Use the type from the converter if present.
                annotations[arg_name] = converter._first_param_type

    if attrs_to_validate:  # we can skip this if there are no validators.
        names_for_globals["_config"] = _config
        lines.append("if _config._run_validators is True:")
        for a in attrs_to_validate:
            val_name = "__attr_validator_" + a.name
            attr_name = "__attr_" + a.name
            lines.append(f"    {val_name}(self, {attr_name}, self.{a.name})")
            names_for_globals[val_name] = a.validator
            names_for_globals[attr_name] = a

    if call_post_init:
        lines.append("self.__attrs_post_init__()")

    # Because this is set only after __attrs_post_init__ is called, a crash
    # will result if post-init tries to access the hash code. This seemed
    # preferable to setting this beforehand, in which case alteration to field
    # values during post-init combined with post-init accessing the hash code
    # would result in silent bugs.
    if does_cache_hash:
        if is_frozen:
            if is_slotted:
                init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)"
            else:
                init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None"
        else:
            init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None"
        lines.append(init_hash_cache)

    # For exceptions we rely on BaseException.__init__ for proper
    # initialization.
    if is_exc:
        vals = ",".join(f"self.{a.name}" for a in attrs if a.init)

        lines.append(f"BaseException.__init__(self, {vals})")

    args = ", ".join(args)
    pre_init_args = args
    if kw_only_args:
        # leading comma & kw_only args
        args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}"
        pre_init_kw_only_args = ", ".join(
            [
                f"{kw_arg_name}={kw_arg_name}"
                # We need to remove the defaults from the kw_only_args.
                for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args)
            ]
        )
        pre_init_args += ", " if pre_init_args else ""
        pre_init_args += pre_init_kw_only_args

    if call_pre_init and pre_init_has_args:
        # If pre init method has arguments, pass same arguments as `__init__`.
        lines[0] = f"self.__attrs_pre_init__({pre_init_args})"

    # Python <3.12 doesn't allow backslashes in f-strings.
    NL = "\n    "
    return (
        f"""def {method_name}(self, {args}):
    {NL.join(lines) if lines else "pass"}
""",
        names_for_globals,
        annotations,
    )
2352
+
2353
+
2354
+ def _default_init_alias_for(name: str) -> str:
2355
+ """
2356
+ The default __init__ parameter name for a field.
2357
+
2358
+ This performs private-name adjustment via leading-unscore stripping,
2359
+ and is the default value of Attribute.alias if not provided.
2360
+ """
2361
+
2362
+ return name.lstrip("_")
2363
+
2364
+
2365
class Attribute:
    """
    *Read-only* representation of an attribute.

    .. warning::

       You should never instantiate this class yourself.

    The class has *all* arguments of `attr.ib` (except for ``factory`` which is
    only syntactic sugar for ``default=Factory(...)`` plus the following:

    - ``name`` (`str`): The name of the attribute.
    - ``alias`` (`str`): The __init__ parameter name of the attribute, after
      any explicit overrides and default private-attribute-name handling.
    - ``inherited`` (`bool`): Whether or not that attribute has been inherited
      from a base class.
    - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The
      callables that are used for comparing and ordering objects by this
      attribute, respectively. These are set by passing a callable to
      `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also
      :ref:`comparison customization <custom-comparison>`.

    Instances of this class are frequently used for introspection purposes
    like:

    - `fields` returns a tuple of them.
    - Validators get them passed as the first argument.
    - The :ref:`field transformer <transform-fields>` hook receives a list of
      them.
    - The ``alias`` property exposes the __init__ parameter name of the field,
      with any overrides and default private-attribute handling applied.


    .. versionadded:: 20.1.0 *inherited*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
        equality checks and hashing anymore.
    .. versionadded:: 21.1.0 *eq_key* and *order_key*
    .. versionadded:: 22.2.0 *alias*

    For the full version history of the fields, see `attr.ib`.
    """

    # These slots must NOT be reordered because we use them later for
    # instantiation.
    __slots__ = (  # noqa: RUF023
        "name",
        "default",
        "validator",
        "repr",
        "eq",
        "eq_key",
        "order",
        "order_key",
        "hash",
        "init",
        "metadata",
        "type",
        "converter",
        "kw_only",
        "inherited",
        "on_setattr",
        "alias",
    )

    def __init__(
        self,
        name,
        default,
        validator,
        repr,
        cmp,  # XXX: unused, remove along with other cmp code.
        hash,
        init,
        inherited,
        metadata=None,
        type=None,
        converter=None,
        kw_only=False,
        eq=None,
        eq_key=None,
        order=None,
        order_key=None,
        on_setattr=None,
        alias=None,
    ):
        # Reconcile the legacy *cmp* argument with eq/order and their key
        # callables; raises on contradictory combinations.
        eq, eq_key, order, order_key = _determine_attrib_eq_order(
            cmp, eq_key or eq, order_key or order, True
        )

        # Cache this descriptor here to speed things up later.
        bound_setattr = _OBJ_SETATTR.__get__(self)

        # Despite the big red warning, people *do* instantiate `Attribute`
        # themselves.
        # All writes go through object.__setattr__ because this class's own
        # __setattr__ is disabled below to keep instances read-only.
        bound_setattr("name", name)
        bound_setattr("default", default)
        bound_setattr("validator", validator)
        bound_setattr("repr", repr)
        bound_setattr("eq", eq)
        bound_setattr("eq_key", eq_key)
        bound_setattr("order", order)
        bound_setattr("order_key", order_key)
        bound_setattr("hash", hash)
        bound_setattr("init", init)
        bound_setattr("converter", converter)
        bound_setattr(
            "metadata",
            (
                types.MappingProxyType(dict(metadata))  # Shallow copy
                if metadata
                else _EMPTY_METADATA_SINGLETON
            ),
        )
        bound_setattr("type", type)
        bound_setattr("kw_only", kw_only)
        bound_setattr("inherited", inherited)
        bound_setattr("on_setattr", on_setattr)
        bound_setattr("alias", alias)

    def __setattr__(self, name, value):
        # Attribute instances are immutable; any direct assignment fails.
        raise FrozenInstanceError

    @classmethod
    def from_counting_attr(cls, name: str, ca: _CountingAttr, type=None):
        # Build a finished Attribute from the intermediate _CountingAttr
        # produced by attr.ib()/attrs.field().
        # type holds the annotated value. deal with conflicts:
        if type is None:
            type = ca.type
        elif ca.type is not None:
            msg = f"Type annotation and type argument cannot both be present for '{name}'."
            raise ValueError(msg)
        return cls(
            name,
            ca._default,
            ca._validator,
            ca.repr,
            None,
            ca.hash,
            ca.init,
            False,
            ca.metadata,
            type,
            ca.converter,
            ca.kw_only,
            ca.eq,
            ca.eq_key,
            ca.order,
            ca.order_key,
            ca.on_setattr,
            ca.alias,
        )

    # Don't use attrs.evolve since fields(Attribute) doesn't work
    def evolve(self, **changes):
        """
        Copy *self* and apply *changes*.

        This works similarly to `attrs.evolve` but that function does not work
        with :class:`attrs.Attribute`.

        It is mainly meant to be used for `transform-fields`.

        .. versionadded:: 20.3.0
        """
        new = copy.copy(self)

        new._setattrs(changes.items())

        return new

    # Don't use _add_pickle since fields(Attribute) doesn't work
    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # metadata is a MappingProxyType which can't be pickled directly, so
        # serialize it as a plain dict.
        return tuple(
            getattr(self, name) if name != "metadata" else dict(self.metadata)
            for name in self.__slots__
        )

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        # state is ordered exactly like __slots__ (see __getstate__).
        self._setattrs(zip(self.__slots__, state))

    def _setattrs(self, name_values_pairs):
        # Internal writer that bypasses the disabled __setattr__; restores
        # the read-only MappingProxyType wrapper for metadata.
        bound_setattr = _OBJ_SETATTR.__get__(self)
        for name, value in name_values_pairs:
            if name != "metadata":
                bound_setattr(name, value)
            else:
                bound_setattr(
                    name,
                    (
                        types.MappingProxyType(dict(value))
                        if value
                        else _EMPTY_METADATA_SINGLETON
                    ),
                )
2565
+
2566
+
2567
# Bootstrap: describe Attribute's own fields as Attribute instances so we can
# generate its __repr__, __eq__, and __hash__ with the same machinery.
_a = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        # metadata is a mapping and therefore unhashable.
        hash=(name != "metadata"),
        init=True,
        inherited=False,
        alias=_default_init_alias_for(name),
    )
    for name in Attribute.__slots__
]

# inherited is deliberately excluded from equality and hashing (see the
# 20.2.0 changelog note on the class).
Attribute = _add_hash(
    _add_eq(
        _add_repr(Attribute, attrs=_a),
        attrs=[a for a in _a if a.name != "inherited"],
    ),
    attrs=[a for a in _a if a.hash and a.name != "inherited"],
)
2591
+
2592
+
2593
class _CountingAttr:
    """
    Intermediate representation of attributes that uses a counter to preserve
    the order in which the attributes have been defined.

    *Internal* data structure of the attrs library.  Running into is most
    likely the result of a bug like a forgotten `@attr.s` decorator.
    """

    __slots__ = (
        "_default",
        "_validator",
        "alias",
        "converter",
        "counter",
        "eq",
        "eq_key",
        "hash",
        "init",
        "kw_only",
        "metadata",
        "on_setattr",
        "order",
        "order_key",
        "repr",
        "type",
    )
    # Hand-written field descriptions so _add_eq/_add_repr below can operate
    # on this class even though it isn't itself @attr.s-decorated.
    __attrs_attrs__ = (
        *tuple(
            Attribute(
                name=name,
                alias=_default_init_alias_for(name),
                default=NOTHING,
                validator=None,
                repr=True,
                cmp=None,
                hash=True,
                init=True,
                kw_only=False,
                eq=True,
                eq_key=None,
                order=False,
                order_key=None,
                inherited=False,
                on_setattr=None,
            )
            for name in (
                "counter",
                "_default",
                "repr",
                "eq",
                "order",
                "hash",
                "init",
                "on_setattr",
                "alias",
            )
        ),
        # metadata gets its own entry because it must stay out of hashing.
        Attribute(
            name="metadata",
            alias="metadata",
            default=None,
            validator=None,
            repr=True,
            cmp=None,
            hash=False,
            init=True,
            kw_only=False,
            eq=True,
            eq_key=None,
            order=False,
            order_key=None,
            inherited=False,
            on_setattr=None,
        ),
    )
    # Class-wide monotonically increasing counter; its value at instantiation
    # time records definition order.
    cls_counter = 0

    def __init__(
        self,
        default,
        validator,
        repr,
        cmp,
        hash,
        init,
        converter,
        metadata,
        type,
        kw_only,
        eq,
        eq_key,
        order,
        order_key,
        on_setattr,
        alias,
    ):
        _CountingAttr.cls_counter += 1
        self.counter = _CountingAttr.cls_counter
        self._default = default
        self._validator = validator
        self.converter = converter
        self.repr = repr
        self.eq = eq
        self.eq_key = eq_key
        self.order = order
        self.order_key = order_key
        self.hash = hash
        self.init = init
        self.metadata = metadata
        self.type = type
        self.kw_only = kw_only
        self.on_setattr = on_setattr
        self.alias = alias

    def validator(self, meth):
        """
        Decorator that adds *meth* to the list of validators.

        Returns *meth* unchanged.

        .. versionadded:: 17.1.0
        """
        # Multiple decorated validators compose via and_().
        if self._validator is None:
            self._validator = meth
        else:
            self._validator = and_(self._validator, meth)
        return meth

    def default(self, meth):
        """
        Decorator that allows to set the default for an attribute.

        Returns *meth* unchanged.

        Raises:
            DefaultAlreadySetError: If default has been set before.

        .. versionadded:: 17.1.0
        """
        if self._default is not NOTHING:
            raise DefaultAlreadySetError

        # The decorated method becomes a takes_self factory so it can read
        # already-initialized attributes.
        self._default = Factory(meth, takes_self=True)

        return meth
2739
+
2740
+
2741
# Equip _CountingAttr with generated __eq__/__repr__ based on its
# hand-written __attrs_attrs__ above.
_CountingAttr = _add_eq(_add_repr(_CountingAttr))
2742
+
2743
+
2744
class Factory:
    """
    Stores a factory callable.

    If passed as the default value to `attrs.field`, the factory is used to
    generate a new value.

    Args:
        factory (typing.Callable):
            A callable that takes either none or exactly one mandatory
            positional argument depending on *takes_self*.

        takes_self (bool):
            Pass the partially initialized instance that is being initialized
            as a positional argument.

    .. versionadded:: 17.1.0 *takes_self*
    """

    __slots__ = ("factory", "takes_self")

    def __init__(self, factory, takes_self=False):
        self.factory = factory
        self.takes_self = takes_self

    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # Slotted classes have no __dict__, so state is the slot values in
        # __slots__ order.
        return tuple(getattr(self, slot) for slot in self.__slots__)

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        for slot, value in zip(self.__slots__, state):
            setattr(self, slot, value)
2781
+
2782
+
2783
# Describe Factory's fields so the generated dunder helpers can be applied
# to it, just like for Attribute above.
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in Factory.__slots__
]

Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
2800
+
2801
+
2802
class Converter:
    """
    Stores a converter callable.

    Allows for the wrapped converter to take additional arguments. The
    arguments are passed in the order they are documented.

    Args:
        converter (Callable): A callable that converts the passed value.

        takes_self (bool):
            Pass the partially initialized instance that is being initialized
            as a positional argument. (default: `False`)

        takes_field (bool):
            Pass the field definition (an :class:`Attribute`) into the
            converter as a positional argument. (default: `False`)

    .. versionadded:: 24.1.0
    """

    __slots__ = (
        "__call__",
        "_first_param_type",
        "_global_name",
        "converter",
        "takes_field",
        "takes_self",
    )

    def __init__(self, converter, *, takes_self=False, takes_field=False):
        self.converter = converter
        self.takes_self = takes_self
        self.takes_field = takes_field

        ex = _AnnotationExtractor(converter)
        self._first_param_type = ex.get_first_param_type()

        # Pre-select a 3-argument adapter (value, instance, field) so the
        # flag checks happen once here instead of on every conversion.
        if not (self.takes_self or self.takes_field):
            self.__call__ = lambda value, _, __: self.converter(value)
        elif self.takes_self and not self.takes_field:
            self.__call__ = lambda value, instance, __: self.converter(
                value, instance
            )
        elif not self.takes_self and self.takes_field:
            self.__call__ = lambda value, __, field: self.converter(
                value, field
            )
        else:
            self.__call__ = lambda value, instance, field: self.converter(
                value, instance, field
            )

        # Propagate the wrapped converter's return annotation so
        # typing.get_type_hints keeps working on the adapter.
        rt = ex.get_return_type()
        if rt is not None:
            self.__call__.__annotations__["return"] = rt

    @staticmethod
    def _get_global_name(attr_name: str) -> str:
        """
        Return the name that a converter for an attribute name *attr_name*
        would have.
        """
        return f"__attr_converter_{attr_name}"

    def _fmt_converter_call(self, attr_name: str, value_var: str) -> str:
        """
        Return a string that calls the converter for an attribute name
        *attr_name* and the value in variable named *value_var* according to
        `self.takes_self` and `self.takes_field`.
        """
        if not (self.takes_self or self.takes_field):
            return f"{self._get_global_name(attr_name)}({value_var})"

        if self.takes_self and self.takes_field:
            return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])"

        if self.takes_self:
            return f"{self._get_global_name(attr_name)}({value_var}, self)"

        return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])"

    def __getstate__(self):
        """
        Return a dict containing only converter and takes_self -- the rest gets
        computed when loading.
        """
        return {
            "converter": self.converter,
            "takes_self": self.takes_self,
            "takes_field": self.takes_field,
        }

    def __setstate__(self, state):
        """
        Load instance from state.
        """
        # Re-running __init__ rebuilds the derived adapter and the cached
        # annotation data from the pickled flags.
        self.__init__(**state)
2900
+
2901
+
2902
# Describe Converter's user-facing fields (the derived slots like __call__
# are deliberately excluded) so the dunder helpers can be applied.
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in ("converter", "takes_self", "takes_field")
]

Converter = _add_hash(
    _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f
)
2921
+
2922
+
2923
def make_class(
    name, attrs, bases=(object,), class_body=None, **attributes_arguments
):
    r"""
    A quick way to create a new class called *name* with *attrs*.

    .. note::

        ``make_class()`` is a thin wrapper around `attr.s`, not `attrs.define`
        which means that it doesn't come with some of the improved defaults.

        For example, if you want the same ``on_setattr`` behavior as in
        `attrs.define`, you have to pass the hooks yourself: ``make_class(...,
        on_setattr=setters.pipe(setters.convert, setters.validate)``

    .. warning::

        It is *your* duty to ensure that the class name and the attribute names
        are valid identifiers. ``make_class()`` will *not* validate them for
        you.

    Args:
        name (str): The name for the new class.

        attrs (list | dict):
            A list of names or a dictionary of mappings of names to `attr.ib`\
            s / `attrs.field`\ s.

            The order is deduced from the order of the names or attributes
            inside *attrs*. Otherwise the order of the definition of the
            attributes is used.

        bases (tuple[type, ...]): Classes that the new class will subclass.

        class_body (dict):
            An optional dictionary of class attributes for the new class.

        attributes_arguments: Passed unmodified to `attr.s`.

    Returns:
        type: A new class with *attrs*.

    .. versionadded:: 17.1.0 *bases*
    .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
    .. versionchanged:: 23.2.0 *class_body*
    .. versionchanged:: 25.2.0 Class names can now be unicode.
    """
    # Class identifiers are converted into the normal form NFKC while parsing
    name = unicodedata.normalize("NFKC", name)

    if isinstance(attrs, dict):
        cls_dict = attrs
    elif isinstance(attrs, (list, tuple)):
        # Bare names become default attr.ib()s.
        cls_dict = {a: attrib() for a in attrs}
    else:
        msg = "attrs argument must be a dict or a list."
        raise TypeError(msg)

    # Lifecycle hooks and a user __init__ must live on the class body, not in
    # the attribute dict, so pull them out before building the class.
    pre_init = cls_dict.pop("__attrs_pre_init__", None)
    post_init = cls_dict.pop("__attrs_post_init__", None)
    user_init = cls_dict.pop("__init__", None)

    body = {}
    if class_body is not None:
        body.update(class_body)
    if pre_init is not None:
        body["__attrs_pre_init__"] = pre_init
    if post_init is not None:
        body["__attrs_post_init__"] = post_init
    if user_init is not None:
        body["__init__"] = user_init

    type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))

    # For pickling to work, the __module__ variable needs to be set to the
    # frame where the class is created.  Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython).
    with contextlib.suppress(AttributeError, ValueError):
        type_.__module__ = sys._getframe(1).f_globals.get(
            "__name__", "__main__"
        )

    # We do it here for proper warnings with meaningful stacklevel.
    cmp = attributes_arguments.pop("cmp", None)
    (
        attributes_arguments["eq"],
        attributes_arguments["order"],
    ) = _determine_attrs_eq_order(
        cmp,
        attributes_arguments.get("eq"),
        attributes_arguments.get("order"),
        True,
    )

    cls = _attrs(these=cls_dict, **attributes_arguments)(type_)
    # Only add type annotations now or "_attrs()" will complain:
    cls.__annotations__ = {
        k: v.type for k, v in cls_dict.items() if v.type is not None
    }
    return cls
3024
+
3025
+
3026
+ # These are required by within this module so we define them here and merely
3027
+ # import into .validators / .converters.
3028
+
3029
+
3030
+ @attrs(slots=True, unsafe_hash=True)
3031
+ class _AndValidator:
3032
+ """
3033
+ Compose many validators to a single one.
3034
+ """
3035
+
3036
+ _validators = attrib()
3037
+
3038
+ def __call__(self, inst, attr, value):
3039
+ for v in self._validators:
3040
+ v(inst, attr, value)
3041
+
3042
+
3043
+ def and_(*validators):
3044
+ """
3045
+ A validator that composes multiple validators into one.
3046
+
3047
+ When called on a value, it runs all wrapped validators.
3048
+
3049
+ Args:
3050
+ validators (~collections.abc.Iterable[typing.Callable]):
3051
+ Arbitrary number of validators.
3052
+
3053
+ .. versionadded:: 17.1.0
3054
+ """
3055
+ vals = []
3056
+ for validator in validators:
3057
+ vals.extend(
3058
+ validator._validators
3059
+ if isinstance(validator, _AndValidator)
3060
+ else [validator]
3061
+ )
3062
+
3063
+ return _AndValidator(tuple(vals))
3064
+
3065
+
3066
+ def pipe(*converters):
3067
+ """
3068
+ A converter that composes multiple converters into one.
3069
+
3070
+ When called on a value, it runs all wrapped converters, returning the
3071
+ *last* value.
3072
+
3073
+ Type annotations will be inferred from the wrapped converters', if they
3074
+ have any.
3075
+
3076
+ converters (~collections.abc.Iterable[typing.Callable]):
3077
+ Arbitrary number of converters.
3078
+
3079
+ .. versionadded:: 20.1.0
3080
+ """
3081
+
3082
+ return_instance = any(isinstance(c, Converter) for c in converters)
3083
+
3084
+ if return_instance:
3085
+
3086
+ def pipe_converter(val, inst, field):
3087
+ for c in converters:
3088
+ val = (
3089
+ c(val, inst, field) if isinstance(c, Converter) else c(val)
3090
+ )
3091
+
3092
+ return val
3093
+
3094
+ else:
3095
+
3096
+ def pipe_converter(val):
3097
+ for c in converters:
3098
+ val = c(val)
3099
+
3100
+ return val
3101
+
3102
+ if not converters:
3103
+ # If the converter list is empty, pipe_converter is the identity.
3104
+ A = TypeVar("A")
3105
+ pipe_converter.__annotations__.update({"val": A, "return": A})
3106
+ else:
3107
+ # Get parameter type from first converter.
3108
+ t = _AnnotationExtractor(converters[0]).get_first_param_type()
3109
+ if t:
3110
+ pipe_converter.__annotations__["val"] = t
3111
+
3112
+ last = converters[-1]
3113
+ if not PY_3_11_PLUS and isinstance(last, Converter):
3114
+ last = last.__call__
3115
+
3116
+ # Get return type from last converter.
3117
+ rt = _AnnotationExtractor(last).get_return_type()
3118
+ if rt:
3119
+ pipe_converter.__annotations__["return"] = rt
3120
+
3121
+ if return_instance:
3122
+ return Converter(pipe_converter, takes_self=True, takes_field=True)
3123
+ return pipe_converter
venv/Lib/site-packages/attr/_next_gen.py ADDED
@@ -0,0 +1,623 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+ """
4
+ These are keyword-only APIs that call `attr.s` and `attr.ib` with different
5
+ default values.
6
+ """
7
+
8
+ from functools import partial
9
+
10
+ from . import setters
11
+ from ._funcs import asdict as _asdict
12
+ from ._funcs import astuple as _astuple
13
+ from ._make import (
14
+ _DEFAULT_ON_SETATTR,
15
+ NOTHING,
16
+ _frozen_setattrs,
17
+ attrib,
18
+ attrs,
19
+ )
20
+ from .exceptions import UnannotatedAttributeError
21
+
22
+
23
+ def define(
24
+ maybe_cls=None,
25
+ *,
26
+ these=None,
27
+ repr=None,
28
+ unsafe_hash=None,
29
+ hash=None,
30
+ init=None,
31
+ slots=True,
32
+ frozen=False,
33
+ weakref_slot=True,
34
+ str=False,
35
+ auto_attribs=None,
36
+ kw_only=False,
37
+ cache_hash=False,
38
+ auto_exc=True,
39
+ eq=None,
40
+ order=False,
41
+ auto_detect=True,
42
+ getstate_setstate=None,
43
+ on_setattr=None,
44
+ field_transformer=None,
45
+ match_args=True,
46
+ ):
47
+ r"""
48
+ A class decorator that adds :term:`dunder methods` according to
49
+ :term:`fields <field>` specified using :doc:`type annotations <types>`,
50
+ `field()` calls, or the *these* argument.
51
+
52
+ Since *attrs* patches or replaces an existing class, you cannot use
53
+ `object.__init_subclass__` with *attrs* classes, because it runs too early.
54
+ As a replacement, you can define ``__attrs_init_subclass__`` on your class.
55
+ It will be called by *attrs* classes that subclass it after they're
56
+ created. See also :ref:`init-subclass`.
57
+
58
+ Args:
59
+ slots (bool):
60
+ Create a :term:`slotted class <slotted classes>` that's more
61
+ memory-efficient. Slotted classes are generally superior to the
62
+ default dict classes, but have some gotchas you should know about,
63
+ so we encourage you to read the :term:`glossary entry <slotted
64
+ classes>`.
65
+
66
+ auto_detect (bool):
67
+ Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
68
+ explicitly, assume they are set to True **unless any** of the
69
+ involved methods for one of the arguments is implemented in the
70
+ *current* class (meaning, it is *not* inherited from some base
71
+ class).
72
+
73
+ So, for example by implementing ``__eq__`` on a class yourself,
74
+ *attrs* will deduce ``eq=False`` and will create *neither*
75
+ ``__eq__`` *nor* ``__ne__`` (but Python classes come with a
76
+ sensible ``__ne__`` by default, so it *should* be enough to only
77
+ implement ``__eq__`` in most cases).
78
+
79
+ Passing True or False` to *init*, *repr*, *eq*, or *hash*
80
+ overrides whatever *auto_detect* would determine.
81
+
82
+ auto_exc (bool):
83
+ If the class subclasses `BaseException` (which implicitly includes
84
+ any subclass of any exception), the following happens to behave
85
+ like a well-behaved Python exception class:
86
+
87
+ - the values for *eq*, *order*, and *hash* are ignored and the
88
+ instances compare and hash by the instance's ids [#]_ ,
89
+ - all attributes that are either passed into ``__init__`` or have a
90
+ default value are additionally available as a tuple in the
91
+ ``args`` attribute,
92
+ - the value of *str* is ignored leaving ``__str__`` to base
93
+ classes.
94
+
95
+ .. [#]
96
+ Note that *attrs* will *not* remove existing implementations of
97
+ ``__hash__`` or the equality methods. It just won't add own
98
+ ones.
99
+
100
+ on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
101
+ A callable that is run whenever the user attempts to set an
102
+ attribute (either by assignment like ``i.x = 42`` or by using
103
+ `setattr` like ``setattr(i, "x", 42)``). It receives the same
104
+ arguments as validators: the instance, the attribute that is being
105
+ modified, and the new value.
106
+
107
+ If no exception is raised, the attribute is set to the return value
108
+ of the callable.
109
+
110
+ If a list of callables is passed, they're automatically wrapped in
111
+ an `attrs.setters.pipe`.
112
+
113
+ If left None, the default behavior is to run converters and
114
+ validators whenever an attribute is set.
115
+
116
+ init (bool):
117
+ Create a ``__init__`` method that initializes the *attrs*
118
+ attributes. Leading underscores are stripped for the argument name,
119
+ unless an alias is set on the attribute.
120
+
121
+ .. seealso::
122
+ `init` shows advanced ways to customize the generated
123
+ ``__init__`` method, including executing code before and after.
124
+
125
+ repr(bool):
126
+ Create a ``__repr__`` method with a human readable representation
127
+ of *attrs* attributes.
128
+
129
+ str (bool):
130
+ Create a ``__str__`` method that is identical to ``__repr__``. This
131
+ is usually not necessary except for `Exception`\ s.
132
+
133
+ eq (bool | None):
134
+ If True or None (default), add ``__eq__`` and ``__ne__`` methods
135
+ that check two instances for equality.
136
+
137
+ .. seealso::
138
+ `comparison` describes how to customize the comparison behavior
139
+ going as far comparing NumPy arrays.
140
+
141
+ order (bool | None):
142
+ If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
143
+ methods that behave like *eq* above and allow instances to be
144
+ ordered.
145
+
146
+ They compare the instances as if they were tuples of their *attrs*
147
+ attributes if and only if the types of both classes are
148
+ *identical*.
149
+
150
+ If `None` mirror value of *eq*.
151
+
152
+ .. seealso:: `comparison`
153
+
154
+ unsafe_hash (bool | None):
155
+ If None (default), the ``__hash__`` method is generated according
156
+ how *eq* and *frozen* are set.
157
+
158
+ 1. If *both* are True, *attrs* will generate a ``__hash__`` for
159
+ you.
160
+ 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
161
+ to None, marking it unhashable (which it is).
162
+ 3. If *eq* is False, ``__hash__`` will be left untouched meaning
163
+ the ``__hash__`` method of the base class will be used. If the
164
+ base class is `object`, this means it will fall back to id-based
165
+ hashing.
166
+
167
+ Although not recommended, you can decide for yourself and force
168
+ *attrs* to create one (for example, if the class is immutable even
169
+ though you didn't freeze it programmatically) by passing True or
170
+ not. Both of these cases are rather special and should be used
171
+ carefully.
172
+
173
+ .. seealso::
174
+
175
+ - Our documentation on `hashing`,
176
+ - Python's documentation on `object.__hash__`,
177
+ - and the `GitHub issue that led to the default \ behavior
178
+ <https://github.com/python-attrs/attrs/issues/136>`_ for more
179
+ details.
180
+
181
+ hash (bool | None):
182
+ Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
183
+
184
+ cache_hash (bool):
185
+ Ensure that the object's hash code is computed only once and stored
186
+ on the object. If this is set to True, hashing must be either
187
+ explicitly or implicitly enabled for this class. If the hash code
188
+ is cached, avoid any reassignments of fields involved in hash code
189
+ computation or mutations of the objects those fields point to after
190
+ object creation. If such changes occur, the behavior of the
191
+ object's hash code is undefined.
192
+
193
+ frozen (bool):
194
+ Make instances immutable after initialization. If someone attempts
195
+ to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
196
+ is raised.
197
+
198
+ .. note::
199
+
200
+ 1. This is achieved by installing a custom ``__setattr__``
201
+ method on your class, so you can't implement your own.
202
+
203
+ 2. True immutability is impossible in Python.
204
+
205
+ 3. This *does* have a minor a runtime performance `impact
206
+ <how-frozen>` when initializing new instances. In other
207
+ words: ``__init__`` is slightly slower with ``frozen=True``.
208
+
209
+ 4. If a class is frozen, you cannot modify ``self`` in
210
+ ``__attrs_post_init__`` or a self-written ``__init__``. You
211
+ can circumvent that limitation by using
212
+ ``object.__setattr__(self, "attribute_name", value)``.
213
+
214
+ 5. Subclasses of a frozen class are frozen too.
215
+
216
+ kw_only (bool):
217
+ Make all attributes keyword-only in the generated ``__init__`` (if
218
+ *init* is False, this parameter is ignored).
219
+
220
+ weakref_slot (bool):
221
+ Make instances weak-referenceable. This has no effect unless
222
+ *slots* is True.
223
+
224
+ field_transformer (~typing.Callable | None):
225
+ A function that is called with the original class object and all
226
+ fields right before *attrs* finalizes the class. You can use this,
227
+ for example, to automatically add converters or validators to
228
+ fields based on their types.
229
+
230
+ .. seealso:: `transform-fields`
231
+
232
+ match_args (bool):
233
+ If True (default), set ``__match_args__`` on the class to support
234
+ :pep:`634` (*Structural Pattern Matching*). It is a tuple of all
235
+ non-keyword-only ``__init__`` parameter names on Python 3.10 and
236
+ later. Ignored on older Python versions.
237
+
238
+ collect_by_mro (bool):
239
+ If True, *attrs* collects attributes from base classes correctly
240
+ according to the `method resolution order
241
+ <https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
242
+ will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
243
+
244
+ See also `issue #428
245
+ <https://github.com/python-attrs/attrs/issues/428>`_.
246
+
247
+ getstate_setstate (bool | None):
248
+ .. note::
249
+
250
+ This is usually only interesting for slotted classes and you
251
+ should probably just set *auto_detect* to True.
252
+
253
+ If True, ``__getstate__`` and ``__setstate__`` are generated and
254
+ attached to the class. This is necessary for slotted classes to be
255
+ pickleable. If left None, it's True by default for slotted classes
256
+ and False for dict classes.
257
+
258
+ If *auto_detect* is True, and *getstate_setstate* is left None, and
259
+ **either** ``__getstate__`` or ``__setstate__`` is detected
260
+ directly on the class (meaning: not inherited), it is set to False
261
+ (this is usually what you want).
262
+
263
+ auto_attribs (bool | None):
264
+ If True, look at type annotations to determine which attributes to
265
+ use, like `dataclasses`. If False, it will only look for explicit
266
+ :func:`field` class attributes, like classic *attrs*.
267
+
268
+ If left None, it will guess:
269
+
270
+ 1. If any attributes are annotated and no unannotated
271
+ `attrs.field`\ s are found, it assumes *auto_attribs=True*.
272
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
273
+ `attrs.field`\ s.
274
+
275
+ If *attrs* decides to look at type annotations, **all** fields
276
+ **must** be annotated. If *attrs* encounters a field that is set to
277
+ a :func:`field` / `attr.ib` but lacks a type annotation, an
278
+ `attrs.exceptions.UnannotatedAttributeError` is raised. Use
279
+ ``field_name: typing.Any = field(...)`` if you don't want to set a
280
+ type.
281
+
282
+ .. warning::
283
+
284
+ For features that use the attribute name to create decorators
285
+ (for example, :ref:`validators <validators>`), you still *must*
286
+ assign :func:`field` / `attr.ib` to them. Otherwise Python will
287
+ either not find the name or try to use the default value to
288
+ call, for example, ``validator`` on it.
289
+
290
+ Attributes annotated as `typing.ClassVar`, and attributes that are
291
+ neither annotated nor set to an `field()` are **ignored**.
292
+
293
+ these (dict[str, object]):
294
+ A dictionary of name to the (private) return value of `field()`
295
+ mappings. This is useful to avoid the definition of your attributes
296
+ within the class body because you can't (for example, if you want
297
+ to add ``__repr__`` methods to Django models) or don't want to.
298
+
299
+ If *these* is not `None`, *attrs* will *not* search the class body
300
+ for attributes and will *not* remove any attributes from it.
301
+
302
+ The order is deduced from the order of the attributes inside
303
+ *these*.
304
+
305
+ Arguably, this is a rather obscure feature.
306
+
307
+ .. versionadded:: 20.1.0
308
+ .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
309
+ .. versionadded:: 22.2.0
310
+ *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
311
+ .. versionchanged:: 24.1.0
312
+ Instances are not compared as tuples of attributes anymore, but using a
313
+ big ``and`` condition. This is faster and has more correct behavior for
314
+ uncomparable values like `math.nan`.
315
+ .. versionadded:: 24.1.0
316
+ If a class has an *inherited* classmethod called
317
+ ``__attrs_init_subclass__``, it is executed after the class is created.
318
+ .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
319
+ .. versionadded:: 24.3.0
320
+ Unless already present, a ``__replace__`` method is automatically
321
+ created for `copy.replace` (Python 3.13+ only).
322
+
323
+ .. note::
324
+
325
+ The main differences to the classic `attr.s` are:
326
+
327
+ - Automatically detect whether or not *auto_attribs* should be `True`
328
+ (c.f. *auto_attribs* parameter).
329
+ - Converters and validators run when attributes are set by default --
330
+ if *frozen* is `False`.
331
+ - *slots=True*
332
+
333
+ Usually, this has only upsides and few visible effects in everyday
334
+ programming. But it *can* lead to some surprising behaviors, so
335
+ please make sure to read :term:`slotted classes`.
336
+
337
+ - *auto_exc=True*
338
+ - *auto_detect=True*
339
+ - *order=False*
340
+ - Some options that were only relevant on Python 2 or were kept around
341
+ for backwards-compatibility have been removed.
342
+
343
+ """
344
+
345
+ def do_it(cls, auto_attribs):
346
+ return attrs(
347
+ maybe_cls=cls,
348
+ these=these,
349
+ repr=repr,
350
+ hash=hash,
351
+ unsafe_hash=unsafe_hash,
352
+ init=init,
353
+ slots=slots,
354
+ frozen=frozen,
355
+ weakref_slot=weakref_slot,
356
+ str=str,
357
+ auto_attribs=auto_attribs,
358
+ kw_only=kw_only,
359
+ cache_hash=cache_hash,
360
+ auto_exc=auto_exc,
361
+ eq=eq,
362
+ order=order,
363
+ auto_detect=auto_detect,
364
+ collect_by_mro=True,
365
+ getstate_setstate=getstate_setstate,
366
+ on_setattr=on_setattr,
367
+ field_transformer=field_transformer,
368
+ match_args=match_args,
369
+ )
370
+
371
+ def wrap(cls):
372
+ """
373
+ Making this a wrapper ensures this code runs during class creation.
374
+
375
+ We also ensure that frozen-ness of classes is inherited.
376
+ """
377
+ nonlocal frozen, on_setattr
378
+
379
+ had_on_setattr = on_setattr not in (None, setters.NO_OP)
380
+
381
+ # By default, mutable classes convert & validate on setattr.
382
+ if frozen is False and on_setattr is None:
383
+ on_setattr = _DEFAULT_ON_SETATTR
384
+
385
+ # However, if we subclass a frozen class, we inherit the immutability
386
+ # and disable on_setattr.
387
+ for base_cls in cls.__bases__:
388
+ if base_cls.__setattr__ is _frozen_setattrs:
389
+ if had_on_setattr:
390
+ msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
391
+ raise ValueError(msg)
392
+
393
+ on_setattr = setters.NO_OP
394
+ break
395
+
396
+ if auto_attribs is not None:
397
+ return do_it(cls, auto_attribs)
398
+
399
+ try:
400
+ return do_it(cls, True)
401
+ except UnannotatedAttributeError:
402
+ return do_it(cls, False)
403
+
404
+ # maybe_cls's type depends on the usage of the decorator. It's a class
405
+ # if it's used as `@attrs` but `None` if used as `@attrs()`.
406
+ if maybe_cls is None:
407
+ return wrap
408
+
409
+ return wrap(maybe_cls)
410
+
411
+
412
+ mutable = define
413
+ frozen = partial(define, frozen=True, on_setattr=None)
414
+
415
+
416
+ def field(
417
+ *,
418
+ default=NOTHING,
419
+ validator=None,
420
+ repr=True,
421
+ hash=None,
422
+ init=True,
423
+ metadata=None,
424
+ type=None,
425
+ converter=None,
426
+ factory=None,
427
+ kw_only=False,
428
+ eq=None,
429
+ order=None,
430
+ on_setattr=None,
431
+ alias=None,
432
+ ):
433
+ """
434
+ Create a new :term:`field` / :term:`attribute` on a class.
435
+
436
+ .. warning::
437
+
438
+ Does **nothing** unless the class is also decorated with
439
+ `attrs.define` (or similar)!
440
+
441
+ Args:
442
+ default:
443
+ A value that is used if an *attrs*-generated ``__init__`` is used
444
+ and no value is passed while instantiating or the attribute is
445
+ excluded using ``init=False``.
446
+
447
+ If the value is an instance of `attrs.Factory`, its callable will
448
+ be used to construct a new value (useful for mutable data types
449
+ like lists or dicts).
450
+
451
+ If a default is not set (or set manually to `attrs.NOTHING`), a
452
+ value *must* be supplied when instantiating; otherwise a
453
+ `TypeError` will be raised.
454
+
455
+ .. seealso:: `defaults`
456
+
457
+ factory (~typing.Callable):
458
+ Syntactic sugar for ``default=attr.Factory(factory)``.
459
+
460
+ validator (~typing.Callable | list[~typing.Callable]):
461
+ Callable that is called by *attrs*-generated ``__init__`` methods
462
+ after the instance has been initialized. They receive the
463
+ initialized instance, the :func:`~attrs.Attribute`, and the passed
464
+ value.
465
+
466
+ The return value is *not* inspected so the validator has to throw
467
+ an exception itself.
468
+
469
+ If a `list` is passed, its items are treated as validators and must
470
+ all pass.
471
+
472
+ Validators can be globally disabled and re-enabled using
473
+ `attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
474
+
475
+ The validator can also be set using decorator notation as shown
476
+ below.
477
+
478
+ .. seealso:: :ref:`validators`
479
+
480
+ repr (bool | ~typing.Callable):
481
+ Include this attribute in the generated ``__repr__`` method. If
482
+ True, include the attribute; if False, omit it. By default, the
483
+ built-in ``repr()`` function is used. To override how the attribute
484
+ value is formatted, pass a ``callable`` that takes a single value
485
+ and returns a string. Note that the resulting string is used as-is,
486
+ which means it will be used directly *instead* of calling
487
+ ``repr()`` (the default).
488
+
489
+ eq (bool | ~typing.Callable):
490
+ If True (default), include this attribute in the generated
491
+ ``__eq__`` and ``__ne__`` methods that check two instances for
492
+ equality. To override how the attribute value is compared, pass a
493
+ callable that takes a single value and returns the value to be
494
+ compared.
495
+
496
+ .. seealso:: `comparison`
497
+
498
+ order (bool | ~typing.Callable):
499
+ If True (default), include this attributes in the generated
500
+ ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
501
+ override how the attribute value is ordered, pass a callable that
502
+ takes a single value and returns the value to be ordered.
503
+
504
+ .. seealso:: `comparison`
505
+
506
+ hash (bool | None):
507
+ Include this attribute in the generated ``__hash__`` method. If
508
+ None (default), mirror *eq*'s value. This is the correct behavior
509
+ according the Python spec. Setting this value to anything else
510
+ than None is *discouraged*.
511
+
512
+ .. seealso:: `hashing`
513
+
514
+ init (bool):
515
+ Include this attribute in the generated ``__init__`` method.
516
+
517
+ It is possible to set this to False and set a default value. In
518
+ that case this attributed is unconditionally initialized with the
519
+ specified default value or factory.
520
+
521
+ .. seealso:: `init`
522
+
523
+ converter (typing.Callable | Converter):
524
+ A callable that is called by *attrs*-generated ``__init__`` methods
525
+ to convert attribute's value to the desired format.
526
+
527
+ If a vanilla callable is passed, it is given the passed-in value as
528
+ the only positional argument. It is possible to receive additional
529
+ arguments by wrapping the callable in a `Converter`.
530
+
531
+ Either way, the returned value will be used as the new value of the
532
+ attribute. The value is converted before being passed to the
533
+ validator, if any.
534
+
535
+ .. seealso:: :ref:`converters`
536
+
537
+ metadata (dict | None):
538
+ An arbitrary mapping, to be used by third-party code.
539
+
540
+ .. seealso:: `extending-metadata`.
541
+
542
+ type (type):
543
+ The type of the attribute. Nowadays, the preferred method to
544
+ specify the type is using a variable annotation (see :pep:`526`).
545
+ This argument is provided for backwards-compatibility and for usage
546
+ with `make_class`. Regardless of the approach used, the type will
547
+ be stored on ``Attribute.type``.
548
+
549
+ Please note that *attrs* doesn't do anything with this metadata by
550
+ itself. You can use it as part of your own code or for `static type
551
+ checking <types>`.
552
+
553
+ kw_only (bool):
554
+ Make this attribute keyword-only in the generated ``__init__`` (if
555
+ ``init`` is False, this parameter is ignored).
556
+
557
+ on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
558
+ Allows to overwrite the *on_setattr* setting from `attr.s`. If left
559
+ None, the *on_setattr* value from `attr.s` is used. Set to
560
+ `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
561
+ attribute -- regardless of the setting in `define()`.
562
+
563
+ alias (str | None):
564
+ Override this attribute's parameter name in the generated
565
+ ``__init__`` method. If left None, default to ``name`` stripped
566
+ of leading underscores. See `private-attributes`.
567
+
568
+ .. versionadded:: 20.1.0
569
+ .. versionchanged:: 21.1.0
570
+ *eq*, *order*, and *cmp* also accept a custom callable
571
+ .. versionadded:: 22.2.0 *alias*
572
+ .. versionadded:: 23.1.0
573
+ The *type* parameter has been re-added; mostly for `attrs.make_class`.
574
+ Please note that type checkers ignore this metadata.
575
+
576
+ .. seealso::
577
+
578
+ `attr.ib`
579
+ """
580
+ return attrib(
581
+ default=default,
582
+ validator=validator,
583
+ repr=repr,
584
+ hash=hash,
585
+ init=init,
586
+ metadata=metadata,
587
+ type=type,
588
+ converter=converter,
589
+ factory=factory,
590
+ kw_only=kw_only,
591
+ eq=eq,
592
+ order=order,
593
+ on_setattr=on_setattr,
594
+ alias=alias,
595
+ )
596
+
597
+
598
+ def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
599
+ """
600
+ Same as `attr.asdict`, except that collections types are always retained
601
+ and dict is always used as *dict_factory*.
602
+
603
+ .. versionadded:: 21.3.0
604
+ """
605
+ return _asdict(
606
+ inst=inst,
607
+ recurse=recurse,
608
+ filter=filter,
609
+ value_serializer=value_serializer,
610
+ retain_collection_types=True,
611
+ )
612
+
613
+
614
+ def astuple(inst, *, recurse=True, filter=None):
615
+ """
616
+ Same as `attr.astuple`, except that collections types are always retained
617
+ and `tuple` is always used as the *tuple_factory*.
618
+
619
+ .. versionadded:: 21.3.0
620
+ """
621
+ return _astuple(
622
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
623
+ )
venv/Lib/site-packages/attr/_typing_compat.pyi ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, ClassVar, Protocol
2
+
3
+ # MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
4
+ MYPY = False
5
+
6
+ if MYPY:
7
+ # A protocol to be able to statically accept an attrs class.
8
+ class AttrsInstance_(Protocol):
9
+ __attrs_attrs__: ClassVar[Any]
10
+
11
+ else:
12
+ # For type checkers without plug-in support use an empty protocol that
13
+ # will (hopefully) be combined into a union.
14
+ class AttrsInstance_(Protocol):
15
+ pass
venv/Lib/site-packages/attr/_version_info.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SPDX-License-Identifier: MIT
2
+
3
+
4
+ from functools import total_ordering
5
+
6
+ from ._funcs import astuple
7
+ from ._make import attrib, attrs
8
+
9
+
10
+ @total_ordering
11
+ @attrs(eq=False, order=False, slots=True, frozen=True)
12
+ class VersionInfo:
13
+ """
14
+ A version object that can be compared to tuple of length 1--4:
15
+
16
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
17
+ True
18
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
19
+ True
20
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
21
+ >>> vi < (19, 1, 1)
22
+ False
23
+ >>> vi < (19,)
24
+ False
25
+ >>> vi == (19, 2,)
26
+ True
27
+ >>> vi == (19, 2, 1)
28
+ False
29
+
30
+ .. versionadded:: 19.2
31
+ """
32
+
33
+ year = attrib(type=int)
34
+ minor = attrib(type=int)
35
+ micro = attrib(type=int)
36
+ releaselevel = attrib(type=str)
37
+
38
+ @classmethod
39
+ def _from_version_string(cls, s):
40
+ """
41
+ Parse *s* and return a _VersionInfo.
42
+ """
43
+ v = s.split(".")
44
+ if len(v) == 3:
45
+ v.append("final")
46
+
47
+ return cls(
48
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
49
+ )
50
+
51
+ def _ensure_tuple(self, other):
52
+ """
53
+ Ensure *other* is a tuple of a valid length.
54
+
55
+ Returns a possibly transformed *other* and ourselves as a tuple of
56
+ the same length as *other*.
57
+ """
58
+
59
+ if self.__class__ is other.__class__:
60
+ other = astuple(other)
61
+
62
+ if not isinstance(other, tuple):
63
+ raise NotImplementedError
64
+
65
+ if not (1 <= len(other) <= 4):
66
+ raise NotImplementedError
67
+
68
+ return astuple(self)[: len(other)], other
69
+
70
+ def __eq__(self, other):
71
+ try:
72
+ us, them = self._ensure_tuple(other)
73
+ except NotImplementedError:
74
+ return NotImplemented
75
+
76
+ return us == them
77
+
78
+ def __lt__(self, other):
79
+ try:
80
+ us, them = self._ensure_tuple(other)
81
+ except NotImplementedError:
82
+ return NotImplemented
83
+
84
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
85
+ # have to do anything special with releaselevel for now.
86
+ return us < them
venv/Lib/site-packages/attr/_version_info.pyi ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ class VersionInfo:
2
+ @property
3
+ def year(self) -> int: ...
4
+ @property
5
+ def minor(self) -> int: ...
6
+ @property
7
+ def micro(self) -> int: ...
8
+ @property
9
+ def releaselevel(self) -> str: ...