Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- .gitignore +18 -0
- venv/Lib/site-packages/aiohttp-3.11.18.dist-info/INSTALLER +1 -0
- venv/Lib/site-packages/aiohttp-3.11.18.dist-info/METADATA +251 -0
- venv/Lib/site-packages/aiohttp-3.11.18.dist-info/RECORD +131 -0
- venv/Lib/site-packages/aiohttp-3.11.18.dist-info/WHEEL +5 -0
- venv/Lib/site-packages/aiohttp-3.11.18.dist-info/top_level.txt +1 -0
- venv/Lib/site-packages/aiohttp/__init__.py +264 -0
- venv/Lib/site-packages/aiohttp/_cparser.pxd +158 -0
- venv/Lib/site-packages/aiohttp/_find_header.pxd +2 -0
- venv/Lib/site-packages/aiohttp/_headers.pxi +83 -0
- venv/Lib/site-packages/aiohttp/_http_parser.pyx +837 -0
- venv/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd +0 -0
- venv/Lib/site-packages/aiohttp/_http_writer.pyx +160 -0
- venv/Lib/site-packages/aiohttp/client.py +1550 -0
- venv/Lib/site-packages/aiohttp/client_exceptions.py +421 -0
- venv/Lib/site-packages/aiohttp/client_proto.py +308 -0
- venv/Lib/site-packages/aiohttp/client_reqrep.py +1315 -0
- venv/Lib/site-packages/aiohttp/client_ws.py +428 -0
- venv/Lib/site-packages/aiohttp/compression_utils.py +173 -0
- venv/Lib/site-packages/aiohttp/connector.py +1658 -0
- venv/Lib/site-packages/aiohttp/cookiejar.py +495 -0
- venv/Lib/site-packages/aiohttp/formdata.py +182 -0
- venv/Lib/site-packages/aiohttp/hdrs.py +121 -0
- venv/Lib/site-packages/aiohttp/helpers.py +958 -0
- venv/Lib/site-packages/aiohttp/http.py +72 -0
- venv/Lib/site-packages/aiohttp/http_exceptions.py +112 -0
- venv/Lib/site-packages/aiohttp/http_parser.py +1046 -0
- venv/Lib/site-packages/aiohttp/http_websocket.py +36 -0
- venv/Lib/site-packages/aiohttp/http_writer.py +249 -0
- venv/Lib/site-packages/aiohttp/log.py +8 -0
- venv/Lib/site-packages/aiohttp/multipart.py +1071 -0
- venv/Lib/site-packages/aiohttp/payload.py +519 -0
- venv/Lib/site-packages/aiohttp/payload_streamer.py +78 -0
- venv/Lib/site-packages/aiohttp/py.typed +1 -0
- venv/Lib/site-packages/aiohttp/pytest_plugin.py +436 -0
- venv/Lib/site-packages/aiohttp/resolver.py +190 -0
- venv/Lib/site-packages/aiohttp/streams.py +727 -0
- venv/Lib/site-packages/aiohttp/tcp_helpers.py +37 -0
- venv/Lib/site-packages/aiohttp/test_utils.py +774 -0
- venv/Lib/site-packages/aiohttp/tracing.py +470 -0
- venv/Lib/site-packages/aiohttp/typedefs.py +69 -0
- venv/Lib/site-packages/aiohttp/web.py +605 -0
- venv/Lib/site-packages/aiohttp/web_app.py +620 -0
- venv/Lib/site-packages/aiohttp/web_exceptions.py +452 -0
- venv/Lib/site-packages/aiohttp/web_fileresponse.py +418 -0
- venv/Lib/site-packages/aiohttp/web_log.py +216 -0
- venv/Lib/site-packages/aiohttp/web_middlewares.py +121 -0
- venv/Lib/site-packages/aiohttp/web_protocol.py +792 -0
- venv/Lib/site-packages/aiohttp/web_request.py +914 -0
- venv/Lib/site-packages/aiohttp/web_response.py +838 -0
.gitignore
ADDED
@@ -0,0 +1,18 @@
+# Python
+__pycache__/
+*.py[cod]
+venv/
+
+# Data
+data/raw/
+data/processed/
+results/
+
+# Jupyter
+.ipynb_checkpoints/
+
+# Docker
+*.log
+
+# System
+Thumbs.db
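As a quick sanity check on the patterns above, `git check-ignore --verbose` reports which rule matches a given path. A minimal sketch driving it from Python (the sample paths are hypothetical; it assumes git is on PATH and that this runs at the repository root):

    import subprocess

    # Ask git which ignore rule (if any) matches each candidate path.
    # The sample paths are made up for illustration.
    paths = ["venv/pyvenv.cfg", "data/raw/sample.csv", "app.log"]
    result = subprocess.run(
        ["git", "check-ignore", "--verbose", "--no-index", *paths],
        capture_output=True,
        text=True,
    )
    # Each matched path prints as: <source>:<line>:<pattern>	<path>
    print(result.stdout)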
venv/Lib/site-packages/aiohttp-3.11.18.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
+pip
venv/Lib/site-packages/aiohttp-3.11.18.dist-info/METADATA
ADDED
@@ -0,0 +1,251 @@
+Metadata-Version: 2.4
+Name: aiohttp
+Version: 3.11.18
+Summary: Async http client/server framework (asyncio)
+Home-page: https://github.com/aio-libs/aiohttp
+Maintainer: aiohttp team <[email protected]>
+Maintainer-email: [email protected]
+License: Apache-2.0
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
+Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
+Project-URL: Docs: RTD, https://docs.aiohttp.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+Requires-Dist: aiohappyeyeballs>=2.3.0
+Requires-Dist: aiosignal>=1.1.2
+Requires-Dist: async-timeout<6.0,>=4.0; python_version < "3.11"
+Requires-Dist: attrs>=17.3.0
+Requires-Dist: frozenlist>=1.1.1
+Requires-Dist: multidict<7.0,>=4.5
+Requires-Dist: propcache>=0.2.0
+Requires-Dist: yarl<2.0,>=1.17.0
+Provides-Extra: speedups
+Requires-Dist: aiodns>=3.2.0; (sys_platform == "linux" or sys_platform == "darwin") and extra == "speedups"
+Requires-Dist: Brotli; platform_python_implementation == "CPython" and extra == "speedups"
+Requires-Dist: brotlicffi; platform_python_implementation != "CPython" and extra == "speedups"
+Dynamic: license-file
+
+==================================
+Async http client/server framework
+==================================
+
+.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
+   :height: 64px
+   :width: 64px
+   :alt: aiohttp logo
+
+|
+
+.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
+   :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+   :alt: GitHub Actions status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
+   :target: https://codecov.io/gh/aio-libs/aiohttp
+   :alt: codecov.io status for master branch
+
+.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json
+   :target: https://codspeed.io/aio-libs/aiohttp
+   :alt: Codspeed.io status for aiohttp
+
+.. image:: https://badge.fury.io/py/aiohttp.svg
+   :target: https://pypi.org/project/aiohttp
+   :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
+   :target: https://docs.aiohttp.org/
+   :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+   :target: https://matrix.to/#/%23aio-libs:matrix.org
+   :alt: Matrix Room — #aio-libs:matrix.org
+
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+   :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+   :alt: Matrix Space — #aio-libs-space:matrix.org
+
+
+Key Features
+============
+
+- Supports both client and server side of HTTP protocol.
+- Supports both client and server Web-Sockets out-of-the-box and avoids
+  Callback Hell.
+- Provides Web-server with middleware and pluggable routing.
+
+
+Getting started
+===============
+
+Client
+------
+
+To get something from the web:
+
+.. code-block:: python
+
+    import aiohttp
+    import asyncio
+
+    async def main():
+
+        async with aiohttp.ClientSession() as session:
+            async with session.get('http://python.org') as response:
+
+                print("Status:", response.status)
+                print("Content-type:", response.headers['content-type'])
+
+                html = await response.text()
+                print("Body:", html[:15], "...")
+
+    asyncio.run(main())
+
+This prints:
+
+.. code-block::
+
+    Status: 200
+    Content-type: text/html; charset=utf-8
+    Body: <!doctype html> ...
+
+Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
+
+Server
+------
+
+An example using a simple server:
+
+.. code-block:: python
+
+    # examples/server_simple.py
+    from aiohttp import web
+
+    async def handle(request):
+        name = request.match_info.get('name', "Anonymous")
+        text = "Hello, " + name
+        return web.Response(text=text)
+
+    async def wshandle(request):
+        ws = web.WebSocketResponse()
+        await ws.prepare(request)
+
+        async for msg in ws:
+            if msg.type == web.WSMsgType.text:
+                await ws.send_str("Hello, {}".format(msg.data))
+            elif msg.type == web.WSMsgType.binary:
+                await ws.send_bytes(msg.data)
+            elif msg.type == web.WSMsgType.close:
+                break
+
+        return ws
+
+
+    app = web.Application()
+    app.add_routes([web.get('/', handle),
+                    web.get('/echo', wshandle),
+                    web.get('/{name}', handle)])
+
+    if __name__ == '__main__':
+        web.run_app(app)
+
+
+Documentation
+=============
+
+https://aiohttp.readthedocs.io/
+
+
+Demos
+=====
+
+https://github.com/aio-libs/aiohttp-demos
+
+
+External links
+==============
+
+* `Third party libraries
+  <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
+* `Built with aiohttp
+  <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
+* `Powered by aiohttp
+  <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
+
+Feel free to make a Pull Request for adding your link to these pages!
+
+
+Communication channels
+======================
+
+*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions
+
+*Matrix*: `#aio-libs:matrix.org <https://matrix.to/#/#aio-libs:matrix.org>`_
+
+We support `Stack Overflow
+<https://stackoverflow.com/questions/tagged/aiohttp>`_.
+Please add *aiohttp* tag to your question there.
+
+Requirements
+============
+
+- attrs_
+- multidict_
+- yarl_
+- frozenlist_
+
+Optionally you may install the aiodns_ library (highly recommended for sake of speed).
+
+.. _aiodns: https://pypi.python.org/pypi/aiodns
+.. _attrs: https://github.com/python-attrs/attrs
+.. _multidict: https://pypi.python.org/pypi/multidict
+.. _frozenlist: https://pypi.org/project/frozenlist/
+.. _yarl: https://pypi.python.org/pypi/yarl
+.. _async-timeout: https://pypi.python.org/pypi/async_timeout
+
+License
+=======
+
+``aiohttp`` is offered under the Apache 2 license.
+
+
+Keepsafe
+========
+
+The aiohttp community would like to thank Keepsafe
+(https://www.getkeepsafe.com) for its support in the early days of
+the project.
+
+
+Source code
+===========
+
+The latest developer version is available in a GitHub repository:
+https://github.com/aio-libs/aiohttp
+
+Benchmarks
+==========
+
+If you are interested in efficiency, the AsyncIO community maintains a
+list of benchmarks on the official wiki:
+https://github.com/python/asyncio/wiki/Benchmarks
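For reference, the fields above can be read back from the installed distribution at runtime. A minimal sketch using only the standard library (the commented output values assume this exact venv):

    from importlib.metadata import metadata, version

    # Read back the installed aiohttp metadata shown in the diff above.
    print(version("aiohttp"))        # 3.11.18 in this venv
    md = metadata("aiohttp")
    print(md["Requires-Python"])     # >=3.9
    for dep in md.get_all("Requires-Dist"):
        print(dep)                   # core deps plus the 'speedups' extra markers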
venv/Lib/site-packages/aiohttp-3.11.18.dist-info/RECORD
ADDED
@@ -0,0 +1,131 @@
+aiohttp-3.11.18.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+aiohttp-3.11.18.dist-info/METADATA,sha256=3jxI2P360mYrWF1mlKSKTspwQQQSpHqyMHvviPlWqFQ,7985
+aiohttp-3.11.18.dist-info/RECORD,,
+aiohttp-3.11.18.dist-info/WHEEL,sha256=YvQgr19VPpDKcQ70xCXIWyDmGnURmFmzZpp_Uw5rBS8,101
+aiohttp-3.11.18.dist-info/licenses/LICENSE.txt,sha256=wUk-nxDVnR-6n53ygAjhVX4zz5-6yM4SY6ozk5goA94,601
+aiohttp-3.11.18.dist-info/top_level.txt,sha256=iv-JIaacmTl-hSho3QmphcKnbRRYx1st47yjz_178Ro,8
+aiohttp/.hash/_cparser.pxd.hash,sha256=dVGMrCmyJM_owqoRLPezK095md0X5R319koTuhUN6DQ,64
+aiohttp/.hash/_find_header.pxd.hash,sha256=W5qRPWDc55gArGZkriI5tztmQHkrdwR6NdQfRQfTxIg,64
+aiohttp/.hash/_http_parser.pyx.hash,sha256=m0UnDTDnk7nJB7FCyyTKI3KoEK2YHyzGxv6dxR6cNEQ,64
+aiohttp/.hash/_http_writer.pyx.hash,sha256=6wl8DZynpvBFMT-qCSXDwvdFWO6u6g6YsIa4AKQg-uA,64
+aiohttp/.hash/hdrs.py.hash,sha256=GldJpkmfx93VdDz-6BEe9rXA7UKQL6vnL5dnJl_h7Ug,64
+aiohttp/__init__.py,sha256=yZldvUz4ctEaOXN-M9BciDt_YLi-hWLec2G2vdEzJ2Q,8104
+aiohttp/__pycache__/__init__.cpython-312.pyc,,
+aiohttp/__pycache__/abc.cpython-312.pyc,,
+aiohttp/__pycache__/base_protocol.cpython-312.pyc,,
+aiohttp/__pycache__/client.cpython-312.pyc,,
+aiohttp/__pycache__/client_exceptions.cpython-312.pyc,,
+aiohttp/__pycache__/client_proto.cpython-312.pyc,,
+aiohttp/__pycache__/client_reqrep.cpython-312.pyc,,
+aiohttp/__pycache__/client_ws.cpython-312.pyc,,
+aiohttp/__pycache__/compression_utils.cpython-312.pyc,,
+aiohttp/__pycache__/connector.cpython-312.pyc,,
+aiohttp/__pycache__/cookiejar.cpython-312.pyc,,
+aiohttp/__pycache__/formdata.cpython-312.pyc,,
+aiohttp/__pycache__/hdrs.cpython-312.pyc,,
+aiohttp/__pycache__/helpers.cpython-312.pyc,,
+aiohttp/__pycache__/http.cpython-312.pyc,,
+aiohttp/__pycache__/http_exceptions.cpython-312.pyc,,
+aiohttp/__pycache__/http_parser.cpython-312.pyc,,
+aiohttp/__pycache__/http_websocket.cpython-312.pyc,,
+aiohttp/__pycache__/http_writer.cpython-312.pyc,,
+aiohttp/__pycache__/log.cpython-312.pyc,,
+aiohttp/__pycache__/multipart.cpython-312.pyc,,
+aiohttp/__pycache__/payload.cpython-312.pyc,,
+aiohttp/__pycache__/payload_streamer.cpython-312.pyc,,
+aiohttp/__pycache__/pytest_plugin.cpython-312.pyc,,
+aiohttp/__pycache__/resolver.cpython-312.pyc,,
+aiohttp/__pycache__/streams.cpython-312.pyc,,
+aiohttp/__pycache__/tcp_helpers.cpython-312.pyc,,
+aiohttp/__pycache__/test_utils.cpython-312.pyc,,
+aiohttp/__pycache__/tracing.cpython-312.pyc,,
+aiohttp/__pycache__/typedefs.cpython-312.pyc,,
+aiohttp/__pycache__/web.cpython-312.pyc,,
+aiohttp/__pycache__/web_app.cpython-312.pyc,,
+aiohttp/__pycache__/web_exceptions.cpython-312.pyc,,
+aiohttp/__pycache__/web_fileresponse.cpython-312.pyc,,
+aiohttp/__pycache__/web_log.cpython-312.pyc,,
+aiohttp/__pycache__/web_middlewares.cpython-312.pyc,,
+aiohttp/__pycache__/web_protocol.cpython-312.pyc,,
+aiohttp/__pycache__/web_request.cpython-312.pyc,,
+aiohttp/__pycache__/web_response.cpython-312.pyc,,
+aiohttp/__pycache__/web_routedef.cpython-312.pyc,,
+aiohttp/__pycache__/web_runner.cpython-312.pyc,,
+aiohttp/__pycache__/web_server.cpython-312.pyc,,
+aiohttp/__pycache__/web_urldispatcher.cpython-312.pyc,,
+aiohttp/__pycache__/web_ws.cpython-312.pyc,,
+aiohttp/__pycache__/worker.cpython-312.pyc,,
+aiohttp/_cparser.pxd,sha256=W6-cu0SyHhOEPeb475NvxagQ1Jz9pWqyZJvwEqTLNs0,4476
+aiohttp/_find_header.pxd,sha256=BFUSmxhemBtblqxzjzH3x03FfxaWlTyuAIOz8YZ5_nM,70
+aiohttp/_headers.pxi,sha256=1MhCe6Un_KI1tpO85HnDfzVO94BhcirLanAOys5FIHA,2090
+aiohttp/_http_parser.cp312-win_amd64.pyd,sha256=H9pqRaa1hrNwSww-HuQxwUJnsQ0sLJPxvvG6BSLnyDQ,263168
+aiohttp/_http_parser.pyx,sha256=_F97Oagn-KQyd3WrXFCsbpj3PBJv3sYJTY6kTmvdJj4,29078
+aiohttp/_http_writer.cp312-win_amd64.pyd,sha256=GBA6ALrDFrQM8aAjjO27GFEazErf33AYO1M_7C63G44,48128
+aiohttp/_http_writer.pyx,sha256=w60HP6TVQKmrs_nHm8FlSNYiRX0EBo7Hyq9imUmDNjo,4721
+aiohttp/_websocket/.hash/mask.pxd.hash,sha256=MtKRHuamwsRzCTtELIaBcyklRCAFDonBlAPO_IRg3aY,64
+aiohttp/_websocket/.hash/mask.pyx.hash,sha256=eOyT813GYbX_MUjzLOpzr-vTu3J_gpUOy8EzNgE7ntQ,64
+aiohttp/_websocket/.hash/reader_c.pxd.hash,sha256=yvt0gruPh-Of05bSNwxeoYQyBSudgK1tdYTXBHa2qh8,64
+aiohttp/_websocket/__init__.py,sha256=R51KWH5kkdtDLb7T-ilztksbfweKCy3t22SgxGtiY-4,45
+aiohttp/_websocket/__pycache__/__init__.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/helpers.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/models.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/reader.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/reader_c.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/reader_py.cpython-312.pyc,,
+aiohttp/_websocket/__pycache__/writer.cpython-312.pyc,,
+aiohttp/_websocket/helpers.py,sha256=amqvDhoAKAi8ptB4qUNuQhkaOn-4JxSh_VLAqytmEfw,5185
+aiohttp/_websocket/mask.cp312-win_amd64.pyd,sha256=cgYEgVfcLb31l3t6RrERhW_KRn2M-gjpd3LLtMZXCvw,36352
+aiohttp/_websocket/mask.pxd,sha256=41TdSZvhcbYSW_Vrw7bF4r_yoor2njtdaZ3bmvK6-jw,115
+aiohttp/_websocket/mask.pyx,sha256=Ro7dOOv43HAAqNMz3xyCA11ppcn-vARIvjycStTEYww,1445
+aiohttp/_websocket/models.py,sha256=Pz8qvnU43VUCNZcY4g03VwTsHOsb_jSN8iG69xMAc_A,2205
+aiohttp/_websocket/reader.py,sha256=1r0cJ-jdFgbSrC6-jI0zjEA1CppzoUn8u_wiebrVVO0,1061
+aiohttp/_websocket/reader_c.cp312-win_amd64.pyd,sha256=BC24VBzdxztmEB7CDIh5y3zS0odWOQ6t_6ZQ3AaLgbE,162304
+aiohttp/_websocket/reader_c.pxd,sha256=HNOl4gRWtNBNEYNbK9PGOfFEQwUqJGexBbDKB_20sl0,2735
+aiohttp/_websocket/reader_c.py,sha256=-NS-tAkVY8H9JGNH5pYAHWmqj48MB67myWbsBXCKjD8,18826
+aiohttp/_websocket/reader_py.py,sha256=-NS-tAkVY8H9JGNH5pYAHWmqj48MB67myWbsBXCKjD8,18826
+aiohttp/_websocket/writer.py,sha256=D_mfB5Qit--2P6Bp1Eti9OJY7Sl4oLCjKB2RDcOwWEs,7254
+aiohttp/abc.py,sha256=OINViQw0OsbiM_KrOs-9vzzR0l2WxLtrzvp5Wb4TIBI,6765
+aiohttp/base_protocol.py,sha256=8vNIv6QV_SDCW-8tfhlyxSwiBD7dAiMTqJI1GI8RG5s,3125
+aiohttp/client.py,sha256=nqVyZ8mE0_5rJ3jVW0Z-U5VUHyncYCwGW8NbiHNzqgc,55859
+aiohttp/client_exceptions.py,sha256=sJcuvYKaB2nwuSdP7k18y3wc74aU0xAzdJikzzesrPE,11788
+aiohttp/client_proto.py,sha256=-GKW5nUWhXSIT_QyNuALSXkYatK8b45iRUvytPrlY-w,10381
+aiohttp/client_reqrep.py,sha256=RY4JHBY8ED3V0q6spUnu6gc-gdDEl8Vtj0gQKG5idUQ,45240
+aiohttp/client_ws.py,sha256=9DraHuupuJcT7NOgyeGml8SBr7V5D5ID5-piY1fQMdA,15537
+aiohttp/compression_utils.py,sha256=EZ-3hTQ2tX-75l6Q_txvN0nTs8CBIBb0440beikNPIw,5854
+aiohttp/connector.py,sha256=iozDKCB7gdFYEhojpzRbAv4nHNOnbJVlrFcMBKCNHe8,62203
+aiohttp/cookiejar.py,sha256=n41nHmwNTMlg5172GblWKjRp9Tdalu0-14X02BOXr1E,18110
+aiohttp/formdata.py,sha256=PZmRnM9I5Kpg6wP_r7fc31zhBD0vplZ4UHYp0y56Akk,6734
+aiohttp/hdrs.py,sha256=7htmhgZyE9HqWbPpxHU0r7kAIdT2kpOXQa1AadDh2W8,5232
+aiohttp/helpers.py,sha256=zLz193DE3m68gBwsM43cdaqnzz3cdfit0Dhsd9_mXig,30572
+aiohttp/http.py,sha256=DGKcwDbgIMpasv7s2jeKCRuixyj7W-RIrihRFjj0xcY,1914
+aiohttp/http_exceptions.py,sha256=4-y5Vc5pUqbBVcSyCPjANAWw0kv6bsBoijgNx3ZICcY,3073
+aiohttp/http_parser.py,sha256=S8RvdCJD1Mn7QsBJ_KvH2Q4Q0uv3LWUqrB5XaXMjsLg,37897
+aiohttp/http_websocket.py,sha256=b9kBmxPLPFQP_nu_sMhIMIeqDOm0ug8G4prbrhEMHZ0,878
+aiohttp/http_writer.py,sha256=JO_I6HGc600C9CECQTSwosHkjnRcIxcObSwuzMkdtVA,7844
+aiohttp/log.py,sha256=zYUTvXsMQ9Sz1yNN8kXwd5Qxu49a1FzjZ_wQqriEc8M,333
+aiohttp/multipart.py,sha256=cHpKMB1OGu-eZa62otIH25R5i5TbwcJkfgayLx3A1BY,38015
+aiohttp/payload.py,sha256=LqfJiBBbmT07_sSz4NfOvP_olrcLr5sdw8VLSuCJOs4,16312
+aiohttp/payload_streamer.py,sha256=K0iV85iW0vEG3rDkcopruidspynzQvrwW8mJvgPHisg,2289
+aiohttp/py.typed,sha256=3VVwXUAWVEVX7sDwyYDnW5ZdBC9_Z9AJAFfLCleUW0k,8
+aiohttp/pytest_plugin.py,sha256=xZPKY6OYN3XyxHCBkS4K_ca1qjL7sPUEi7KTVSDJ2t8,13204
+aiohttp/resolver.py,sha256=tD5GPm6szkHsvW-xd5nYsu50OrGLGttbXg1vgptCsjA,6659
+aiohttp/streams.py,sha256=B4LngNMnKyAyvySvm2Pnp_VKT3yRL2QVhn4dlFvqH7M,23056
+aiohttp/tcp_helpers.py,sha256=K-hhGh3jd6qCEnHJo8LvFyfJwBjh99UKI7A0aSRVhj4,998
+aiohttp/test_utils.py,sha256=zFWAb-rPz1fWRUHnrjnfUH7ORlfIgZ2UZbEGe4YTa9I,23790
+aiohttp/tracing.py,sha256=c3C8lnLZ0G1Jj3Iv1GgV-Op8PwcM4m6d931w502hSgI,15607
+aiohttp/typedefs.py,sha256=Sx5v2yUyLu8nbabqtJRWj1M1_uW0IZACu78uYD7LBy0,1726
+aiohttp/web.py,sha256=qzYNfrwlh7SD4yHyzq7SC5hldTCX-xitRfx2IVJtajI,19027
+aiohttp/web_app.py,sha256=XRNsu1fhDBZayQSQKYOQbyLUNt-vLh7uxSpou-PCU38,20174
+aiohttp/web_exceptions.py,sha256=itNRhCMDJFhnMWftr5SyTsoqh-i0n9rzTj0sjcAEUjo,10812
+aiohttp/web_fileresponse.py,sha256=21KqwtCLQQ3SZldaW0DxNnZtLoSyUl0q3uSKq7Fj7nk,16922
+aiohttp/web_log.py,sha256=G5ugloW9noUxPft0SmVWOXw30MviL6rqZc3XrKN_T1U,8081
+aiohttp/web_middlewares.py,sha256=mM2-R8eaV2r6Mi9Zc2bDG8QnhE9h0IzPvtDX_fkKR5s,4286
+aiohttp/web_protocol.py,sha256=x1GlB6jqPou3QZyMKpKVLdyETwUTIJ-AbesXDEWxKKY,27807
+aiohttp/web_request.py,sha256=ygupGKr7IlUWRKyaxQ_0tWa75wRwK8gntpVUKridYpA,30551
+aiohttp/web_response.py,sha256=RqTN4bfkP_dPi1SzM-KUUrXmv_x3DTrgHMyiR2HMRWU,29409
+aiohttp/web_routedef.py,sha256=XC10f57Q36JmYaaQqrecsyfIxHMepCKaKkBEB7hLzJI,6324
+aiohttp/web_runner.py,sha256=zyVYVzCgnopiGwnIhKlNZHtLV_IYQ9aC-Vm43j_HRoA,12185
+aiohttp/web_server.py,sha256=RZSWt_Mj-Lu89bFYsr_T3rjxW2VNN7PHNJ2mvv2qELs,2972
+aiohttp/web_urldispatcher.py,sha256=PPzAeo1CBcKLw6gl5yXOG7ScybdmLftuhPpa5KK4fyk,45303
+aiohttp/web_ws.py,sha256=yP0LDngKMwZhksuJ_PRwYov6QUeDDJE2rcPxiWio5oY,23279
+aiohttp/worker.py,sha256=N_9iyS_tR9U0pf3BRaIH2nzA1pjN1Xfi2gGmRrMhnho,8407
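RECORD is the wheel's install manifest: one CSV row per file with an optional hash of the form `sha256=<urlsafe-base64 digest, padding stripped>` and a byte size. A minimal sketch that re-verifies the hashes (the site-packages path is taken from this diff and may differ on other machines):

    import base64
    import csv
    import hashlib
    from pathlib import Path

    site_packages = Path("venv/Lib/site-packages")  # path from this commit
    record = site_packages / "aiohttp-3.11.18.dist-info" / "RECORD"

    with record.open(newline="") as f:
        for path, hash_spec, _size in csv.reader(f):
            if not hash_spec:  # RECORD itself and .pyc files carry no hash
                continue
            algo, _, expected = hash_spec.partition("=")
            digest = hashlib.new(algo, (site_packages / path).read_bytes()).digest()
            actual = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
            assert actual == expected, f"hash mismatch for {path}"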
venv/Lib/site-packages/aiohttp-3.11.18.dist-info/WHEEL
ADDED
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (79.0.0)
+Root-Is-Purelib: false
+Tag: cp312-cp312-win_amd64
+
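The `cp312-cp312-win_amd64` tag means this vendored build only matches CPython 3.12 on 64-bit Windows. A minimal sketch checking that against the running interpreter (assumes the third-party `packaging` library is installed):

    from packaging.tags import Tag, sys_tags

    # Tag taken from the WHEEL file above.
    wheel_tag = Tag("cp312", "cp312", "win_amd64")
    print(wheel_tag in set(sys_tags()))  # True only on CPython 3.12 / 64-bit Windows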
venv/Lib/site-packages/aiohttp-3.11.18.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
+aiohttp
venv/Lib/site-packages/aiohttp/__init__.py
ADDED
@@ -0,0 +1,264 @@
+__version__ = "3.11.18"
+
+from typing import TYPE_CHECKING, Tuple
+
+from . import hdrs as hdrs
+from .client import (
+    BaseConnector,
+    ClientConnectionError,
+    ClientConnectionResetError,
+    ClientConnectorCertificateError,
+    ClientConnectorDNSError,
+    ClientConnectorError,
+    ClientConnectorSSLError,
+    ClientError,
+    ClientHttpProxyError,
+    ClientOSError,
+    ClientPayloadError,
+    ClientProxyConnectionError,
+    ClientRequest,
+    ClientResponse,
+    ClientResponseError,
+    ClientSession,
+    ClientSSLError,
+    ClientTimeout,
+    ClientWebSocketResponse,
+    ClientWSTimeout,
+    ConnectionTimeoutError,
+    ContentTypeError,
+    Fingerprint,
+    InvalidURL,
+    InvalidUrlClientError,
+    InvalidUrlRedirectClientError,
+    NamedPipeConnector,
+    NonHttpUrlClientError,
+    NonHttpUrlRedirectClientError,
+    RedirectClientError,
+    RequestInfo,
+    ServerConnectionError,
+    ServerDisconnectedError,
+    ServerFingerprintMismatch,
+    ServerTimeoutError,
+    SocketTimeoutError,
+    TCPConnector,
+    TooManyRedirects,
+    UnixConnector,
+    WSMessageTypeError,
+    WSServerHandshakeError,
+    request,
+)
+from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
+from .formdata import FormData as FormData
+from .helpers import BasicAuth, ChainMapProxy, ETag
+from .http import (
+    HttpVersion as HttpVersion,
+    HttpVersion10 as HttpVersion10,
+    HttpVersion11 as HttpVersion11,
+    WebSocketError as WebSocketError,
+    WSCloseCode as WSCloseCode,
+    WSMessage as WSMessage,
+    WSMsgType as WSMsgType,
+)
+from .multipart import (
+    BadContentDispositionHeader as BadContentDispositionHeader,
+    BadContentDispositionParam as BadContentDispositionParam,
+    BodyPartReader as BodyPartReader,
+    MultipartReader as MultipartReader,
+    MultipartWriter as MultipartWriter,
+    content_disposition_filename as content_disposition_filename,
+    parse_content_disposition as parse_content_disposition,
+)
+from .payload import (
+    PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
+    AsyncIterablePayload as AsyncIterablePayload,
+    BufferedReaderPayload as BufferedReaderPayload,
+    BytesIOPayload as BytesIOPayload,
+    BytesPayload as BytesPayload,
+    IOBasePayload as IOBasePayload,
+    JsonPayload as JsonPayload,
+    Payload as Payload,
+    StringIOPayload as StringIOPayload,
+    StringPayload as StringPayload,
+    TextIOPayload as TextIOPayload,
+    get_payload as get_payload,
+    payload_type as payload_type,
+)
+from .payload_streamer import streamer as streamer
+from .resolver import (
+    AsyncResolver as AsyncResolver,
+    DefaultResolver as DefaultResolver,
+    ThreadedResolver as ThreadedResolver,
+)
+from .streams import (
+    EMPTY_PAYLOAD as EMPTY_PAYLOAD,
+    DataQueue as DataQueue,
+    EofStream as EofStream,
+    FlowControlDataQueue as FlowControlDataQueue,
+    StreamReader as StreamReader,
+)
+from .tracing import (
+    TraceConfig as TraceConfig,
+    TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
+    TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
+    TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
+    TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
+    TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
+    TraceDnsCacheHitParams as TraceDnsCacheHitParams,
+    TraceDnsCacheMissParams as TraceDnsCacheMissParams,
+    TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
+    TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
+    TraceRequestChunkSentParams as TraceRequestChunkSentParams,
+    TraceRequestEndParams as TraceRequestEndParams,
+    TraceRequestExceptionParams as TraceRequestExceptionParams,
+    TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,
+    TraceRequestRedirectParams as TraceRequestRedirectParams,
+    TraceRequestStartParams as TraceRequestStartParams,
+    TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
+)
+
+if TYPE_CHECKING:
+    # At runtime these are lazy-loaded at the bottom of the file.
+    from .worker import (
+        GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
+        GunicornWebWorker as GunicornWebWorker,
+    )
+
+__all__: Tuple[str, ...] = (
+    "hdrs",
+    # client
+    "BaseConnector",
+    "ClientConnectionError",
+    "ClientConnectionResetError",
+    "ClientConnectorCertificateError",
+    "ClientConnectorDNSError",
+    "ClientConnectorError",
+    "ClientConnectorSSLError",
+    "ClientError",
+    "ClientHttpProxyError",
+    "ClientOSError",
+    "ClientPayloadError",
+    "ClientProxyConnectionError",
+    "ClientResponse",
+    "ClientRequest",
+    "ClientResponseError",
+    "ClientSSLError",
+    "ClientSession",
+    "ClientTimeout",
+    "ClientWebSocketResponse",
+    "ClientWSTimeout",
+    "ConnectionTimeoutError",
+    "ContentTypeError",
+    "Fingerprint",
+    "FlowControlDataQueue",
+    "InvalidURL",
+    "InvalidUrlClientError",
+    "InvalidUrlRedirectClientError",
+    "NonHttpUrlClientError",
+    "NonHttpUrlRedirectClientError",
+    "RedirectClientError",
+    "RequestInfo",
+    "ServerConnectionError",
+    "ServerDisconnectedError",
+    "ServerFingerprintMismatch",
+    "ServerTimeoutError",
+    "SocketTimeoutError",
+    "TCPConnector",
+    "TooManyRedirects",
+    "UnixConnector",
+    "NamedPipeConnector",
+    "WSServerHandshakeError",
+    "request",
+    # cookiejar
+    "CookieJar",
+    "DummyCookieJar",
+    # formdata
+    "FormData",
+    # helpers
+    "BasicAuth",
+    "ChainMapProxy",
+    "ETag",
+    # http
+    "HttpVersion",
+    "HttpVersion10",
+    "HttpVersion11",
+    "WSMsgType",
+    "WSCloseCode",
+    "WSMessage",
+    "WebSocketError",
+    # multipart
+    "BadContentDispositionHeader",
+    "BadContentDispositionParam",
+    "BodyPartReader",
+    "MultipartReader",
+    "MultipartWriter",
+    "content_disposition_filename",
+    "parse_content_disposition",
+    # payload
+    "AsyncIterablePayload",
+    "BufferedReaderPayload",
+    "BytesIOPayload",
+    "BytesPayload",
+    "IOBasePayload",
+    "JsonPayload",
+    "PAYLOAD_REGISTRY",
+    "Payload",
+    "StringIOPayload",
+    "StringPayload",
+    "TextIOPayload",
+    "get_payload",
+    "payload_type",
+    # payload_streamer
+    "streamer",
+    # resolver
+    "AsyncResolver",
+    "DefaultResolver",
+    "ThreadedResolver",
+    # streams
+    "DataQueue",
+    "EMPTY_PAYLOAD",
+    "EofStream",
+    "StreamReader",
+    # tracing
+    "TraceConfig",
+    "TraceConnectionCreateEndParams",
+    "TraceConnectionCreateStartParams",
+    "TraceConnectionQueuedEndParams",
+    "TraceConnectionQueuedStartParams",
+    "TraceConnectionReuseconnParams",
+    "TraceDnsCacheHitParams",
+    "TraceDnsCacheMissParams",
+    "TraceDnsResolveHostEndParams",
+    "TraceDnsResolveHostStartParams",
+    "TraceRequestChunkSentParams",
+    "TraceRequestEndParams",
+    "TraceRequestExceptionParams",
+    "TraceRequestHeadersSentParams",
+    "TraceRequestRedirectParams",
+    "TraceRequestStartParams",
+    "TraceResponseChunkReceivedParams",
+    # workers (imported lazily with __getattr__)
+    "GunicornUVLoopWebWorker",
+    "GunicornWebWorker",
+    "WSMessageTypeError",
+)
+
+
+def __dir__() -> Tuple[str, ...]:
+    return __all__ + ("__doc__",)
+
+
+def __getattr__(name: str) -> object:
+    global GunicornUVLoopWebWorker, GunicornWebWorker
+
+    # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
+    if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
+        try:
+            from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
+        except ImportError:
+            return None
+
+        GunicornUVLoopWebWorker = guv  # type: ignore[misc]
+        GunicornWebWorker = gw  # type: ignore[misc]
+        return guv if name == "GunicornUVLoopWebWorker" else gw
+
+    raise AttributeError(f"module {__name__} has no attribute {name}")
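The `__getattr__`/`__dir__` pair at the end of this `__init__.py` is the PEP 562 module-level lazy-import pattern: expensive names stay out of import time and are resolved, then cached, on first attribute access. A minimal standalone sketch of the same idea, using a hypothetical slow submodule:

    import importlib

    # Map lazily exported names to the (hypothetical) submodule defining them.
    _LAZY = {"SlowThing": ".slow_module"}

    def __getattr__(name: str) -> object:
        if name in _LAZY:
            module = importlib.import_module(_LAZY[name], __package__)
            value = getattr(module, name)
            globals()[name] = value  # cache so later lookups skip __getattr__
            return value
        raise AttributeError(f"module {__name__} has no attribute {name}")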
venv/Lib/site-packages/aiohttp/_cparser.pxd
ADDED
@@ -0,0 +1,158 @@
+from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
+
+
+cdef extern from "../vendor/llhttp/build/llhttp.h":
+
+    struct llhttp__internal_s:
+        int32_t _index
+        void* _span_pos0
+        void* _span_cb0
+        int32_t error
+        const char* reason
+        const char* error_pos
+        void* data
+        void* _current
+        uint64_t content_length
+        uint8_t type
+        uint8_t method
+        uint8_t http_major
+        uint8_t http_minor
+        uint8_t header_state
+        uint8_t lenient_flags
+        uint8_t upgrade
+        uint8_t finish
+        uint16_t flags
+        uint16_t status_code
+        void* settings
+
+    ctypedef llhttp__internal_s llhttp__internal_t
+    ctypedef llhttp__internal_t llhttp_t
+
+    ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
+    ctypedef int (*llhttp_cb)(llhttp_t*) except -1
+
+    struct llhttp_settings_s:
+        llhttp_cb on_message_begin
+        llhttp_data_cb on_url
+        llhttp_data_cb on_status
+        llhttp_data_cb on_header_field
+        llhttp_data_cb on_header_value
+        llhttp_cb on_headers_complete
+        llhttp_data_cb on_body
+        llhttp_cb on_message_complete
+        llhttp_cb on_chunk_header
+        llhttp_cb on_chunk_complete
+
+        llhttp_cb on_url_complete
+        llhttp_cb on_status_complete
+        llhttp_cb on_header_field_complete
+        llhttp_cb on_header_value_complete
+
+    ctypedef llhttp_settings_s llhttp_settings_t
+
+    enum llhttp_errno:
+        HPE_OK,
+        HPE_INTERNAL,
+        HPE_STRICT,
+        HPE_LF_EXPECTED,
+        HPE_UNEXPECTED_CONTENT_LENGTH,
+        HPE_CLOSED_CONNECTION,
+        HPE_INVALID_METHOD,
+        HPE_INVALID_URL,
+        HPE_INVALID_CONSTANT,
+        HPE_INVALID_VERSION,
+        HPE_INVALID_HEADER_TOKEN,
+        HPE_INVALID_CONTENT_LENGTH,
+        HPE_INVALID_CHUNK_SIZE,
+        HPE_INVALID_STATUS,
+        HPE_INVALID_EOF_STATE,
+        HPE_INVALID_TRANSFER_ENCODING,
+        HPE_CB_MESSAGE_BEGIN,
+        HPE_CB_HEADERS_COMPLETE,
+        HPE_CB_MESSAGE_COMPLETE,
+        HPE_CB_CHUNK_HEADER,
+        HPE_CB_CHUNK_COMPLETE,
+        HPE_PAUSED,
+        HPE_PAUSED_UPGRADE,
+        HPE_USER
+
+    ctypedef llhttp_errno llhttp_errno_t
+
+    enum llhttp_flags:
+        F_CHUNKED,
+        F_CONTENT_LENGTH
+
+    enum llhttp_type:
+        HTTP_REQUEST,
+        HTTP_RESPONSE,
+        HTTP_BOTH
+
+    enum llhttp_method:
+        HTTP_DELETE,
+        HTTP_GET,
+        HTTP_HEAD,
+        HTTP_POST,
+        HTTP_PUT,
+        HTTP_CONNECT,
+        HTTP_OPTIONS,
+        HTTP_TRACE,
+        HTTP_COPY,
+        HTTP_LOCK,
+        HTTP_MKCOL,
+        HTTP_MOVE,
+        HTTP_PROPFIND,
+        HTTP_PROPPATCH,
+        HTTP_SEARCH,
+        HTTP_UNLOCK,
+        HTTP_BIND,
+        HTTP_REBIND,
+        HTTP_UNBIND,
+        HTTP_ACL,
+        HTTP_REPORT,
+        HTTP_MKACTIVITY,
+        HTTP_CHECKOUT,
+        HTTP_MERGE,
+        HTTP_MSEARCH,
+        HTTP_NOTIFY,
+        HTTP_SUBSCRIBE,
+        HTTP_UNSUBSCRIBE,
+        HTTP_PATCH,
+        HTTP_PURGE,
+        HTTP_MKCALENDAR,
+        HTTP_LINK,
+        HTTP_UNLINK,
+        HTTP_SOURCE,
+        HTTP_PRI,
+        HTTP_DESCRIBE,
+        HTTP_ANNOUNCE,
+        HTTP_SETUP,
+        HTTP_PLAY,
+        HTTP_PAUSE,
+        HTTP_TEARDOWN,
+        HTTP_GET_PARAMETER,
+        HTTP_SET_PARAMETER,
+        HTTP_REDIRECT,
+        HTTP_RECORD,
+        HTTP_FLUSH
+
+    ctypedef llhttp_method llhttp_method_t;
+
+    void llhttp_settings_init(llhttp_settings_t* settings)
+    void llhttp_init(llhttp_t* parser, llhttp_type type,
+                     const llhttp_settings_t* settings)
+
+    llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
+
+    int llhttp_should_keep_alive(const llhttp_t* parser)
+
+    void llhttp_resume_after_upgrade(llhttp_t* parser)
+
+    llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
+    const char* llhttp_get_error_reason(const llhttp_t* parser)
+    const char* llhttp_get_error_pos(const llhttp_t* parser)
+
+    const char* llhttp_method_name(llhttp_method_t method)
+
+    void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
+    void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
+    void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
venv/Lib/site-packages/aiohttp/_find_header.pxd
ADDED
@@ -0,0 +1,2 @@
+cdef extern from "_find_header.h":
+    int find_header(char *, int)
venv/Lib/site-packages/aiohttp/_headers.pxi
ADDED
@@ -0,0 +1,83 @@
+# The file is autogenerated from aiohttp/hdrs.py
+# Run ./tools/gen.py to update it after the origin changing.
+
+from . import hdrs
+cdef tuple headers = (
+    hdrs.ACCEPT,
+    hdrs.ACCEPT_CHARSET,
+    hdrs.ACCEPT_ENCODING,
+    hdrs.ACCEPT_LANGUAGE,
+    hdrs.ACCEPT_RANGES,
+    hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
+    hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
+    hdrs.ACCESS_CONTROL_ALLOW_METHODS,
+    hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
+    hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
+    hdrs.ACCESS_CONTROL_MAX_AGE,
+    hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
+    hdrs.ACCESS_CONTROL_REQUEST_METHOD,
+    hdrs.AGE,
+    hdrs.ALLOW,
+    hdrs.AUTHORIZATION,
+    hdrs.CACHE_CONTROL,
+    hdrs.CONNECTION,
+    hdrs.CONTENT_DISPOSITION,
+    hdrs.CONTENT_ENCODING,
+    hdrs.CONTENT_LANGUAGE,
+    hdrs.CONTENT_LENGTH,
+    hdrs.CONTENT_LOCATION,
+    hdrs.CONTENT_MD5,
+    hdrs.CONTENT_RANGE,
+    hdrs.CONTENT_TRANSFER_ENCODING,
+    hdrs.CONTENT_TYPE,
+    hdrs.COOKIE,
+    hdrs.DATE,
+    hdrs.DESTINATION,
+    hdrs.DIGEST,
+    hdrs.ETAG,
+    hdrs.EXPECT,
+    hdrs.EXPIRES,
+    hdrs.FORWARDED,
+    hdrs.FROM,
+    hdrs.HOST,
+    hdrs.IF_MATCH,
+    hdrs.IF_MODIFIED_SINCE,
+    hdrs.IF_NONE_MATCH,
+    hdrs.IF_RANGE,
+    hdrs.IF_UNMODIFIED_SINCE,
+    hdrs.KEEP_ALIVE,
+    hdrs.LAST_EVENT_ID,
+    hdrs.LAST_MODIFIED,
+    hdrs.LINK,
+    hdrs.LOCATION,
+    hdrs.MAX_FORWARDS,
+    hdrs.ORIGIN,
+    hdrs.PRAGMA,
+    hdrs.PROXY_AUTHENTICATE,
+    hdrs.PROXY_AUTHORIZATION,
+    hdrs.RANGE,
+    hdrs.REFERER,
+    hdrs.RETRY_AFTER,
+    hdrs.SEC_WEBSOCKET_ACCEPT,
+    hdrs.SEC_WEBSOCKET_EXTENSIONS,
+    hdrs.SEC_WEBSOCKET_KEY,
+    hdrs.SEC_WEBSOCKET_KEY1,
+    hdrs.SEC_WEBSOCKET_PROTOCOL,
+    hdrs.SEC_WEBSOCKET_VERSION,
+    hdrs.SERVER,
+    hdrs.SET_COOKIE,
+    hdrs.TE,
+    hdrs.TRAILER,
+    hdrs.TRANSFER_ENCODING,
+    hdrs.URI,
+    hdrs.UPGRADE,
+    hdrs.USER_AGENT,
+    hdrs.VARY,
+    hdrs.VIA,
+    hdrs.WWW_AUTHENTICATE,
+    hdrs.WANT_DIGEST,
+    hdrs.WARNING,
+    hdrs.X_FORWARDED_FOR,
+    hdrs.X_FORWARDED_HOST,
+    hdrs.X_FORWARDED_PROTO,
+)
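The tuple above interns the well-known header names from `aiohttp.hdrs` so the C parser can hand out canonical objects instead of allocating new strings. Those constants are case-insensitive strings used as `CIMultiDict` keys; a small illustration:

    from multidict import CIMultiDict

    from aiohttp import hdrs

    headers = CIMultiDict()
    headers[hdrs.CONTENT_TYPE] = "text/html"
    # CIMultiDict keys are case-insensitive, so any casing finds the entry.
    print(headers["content-type"])  # text/html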
venv/Lib/site-packages/aiohttp/_http_parser.pyx
ADDED
@@ -0,0 +1,837 @@
+#cython: language_level=3
+#
+# Based on https://github.com/MagicStack/httptools
+#
+
+from cpython cimport (
+    Py_buffer,
+    PyBUF_SIMPLE,
+    PyBuffer_Release,
+    PyBytes_AsString,
+    PyBytes_AsStringAndSize,
+    PyObject_GetBuffer,
+)
+from cpython.mem cimport PyMem_Free, PyMem_Malloc
+from libc.limits cimport ULLONG_MAX
+from libc.string cimport memcpy
+
+from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
+from yarl import URL as _URL
+
+from aiohttp import hdrs
+from aiohttp.helpers import DEBUG, set_exception
+
+from .http_exceptions import (
+    BadHttpMessage,
+    BadHttpMethod,
+    BadStatusLine,
+    ContentLengthError,
+    InvalidHeader,
+    InvalidURLError,
+    LineTooLong,
+    PayloadEncodingError,
+    TransferEncodingError,
+)
+from .http_parser import DeflateBuffer as _DeflateBuffer
+from .http_writer import (
+    HttpVersion as _HttpVersion,
+    HttpVersion10 as _HttpVersion10,
+    HttpVersion11 as _HttpVersion11,
+)
+from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
+
+cimport cython
+
+from aiohttp cimport _cparser as cparser
+
+include "_headers.pxi"
+
+from aiohttp cimport _find_header
+
+ALLOWED_UPGRADES = frozenset({"websocket"})
+DEF DEFAULT_FREELIST_SIZE = 250
+
+cdef extern from "Python.h":
+    int PyByteArray_Resize(object, Py_ssize_t) except -1
+    Py_ssize_t PyByteArray_Size(object) except -1
+    char* PyByteArray_AsString(object)
+
+__all__ = ('HttpRequestParser', 'HttpResponseParser',
+           'RawRequestMessage', 'RawResponseMessage')
+
+cdef object URL = _URL
+cdef object URL_build = URL.build
+cdef object CIMultiDict = _CIMultiDict
+cdef object CIMultiDictProxy = _CIMultiDictProxy
+cdef object HttpVersion = _HttpVersion
+cdef object HttpVersion10 = _HttpVersion10
+cdef object HttpVersion11 = _HttpVersion11
+cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
+cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
+cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
+cdef object StreamReader = _StreamReader
+cdef object DeflateBuffer = _DeflateBuffer
+cdef bytes EMPTY_BYTES = b""
+
+cdef inline object extend(object buf, const char* at, size_t length):
+    cdef Py_ssize_t s
+    cdef char* ptr
+    s = PyByteArray_Size(buf)
+    PyByteArray_Resize(buf, s + length)
+    ptr = PyByteArray_AsString(buf)
+    memcpy(ptr + s, at, length)
+
+
+DEF METHODS_COUNT = 46;
+
+cdef list _http_method = []
+
+for i in range(METHODS_COUNT):
+    _http_method.append(
+        cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
+
+
+cdef inline str http_method_str(int i):
+    if i < METHODS_COUNT:
+        return <str>_http_method[i]
+    else:
+        return "<unknown>"
+
+cdef inline object find_header(bytes raw_header):
+    cdef Py_ssize_t size
+    cdef char *buf
+    cdef int idx
+    PyBytes_AsStringAndSize(raw_header, &buf, &size)
+    idx = _find_header.find_header(buf, size)
+    if idx == -1:
+        return raw_header.decode('utf-8', 'surrogateescape')
+    return headers[idx]
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawRequestMessage:
+    cdef readonly str method
+    cdef readonly str path
+    cdef readonly object version  # HttpVersion
+    cdef readonly object headers  # CIMultiDict
+    cdef readonly object raw_headers  # tuple
+    cdef readonly object should_close
+    cdef readonly object compression
+    cdef readonly object upgrade
+    cdef readonly object chunked
+    cdef readonly object url  # yarl.URL
+
+    def __init__(self, method, path, version, headers, raw_headers,
+                 should_close, compression, upgrade, chunked, url):
+        self.method = method
+        self.path = path
+        self.version = version
+        self.headers = headers
+        self.raw_headers = raw_headers
+        self.should_close = should_close
+        self.compression = compression
+        self.upgrade = upgrade
+        self.chunked = chunked
+        self.url = url
+
+    def __repr__(self):
+        info = []
+        info.append(("method", self.method))
+        info.append(("path", self.path))
+        info.append(("version", self.version))
+        info.append(("headers", self.headers))
+        info.append(("raw_headers", self.raw_headers))
+        info.append(("should_close", self.should_close))
+        info.append(("compression", self.compression))
+        info.append(("upgrade", self.upgrade))
+        info.append(("chunked", self.chunked))
+        info.append(("url", self.url))
+        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+        return '<RawRequestMessage(' + sinfo + ')>'
+
+    def _replace(self, **dct):
+        cdef RawRequestMessage ret
+        ret = _new_request_message(self.method,
+                                   self.path,
+                                   self.version,
+                                   self.headers,
+                                   self.raw_headers,
+                                   self.should_close,
+                                   self.compression,
+                                   self.upgrade,
+                                   self.chunked,
+                                   self.url)
+        if "method" in dct:
+            ret.method = dct["method"]
+        if "path" in dct:
+            ret.path = dct["path"]
+        if "version" in dct:
+            ret.version = dct["version"]
+        if "headers" in dct:
+            ret.headers = dct["headers"]
+        if "raw_headers" in dct:
+            ret.raw_headers = dct["raw_headers"]
+        if "should_close" in dct:
+            ret.should_close = dct["should_close"]
+        if "compression" in dct:
+            ret.compression = dct["compression"]
+        if "upgrade" in dct:
+            ret.upgrade = dct["upgrade"]
+        if "chunked" in dct:
+            ret.chunked = dct["chunked"]
+        if "url" in dct:
+            ret.url = dct["url"]
+        return ret
+
+cdef _new_request_message(str method,
+                          str path,
+                          object version,
+                          object headers,
+                          object raw_headers,
+                          bint should_close,
+                          object compression,
+                          bint upgrade,
+                          bint chunked,
+                          object url):
+    cdef RawRequestMessage ret
+    ret = RawRequestMessage.__new__(RawRequestMessage)
+    ret.method = method
+    ret.path = path
+    ret.version = version
+    ret.headers = headers
+    ret.raw_headers = raw_headers
+    ret.should_close = should_close
+    ret.compression = compression
+    ret.upgrade = upgrade
+    ret.chunked = chunked
+    ret.url = url
+    return ret
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawResponseMessage:
+    cdef readonly object version  # HttpVersion
+    cdef readonly int code
+    cdef readonly str reason
+    cdef readonly object headers  # CIMultiDict
+    cdef readonly object raw_headers  # tuple
+    cdef readonly object should_close
+    cdef readonly object compression
+    cdef readonly object upgrade
+    cdef readonly object chunked
+
+    def __init__(self, version, code, reason, headers, raw_headers,
+                 should_close, compression, upgrade, chunked):
+        self.version = version
+        self.code = code
+        self.reason = reason
+        self.headers = headers
+        self.raw_headers = raw_headers
+        self.should_close = should_close
+        self.compression = compression
+        self.upgrade = upgrade
+        self.chunked = chunked
+
+    def __repr__(self):
+        info = []
+        info.append(("version", self.version))
+        info.append(("code", self.code))
+        info.append(("reason", self.reason))
+        info.append(("headers", self.headers))
+        info.append(("raw_headers", self.raw_headers))
+        info.append(("should_close", self.should_close))
+        info.append(("compression", self.compression))
+        info.append(("upgrade", self.upgrade))
+        info.append(("chunked", self.chunked))
+        sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+        return '<RawResponseMessage(' + sinfo + ')>'
+
+
+cdef _new_response_message(object version,
+                           int code,
+                           str reason,
+                           object headers,
+                           object raw_headers,
+                           bint should_close,
+                           object compression,
+                           bint upgrade,
+                           bint chunked):
+    cdef RawResponseMessage ret
+    ret = RawResponseMessage.__new__(RawResponseMessage)
+    ret.version = version
+    ret.code = code
+    ret.reason = reason
+    ret.headers = headers
+    ret.raw_headers = raw_headers
+    ret.should_close = should_close
+    ret.compression = compression
+    ret.upgrade = upgrade
+    ret.chunked = chunked
+    return ret
+
+
+@cython.internal
+cdef class HttpParser:
+
+    cdef:
+        cparser.llhttp_t* _cparser
+        cparser.llhttp_settings_t* _csettings
+
+        bytes _raw_name
+        object _name
+        bytes _raw_value
+        bint _has_value
+
+        object _protocol
+        object _loop
+        object _timer
+
+        size_t _max_line_size
+        size_t _max_field_size
+        size_t _max_headers
+        bint _response_with_body
+        bint _read_until_eof
+
+        bint _started
+        object _url
+        bytearray _buf
+        str _path
+        str _reason
+        list _headers
+        list _raw_headers
+        bint _upgraded
+        list _messages
+        object _payload
+        bint _payload_error
+        object _payload_exception
+        object _last_error
+        bint _auto_decompress
+        int _limit
+
+        str _content_encoding
+
+        Py_buffer py_buf
+
+    def __cinit__(self):
+        self._cparser = <cparser.llhttp_t*> \
+            PyMem_Malloc(sizeof(cparser.llhttp_t))
+        if self._cparser is NULL:
+            raise MemoryError()
+
+        self._csettings = <cparser.llhttp_settings_t*> \
+            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
+        if self._csettings is NULL:
+            raise MemoryError()
+
+    def __dealloc__(self):
+        PyMem_Free(self._cparser)
+        PyMem_Free(self._csettings)
+
+    cdef _init(
+        self, cparser.llhttp_type mode,
+        object protocol, object loop, int limit,
+        object timer=None,
+        size_t max_line_size=8190, size_t max_headers=32768,
+        size_t max_field_size=8190, payload_exception=None,
+        bint response_with_body=True, bint read_until_eof=False,
+        bint auto_decompress=True,
+    ):
+        cparser.llhttp_settings_init(self._csettings)
+        cparser.llhttp_init(self._cparser, mode, self._csettings)
+        self._cparser.data = <void*>self
+        self._cparser.content_length = 0
+
+        self._protocol = protocol
+        self._loop = loop
+        self._timer = timer
+
+        self._buf = bytearray()
+        self._payload = None
+        self._payload_error = 0
+        self._payload_exception = payload_exception
+        self._messages = []
+
+        self._raw_name = EMPTY_BYTES
+        self._raw_value = EMPTY_BYTES
+        self._has_value = False
+
+        self._max_line_size = max_line_size
+        self._max_headers = max_headers
+        self._max_field_size = max_field_size
+        self._response_with_body = response_with_body
+        self._read_until_eof = read_until_eof
+        self._upgraded = False
+        self._auto_decompress = auto_decompress
+        self._content_encoding = None
+
+        self._csettings.on_url = cb_on_url
|
368 |
+
self._csettings.on_status = cb_on_status
|
369 |
+
self._csettings.on_header_field = cb_on_header_field
|
370 |
+
self._csettings.on_header_value = cb_on_header_value
|
371 |
+
self._csettings.on_headers_complete = cb_on_headers_complete
|
372 |
+
self._csettings.on_body = cb_on_body
|
373 |
+
self._csettings.on_message_begin = cb_on_message_begin
|
374 |
+
self._csettings.on_message_complete = cb_on_message_complete
|
375 |
+
self._csettings.on_chunk_header = cb_on_chunk_header
|
376 |
+
self._csettings.on_chunk_complete = cb_on_chunk_complete
|
377 |
+
|
378 |
+
self._last_error = None
|
379 |
+
self._limit = limit
|
380 |
+
|
381 |
+
cdef _process_header(self):
|
382 |
+
cdef str value
|
383 |
+
if self._raw_name is not EMPTY_BYTES:
|
384 |
+
name = find_header(self._raw_name)
|
385 |
+
value = self._raw_value.decode('utf-8', 'surrogateescape')
|
386 |
+
|
387 |
+
self._headers.append((name, value))
|
388 |
+
|
389 |
+
if name is CONTENT_ENCODING:
|
390 |
+
self._content_encoding = value
|
391 |
+
|
392 |
+
self._has_value = False
|
393 |
+
self._raw_headers.append((self._raw_name, self._raw_value))
|
394 |
+
self._raw_name = EMPTY_BYTES
|
395 |
+
self._raw_value = EMPTY_BYTES
|
396 |
+
|
397 |
+
cdef _on_header_field(self, char* at, size_t length):
|
398 |
+
if self._has_value:
|
399 |
+
self._process_header()
|
400 |
+
|
401 |
+
if self._raw_name is EMPTY_BYTES:
|
402 |
+
self._raw_name = at[:length]
|
403 |
+
else:
|
404 |
+
self._raw_name += at[:length]
|
405 |
+
|
406 |
+
cdef _on_header_value(self, char* at, size_t length):
|
407 |
+
if self._raw_value is EMPTY_BYTES:
|
408 |
+
self._raw_value = at[:length]
|
409 |
+
else:
|
410 |
+
self._raw_value += at[:length]
|
411 |
+
self._has_value = True
|
412 |
+
|
413 |
+
cdef _on_headers_complete(self):
|
414 |
+
self._process_header()
|
415 |
+
|
416 |
+
should_close = not cparser.llhttp_should_keep_alive(self._cparser)
|
417 |
+
upgrade = self._cparser.upgrade
|
418 |
+
chunked = self._cparser.flags & cparser.F_CHUNKED
|
419 |
+
|
420 |
+
raw_headers = tuple(self._raw_headers)
|
421 |
+
headers = CIMultiDictProxy(CIMultiDict(self._headers))
|
422 |
+
|
423 |
+
if self._cparser.type == cparser.HTTP_REQUEST:
|
424 |
+
allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES
|
425 |
+
if allowed or self._cparser.method == cparser.HTTP_CONNECT:
|
426 |
+
self._upgraded = True
|
427 |
+
else:
|
428 |
+
if upgrade and self._cparser.status_code == 101:
|
429 |
+
self._upgraded = True
|
430 |
+
|
431 |
+
# do not support old websocket spec
|
432 |
+
if SEC_WEBSOCKET_KEY1 in headers:
|
433 |
+
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
434 |
+
|
435 |
+
encoding = None
|
436 |
+
enc = self._content_encoding
|
437 |
+
if enc is not None:
|
438 |
+
self._content_encoding = None
|
439 |
+
enc = enc.lower()
|
440 |
+
if enc in ('gzip', 'deflate', 'br'):
|
441 |
+
encoding = enc
|
442 |
+
|
443 |
+
if self._cparser.type == cparser.HTTP_REQUEST:
|
444 |
+
method = http_method_str(self._cparser.method)
|
445 |
+
msg = _new_request_message(
|
446 |
+
method, self._path,
|
447 |
+
self.http_version(), headers, raw_headers,
|
448 |
+
should_close, encoding, upgrade, chunked, self._url)
|
449 |
+
else:
|
450 |
+
msg = _new_response_message(
|
451 |
+
self.http_version(), self._cparser.status_code, self._reason,
|
452 |
+
headers, raw_headers, should_close, encoding,
|
453 |
+
upgrade, chunked)
|
454 |
+
|
455 |
+
if (
|
456 |
+
ULLONG_MAX > self._cparser.content_length > 0 or chunked or
|
457 |
+
self._cparser.method == cparser.HTTP_CONNECT or
|
458 |
+
(self._cparser.status_code >= 199 and
|
459 |
+
self._cparser.content_length == 0 and
|
460 |
+
self._read_until_eof)
|
461 |
+
):
|
462 |
+
payload = StreamReader(
|
463 |
+
self._protocol, timer=self._timer, loop=self._loop,
|
464 |
+
limit=self._limit)
|
465 |
+
else:
|
466 |
+
payload = EMPTY_PAYLOAD
|
467 |
+
|
468 |
+
self._payload = payload
|
469 |
+
if encoding is not None and self._auto_decompress:
|
470 |
+
self._payload = DeflateBuffer(payload, encoding)
|
471 |
+
|
472 |
+
if not self._response_with_body:
|
473 |
+
payload = EMPTY_PAYLOAD
|
474 |
+
|
475 |
+
self._messages.append((msg, payload))
|
476 |
+
|
477 |
+
cdef _on_message_complete(self):
|
478 |
+
self._payload.feed_eof()
|
479 |
+
self._payload = None
|
480 |
+
|
481 |
+
cdef _on_chunk_header(self):
|
482 |
+
self._payload.begin_http_chunk_receiving()
|
483 |
+
|
484 |
+
cdef _on_chunk_complete(self):
|
485 |
+
self._payload.end_http_chunk_receiving()
|
486 |
+
|
487 |
+
cdef object _on_status_complete(self):
|
488 |
+
pass
|
489 |
+
|
490 |
+
cdef inline http_version(self):
|
491 |
+
cdef cparser.llhttp_t* parser = self._cparser
|
492 |
+
|
493 |
+
if parser.http_major == 1:
|
494 |
+
if parser.http_minor == 0:
|
495 |
+
return HttpVersion10
|
496 |
+
elif parser.http_minor == 1:
|
497 |
+
return HttpVersion11
|
498 |
+
|
499 |
+
return HttpVersion(parser.http_major, parser.http_minor)
|
500 |
+
|
501 |
+
### Public API ###
|
502 |
+
|
503 |
+
def feed_eof(self):
|
504 |
+
cdef bytes desc
|
505 |
+
|
506 |
+
if self._payload is not None:
|
507 |
+
if self._cparser.flags & cparser.F_CHUNKED:
|
508 |
+
raise TransferEncodingError(
|
509 |
+
"Not enough data for satisfy transfer length header.")
|
510 |
+
elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
|
511 |
+
raise ContentLengthError(
|
512 |
+
"Not enough data for satisfy content length header.")
|
513 |
+
elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
|
514 |
+
desc = cparser.llhttp_get_error_reason(self._cparser)
|
515 |
+
raise PayloadEncodingError(desc.decode('latin-1'))
|
516 |
+
else:
|
517 |
+
self._payload.feed_eof()
|
518 |
+
elif self._started:
|
519 |
+
self._on_headers_complete()
|
520 |
+
if self._messages:
|
521 |
+
return self._messages[-1][0]
|
522 |
+
|
523 |
+
def feed_data(self, data):
|
524 |
+
cdef:
|
525 |
+
size_t data_len
|
526 |
+
size_t nb
|
527 |
+
cdef cparser.llhttp_errno_t errno
|
528 |
+
|
529 |
+
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
|
530 |
+
data_len = <size_t>self.py_buf.len
|
531 |
+
|
532 |
+
errno = cparser.llhttp_execute(
|
533 |
+
self._cparser,
|
534 |
+
<char*>self.py_buf.buf,
|
535 |
+
data_len)
|
536 |
+
|
537 |
+
if errno is cparser.HPE_PAUSED_UPGRADE:
|
538 |
+
cparser.llhttp_resume_after_upgrade(self._cparser)
|
539 |
+
|
540 |
+
nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
|
541 |
+
|
542 |
+
PyBuffer_Release(&self.py_buf)
|
543 |
+
|
544 |
+
if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
|
545 |
+
if self._payload_error == 0:
|
546 |
+
if self._last_error is not None:
|
547 |
+
ex = self._last_error
|
548 |
+
self._last_error = None
|
549 |
+
else:
|
550 |
+
after = cparser.llhttp_get_error_pos(self._cparser)
|
551 |
+
before = data[:after - <char*>self.py_buf.buf]
|
552 |
+
after_b = after.split(b"\r\n", 1)[0]
|
553 |
+
before = before.rsplit(b"\r\n", 1)[-1]
|
554 |
+
data = before + after_b
|
555 |
+
pointer = " " * (len(repr(before))-1) + "^"
|
556 |
+
ex = parser_error_from_errno(self._cparser, data, pointer)
|
557 |
+
self._payload = None
|
558 |
+
raise ex
|
559 |
+
|
560 |
+
if self._messages:
|
561 |
+
messages = self._messages
|
562 |
+
self._messages = []
|
563 |
+
else:
|
564 |
+
messages = ()
|
565 |
+
|
566 |
+
if self._upgraded:
|
567 |
+
return messages, True, data[nb:]
|
568 |
+
else:
|
569 |
+
return messages, False, b""
|
570 |
+
|
571 |
+
def set_upgraded(self, val):
|
572 |
+
self._upgraded = val
|
573 |
+
|
574 |
+
|
575 |
+
cdef class HttpRequestParser(HttpParser):
|
576 |
+
|
577 |
+
def __init__(
|
578 |
+
self, protocol, loop, int limit, timer=None,
|
579 |
+
size_t max_line_size=8190, size_t max_headers=32768,
|
580 |
+
size_t max_field_size=8190, payload_exception=None,
|
581 |
+
bint response_with_body=True, bint read_until_eof=False,
|
582 |
+
bint auto_decompress=True,
|
583 |
+
):
|
584 |
+
self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
|
585 |
+
max_line_size, max_headers, max_field_size,
|
586 |
+
payload_exception, response_with_body, read_until_eof,
|
587 |
+
auto_decompress)
|
588 |
+
|
589 |
+
cdef object _on_status_complete(self):
|
590 |
+
cdef int idx1, idx2
|
591 |
+
if not self._buf:
|
592 |
+
return
|
593 |
+
self._path = self._buf.decode('utf-8', 'surrogateescape')
|
594 |
+
try:
|
595 |
+
idx3 = len(self._path)
|
596 |
+
if self._cparser.method == cparser.HTTP_CONNECT:
|
597 |
+
# authority-form,
|
598 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
|
599 |
+
self._url = URL.build(authority=self._path, encoded=True)
|
600 |
+
elif idx3 > 1 and self._path[0] == '/':
|
601 |
+
# origin-form,
|
602 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
|
603 |
+
idx1 = self._path.find("?")
|
604 |
+
if idx1 == -1:
|
605 |
+
query = ""
|
606 |
+
idx2 = self._path.find("#")
|
607 |
+
if idx2 == -1:
|
608 |
+
path = self._path
|
609 |
+
fragment = ""
|
610 |
+
else:
|
611 |
+
path = self._path[0: idx2]
|
612 |
+
fragment = self._path[idx2+1:]
|
613 |
+
|
614 |
+
else:
|
615 |
+
path = self._path[0:idx1]
|
616 |
+
idx1 += 1
|
617 |
+
idx2 = self._path.find("#", idx1+1)
|
618 |
+
if idx2 == -1:
|
619 |
+
query = self._path[idx1:]
|
620 |
+
fragment = ""
|
621 |
+
else:
|
622 |
+
query = self._path[idx1: idx2]
|
623 |
+
fragment = self._path[idx2+1:]
|
624 |
+
|
625 |
+
self._url = URL.build(
|
626 |
+
path=path,
|
627 |
+
query_string=query,
|
628 |
+
fragment=fragment,
|
629 |
+
encoded=True,
|
630 |
+
)
|
631 |
+
else:
|
632 |
+
# absolute-form for proxy maybe,
|
633 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
|
634 |
+
self._url = URL(self._path, encoded=True)
|
635 |
+
finally:
|
636 |
+
PyByteArray_Resize(self._buf, 0)
|
637 |
+
|
638 |
+
|
639 |
+
cdef class HttpResponseParser(HttpParser):
|
640 |
+
|
641 |
+
def __init__(
|
642 |
+
self, protocol, loop, int limit, timer=None,
|
643 |
+
size_t max_line_size=8190, size_t max_headers=32768,
|
644 |
+
size_t max_field_size=8190, payload_exception=None,
|
645 |
+
bint response_with_body=True, bint read_until_eof=False,
|
646 |
+
bint auto_decompress=True
|
647 |
+
):
|
648 |
+
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
|
649 |
+
max_line_size, max_headers, max_field_size,
|
650 |
+
payload_exception, response_with_body, read_until_eof,
|
651 |
+
auto_decompress)
|
652 |
+
# Use strict parsing on dev mode, so users are warned about broken servers.
|
653 |
+
if not DEBUG:
|
654 |
+
cparser.llhttp_set_lenient_headers(self._cparser, 1)
|
655 |
+
cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
|
656 |
+
cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
|
657 |
+
|
658 |
+
cdef object _on_status_complete(self):
|
659 |
+
if self._buf:
|
660 |
+
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
661 |
+
PyByteArray_Resize(self._buf, 0)
|
662 |
+
else:
|
663 |
+
self._reason = self._reason or ''
|
664 |
+
|
665 |
+
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
|
666 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
667 |
+
|
668 |
+
pyparser._started = True
|
669 |
+
pyparser._headers = []
|
670 |
+
pyparser._raw_headers = []
|
671 |
+
PyByteArray_Resize(pyparser._buf, 0)
|
672 |
+
pyparser._path = None
|
673 |
+
pyparser._reason = None
|
674 |
+
return 0
|
675 |
+
|
676 |
+
|
677 |
+
cdef int cb_on_url(cparser.llhttp_t* parser,
|
678 |
+
const char *at, size_t length) except -1:
|
679 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
680 |
+
try:
|
681 |
+
if length > pyparser._max_line_size:
|
682 |
+
raise LineTooLong(
|
683 |
+
'Status line is too long', pyparser._max_line_size, length)
|
684 |
+
extend(pyparser._buf, at, length)
|
685 |
+
except BaseException as ex:
|
686 |
+
pyparser._last_error = ex
|
687 |
+
return -1
|
688 |
+
else:
|
689 |
+
return 0
|
690 |
+
|
691 |
+
|
692 |
+
cdef int cb_on_status(cparser.llhttp_t* parser,
|
693 |
+
const char *at, size_t length) except -1:
|
694 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
695 |
+
cdef str reason
|
696 |
+
try:
|
697 |
+
if length > pyparser._max_line_size:
|
698 |
+
raise LineTooLong(
|
699 |
+
'Status line is too long', pyparser._max_line_size, length)
|
700 |
+
extend(pyparser._buf, at, length)
|
701 |
+
except BaseException as ex:
|
702 |
+
pyparser._last_error = ex
|
703 |
+
return -1
|
704 |
+
else:
|
705 |
+
return 0
|
706 |
+
|
707 |
+
|
708 |
+
cdef int cb_on_header_field(cparser.llhttp_t* parser,
|
709 |
+
const char *at, size_t length) except -1:
|
710 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
711 |
+
cdef Py_ssize_t size
|
712 |
+
try:
|
713 |
+
pyparser._on_status_complete()
|
714 |
+
size = len(pyparser._raw_name) + length
|
715 |
+
if size > pyparser._max_field_size:
|
716 |
+
raise LineTooLong(
|
717 |
+
'Header name is too long', pyparser._max_field_size, size)
|
718 |
+
pyparser._on_header_field(at, length)
|
719 |
+
except BaseException as ex:
|
720 |
+
pyparser._last_error = ex
|
721 |
+
return -1
|
722 |
+
else:
|
723 |
+
return 0
|
724 |
+
|
725 |
+
|
726 |
+
cdef int cb_on_header_value(cparser.llhttp_t* parser,
|
727 |
+
const char *at, size_t length) except -1:
|
728 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
729 |
+
cdef Py_ssize_t size
|
730 |
+
try:
|
731 |
+
size = len(pyparser._raw_value) + length
|
732 |
+
if size > pyparser._max_field_size:
|
733 |
+
raise LineTooLong(
|
734 |
+
'Header value is too long', pyparser._max_field_size, size)
|
735 |
+
pyparser._on_header_value(at, length)
|
736 |
+
except BaseException as ex:
|
737 |
+
pyparser._last_error = ex
|
738 |
+
return -1
|
739 |
+
else:
|
740 |
+
return 0
|
741 |
+
|
742 |
+
|
743 |
+
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
|
744 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
745 |
+
try:
|
746 |
+
pyparser._on_status_complete()
|
747 |
+
pyparser._on_headers_complete()
|
748 |
+
except BaseException as exc:
|
749 |
+
pyparser._last_error = exc
|
750 |
+
return -1
|
751 |
+
else:
|
752 |
+
if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT:
|
753 |
+
return 2
|
754 |
+
else:
|
755 |
+
return 0
|
756 |
+
|
757 |
+
|
758 |
+
cdef int cb_on_body(cparser.llhttp_t* parser,
|
759 |
+
const char *at, size_t length) except -1:
|
760 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
761 |
+
cdef bytes body = at[:length]
|
762 |
+
try:
|
763 |
+
pyparser._payload.feed_data(body, length)
|
764 |
+
except BaseException as underlying_exc:
|
765 |
+
reraised_exc = underlying_exc
|
766 |
+
if pyparser._payload_exception is not None:
|
767 |
+
reraised_exc = pyparser._payload_exception(str(underlying_exc))
|
768 |
+
|
769 |
+
set_exception(pyparser._payload, reraised_exc, underlying_exc)
|
770 |
+
|
771 |
+
pyparser._payload_error = 1
|
772 |
+
return -1
|
773 |
+
else:
|
774 |
+
return 0
|
775 |
+
|
776 |
+
|
777 |
+
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
|
778 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
779 |
+
try:
|
780 |
+
pyparser._started = False
|
781 |
+
pyparser._on_message_complete()
|
782 |
+
except BaseException as exc:
|
783 |
+
pyparser._last_error = exc
|
784 |
+
return -1
|
785 |
+
else:
|
786 |
+
return 0
|
787 |
+
|
788 |
+
|
789 |
+
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
|
790 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
791 |
+
try:
|
792 |
+
pyparser._on_chunk_header()
|
793 |
+
except BaseException as exc:
|
794 |
+
pyparser._last_error = exc
|
795 |
+
return -1
|
796 |
+
else:
|
797 |
+
return 0
|
798 |
+
|
799 |
+
|
800 |
+
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
|
801 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
802 |
+
try:
|
803 |
+
pyparser._on_chunk_complete()
|
804 |
+
except BaseException as exc:
|
805 |
+
pyparser._last_error = exc
|
806 |
+
return -1
|
807 |
+
else:
|
808 |
+
return 0
|
809 |
+
|
810 |
+
|
811 |
+
cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
|
812 |
+
cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
|
813 |
+
cdef bytes desc = cparser.llhttp_get_error_reason(parser)
|
814 |
+
|
815 |
+
err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)
|
816 |
+
|
817 |
+
if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
|
818 |
+
cparser.HPE_CB_HEADERS_COMPLETE,
|
819 |
+
cparser.HPE_CB_MESSAGE_COMPLETE,
|
820 |
+
cparser.HPE_CB_CHUNK_HEADER,
|
821 |
+
cparser.HPE_CB_CHUNK_COMPLETE,
|
822 |
+
cparser.HPE_INVALID_CONSTANT,
|
823 |
+
cparser.HPE_INVALID_HEADER_TOKEN,
|
824 |
+
cparser.HPE_INVALID_CONTENT_LENGTH,
|
825 |
+
cparser.HPE_INVALID_CHUNK_SIZE,
|
826 |
+
cparser.HPE_INVALID_EOF_STATE,
|
827 |
+
cparser.HPE_INVALID_TRANSFER_ENCODING}:
|
828 |
+
return BadHttpMessage(err_msg)
|
829 |
+
elif errno == cparser.HPE_INVALID_METHOD:
|
830 |
+
return BadHttpMethod(error=err_msg)
|
831 |
+
elif errno in {cparser.HPE_INVALID_STATUS,
|
832 |
+
cparser.HPE_INVALID_VERSION}:
|
833 |
+
return BadStatusLine(error=err_msg)
|
834 |
+
elif errno == cparser.HPE_INVALID_URL:
|
835 |
+
return InvalidURLError(err_msg)
|
836 |
+
|
837 |
+
return BadHttpMessage(err_msg)
|
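For orientation (not part of the diff): the parser above is driven through its `feed_data`/`feed_eof` public API, returning `(messages, upgraded, tail)` tuples. A minimal sketch, assuming `aiohttp.http_parser` resolves to either this compiled parser or the pure-Python fallback with the same API; the `Mock()` protocol is a stand-in, since the protocol is only exercised once a body payload is created:

import asyncio
from unittest.mock import Mock

from aiohttp.http_parser import HttpRequestParser  # C or pure-Python, same API

async def main():
    loop = asyncio.get_running_loop()
    parser = HttpRequestParser(Mock(), loop, 2 ** 16)  # protocol, loop, limit
    # Feed one complete, bodiless request; feed_data returns parsed messages,
    # an "upgraded" flag, and any unconsumed tail bytes.
    messages, upgraded, tail = parser.feed_data(
        b"GET /index.html?q=1 HTTP/1.1\r\nHost: example.com\r\n\r\n"
    )
    msg, payload = messages[0]
    print(msg.method, msg.path, msg.version)  # GET /index.html?q=1 HttpVersion(major=1, minor=1)

asyncio.run(main())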
venv/Lib/site-packages/aiohttp/_http_writer.cp312-win_amd64.pyd
ADDED
Binary file (48.1 kB)
venv/Lib/site-packages/aiohttp/_http_writer.pyx
ADDED
@@ -0,0 +1,160 @@
from cpython.bytes cimport PyBytes_FromStringAndSize
from cpython.exc cimport PyErr_NoMemory
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
from cpython.object cimport PyObject_Str
from libc.stdint cimport uint8_t, uint64_t
from libc.string cimport memcpy

from multidict import istr

DEF BUF_SIZE = 16 * 1024  # 16KiB
cdef char BUFFER[BUF_SIZE]

cdef object _istr = istr


# ----------------- writer ---------------------------

cdef struct Writer:
    char *buf
    Py_ssize_t size
    Py_ssize_t pos


cdef inline void _init_writer(Writer* writer):
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0


cdef inline void _release_writer(Writer* writer):
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)


cdef inline int _write_byte(Writer* writer, uint8_t ch):
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0


cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surogate pair, ignored
        return 0
    elif utf < 0x10000:
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # symbol is too large
        return 0
    else:
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))


cdef inline int _write_str(Writer* writer, str s):
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1


cdef inline int _write_str_raise_on_nlcr(Writer* writer, object s):
    cdef Py_UCS4 ch
    cdef str out_str
    if type(s) is str:
        out_str = <str>s
    elif type(s) is _istr:
        out_str = PyObject_Str(s)
    elif not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    else:
        out_str = str(s)

    for ch in out_str:
        if ch == 0x0D or ch == 0x0A:
            raise ValueError(
                "Newline or carriage return detected in headers. "
                "Potential header injection attack."
            )
        if _write_utf8(writer, ch) < 0:
            return -1


# --------------- _serialize_headers ----------------------

def _serialize_headers(str status_line, headers):
    cdef Writer writer
    cdef object key
    cdef object val

    _init_writer(&writer)

    try:
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str_raise_on_nlcr(&writer, key) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str_raise_on_nlcr(&writer, val) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
venv/Lib/site-packages/aiohttp/client.py
ADDED
@@ -0,0 +1,1550 @@
"""HTTP Client for asyncio."""

import asyncio
import base64
import hashlib
import json
import os
import sys
import traceback
import warnings
from contextlib import suppress
from types import TracebackType
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Coroutine,
    Final,
    FrozenSet,
    Generator,
    Generic,
    Iterable,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Type,
    TypedDict,
    TypeVar,
    Union,
)

import attr
from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
from yarl import URL

from . import hdrs, http, payload
from ._websocket.reader import WebSocketDataQueue
from .abc import AbstractCookieJar
from .client_exceptions import (
    ClientConnectionError,
    ClientConnectionResetError,
    ClientConnectorCertificateError,
    ClientConnectorDNSError,
    ClientConnectorError,
    ClientConnectorSSLError,
    ClientError,
    ClientHttpProxyError,
    ClientOSError,
    ClientPayloadError,
    ClientProxyConnectionError,
    ClientResponseError,
    ClientSSLError,
    ConnectionTimeoutError,
    ContentTypeError,
    InvalidURL,
    InvalidUrlClientError,
    InvalidUrlRedirectClientError,
    NonHttpUrlClientError,
    NonHttpUrlRedirectClientError,
    RedirectClientError,
    ServerConnectionError,
    ServerDisconnectedError,
    ServerFingerprintMismatch,
    ServerTimeoutError,
    SocketTimeoutError,
    TooManyRedirects,
    WSMessageTypeError,
    WSServerHandshakeError,
)
from .client_reqrep import (
    ClientRequest as ClientRequest,
    ClientResponse as ClientResponse,
    Fingerprint as Fingerprint,
    RequestInfo as RequestInfo,
    _merge_ssl_params,
)
from .client_ws import (
    DEFAULT_WS_CLIENT_TIMEOUT,
    ClientWebSocketResponse as ClientWebSocketResponse,
    ClientWSTimeout as ClientWSTimeout,
)
from .connector import (
    HTTP_AND_EMPTY_SCHEMA_SET,
    BaseConnector as BaseConnector,
    NamedPipeConnector as NamedPipeConnector,
    TCPConnector as TCPConnector,
    UnixConnector as UnixConnector,
)
from .cookiejar import CookieJar
from .helpers import (
    _SENTINEL,
    DEBUG,
    EMPTY_BODY_METHODS,
    BasicAuth,
    TimeoutHandle,
    get_env_proxy_for_url,
    sentinel,
    strip_auth_from_url,
)
from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse
from .tracing import Trace, TraceConfig
from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL

__all__ = (
    # client_exceptions
    "ClientConnectionError",
    "ClientConnectionResetError",
    "ClientConnectorCertificateError",
    "ClientConnectorDNSError",
    "ClientConnectorError",
    "ClientConnectorSSLError",
    "ClientError",
    "ClientHttpProxyError",
    "ClientOSError",
    "ClientPayloadError",
    "ClientProxyConnectionError",
    "ClientResponseError",
    "ClientSSLError",
    "ConnectionTimeoutError",
    "ContentTypeError",
    "InvalidURL",
    "InvalidUrlClientError",
    "RedirectClientError",
    "NonHttpUrlClientError",
    "InvalidUrlRedirectClientError",
    "NonHttpUrlRedirectClientError",
    "ServerConnectionError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ServerTimeoutError",
    "SocketTimeoutError",
    "TooManyRedirects",
    "WSServerHandshakeError",
    # client_reqrep
    "ClientRequest",
    "ClientResponse",
    "Fingerprint",
    "RequestInfo",
    # connector
    "BaseConnector",
    "TCPConnector",
    "UnixConnector",
    "NamedPipeConnector",
    # client_ws
    "ClientWebSocketResponse",
    # client
    "ClientSession",
    "ClientTimeout",
    "ClientWSTimeout",
    "request",
    "WSMessageTypeError",
)


if TYPE_CHECKING:
    from ssl import SSLContext
else:
    SSLContext = None

if sys.version_info >= (3, 11) and TYPE_CHECKING:
    from typing import Unpack


class _RequestOptions(TypedDict, total=False):
    params: Query
    data: Any
    json: Any
    cookies: Union[LooseCookies, None]
    headers: Union[LooseHeaders, None]
    skip_auto_headers: Union[Iterable[str], None]
    auth: Union[BasicAuth, None]
    allow_redirects: bool
    max_redirects: int
    compress: Union[str, bool, None]
    chunked: Union[bool, None]
    expect100: bool
    raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]]
    read_until_eof: bool
    proxy: Union[StrOrURL, None]
    proxy_auth: Union[BasicAuth, None]
    timeout: "Union[ClientTimeout, _SENTINEL, None]"
    ssl: Union[SSLContext, bool, Fingerprint]
    server_hostname: Union[str, None]
    proxy_headers: Union[LooseHeaders, None]
    trace_request_ctx: Union[Mapping[str, Any], None]
    read_bufsize: Union[int, None]
    auto_decompress: Union[bool, None]
    max_line_size: Union[int, None]
    max_field_size: Union[int, None]


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ClientTimeout:
    total: Optional[float] = None
    connect: Optional[float] = None
    sock_read: Optional[float] = None
    sock_connect: Optional[float] = None
    ceil_threshold: float = 5

    # pool_queue_timeout: Optional[float] = None
    # dns_resolution_timeout: Optional[float] = None
    # socket_connect_timeout: Optional[float] = None
    # connection_acquiring_timeout: Optional[float] = None
    # new_connection_timeout: Optional[float] = None
    # http_header_timeout: Optional[float] = None
    # response_body_timeout: Optional[float] = None

    # to create a timeout specific for a single request, either
    # - create a completely new one to overwrite the default
    # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
    # to overwrite the defaults


# 5 Minute default read timeout
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30)

# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2
IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"})

_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse)
_CharsetResolver = Callable[[ClientResponse, bytes], str]


class ClientSession:
    """First-class interface for making HTTP requests."""

    ATTRS = frozenset(
        [
            "_base_url",
            "_base_url_origin",
            "_source_traceback",
            "_connector",
            "_loop",
            "_cookie_jar",
            "_connector_owner",
            "_default_auth",
            "_version",
            "_json_serialize",
            "_requote_redirect_url",
            "_timeout",
            "_raise_for_status",
            "_auto_decompress",
            "_trust_env",
            "_default_headers",
            "_skip_auto_headers",
            "_request_class",
            "_response_class",
            "_ws_response_class",
            "_trace_configs",
            "_read_bufsize",
            "_max_line_size",
            "_max_field_size",
            "_resolve_charset",
            "_default_proxy",
            "_default_proxy_auth",
            "_retry_connection",
            "requote_redirect_url",
        ]
    )

    _source_traceback: Optional[traceback.StackSummary] = None
    _connector: Optional[BaseConnector] = None

    def __init__(
        self,
        base_url: Optional[StrOrURL] = None,
        *,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        json_serialize: JSONEncoder = json.dumps,
        request_class: Type[ClientRequest] = ClientRequest,
        response_class: Type[ClientResponse] = ClientResponse,
        ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
        version: HttpVersion = http.HttpVersion11,
        cookie_jar: Optional[AbstractCookieJar] = None,
        connector_owner: bool = True,
        raise_for_status: Union[
            bool, Callable[[ClientResponse], Awaitable[None]]
        ] = False,
        read_timeout: Union[float, _SENTINEL] = sentinel,
        conn_timeout: Optional[float] = None,
        timeout: Union[object, ClientTimeout] = sentinel,
        auto_decompress: bool = True,
        trust_env: bool = False,
        requote_redirect_url: bool = True,
        trace_configs: Optional[List[TraceConfig]] = None,
        read_bufsize: int = 2**16,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
        fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
    ) -> None:
        # We initialise _connector to None immediately, as it's referenced in __del__()
        # and could cause issues if an exception occurs during initialisation.
        self._connector: Optional[BaseConnector] = None

        if loop is None:
            if connector is not None:
                loop = connector._loop

        loop = loop or asyncio.get_running_loop()

        if base_url is None or isinstance(base_url, URL):
            self._base_url: Optional[URL] = base_url
            self._base_url_origin = None if base_url is None else base_url.origin()
        else:
            self._base_url = URL(base_url)
            self._base_url_origin = self._base_url.origin()
            assert self._base_url.absolute, "Only absolute URLs are supported"
        if self._base_url is not None and not self._base_url.path.endswith("/"):
            raise ValueError("base_url must have a trailing '/'")

        if timeout is sentinel or timeout is None:
            self._timeout = DEFAULT_TIMEOUT
            if read_timeout is not sentinel:
                warnings.warn(
                    "read_timeout is deprecated, use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
                self._timeout = attr.evolve(self._timeout, total=read_timeout)
            if conn_timeout is not None:
                self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
                warnings.warn(
                    "conn_timeout is deprecated, use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
        else:
            if not isinstance(timeout, ClientTimeout):
                raise ValueError(
                    f"timeout parameter cannot be of {type(timeout)} type, "
                    "please use 'timeout=ClientTimeout(...)'",
                )
            self._timeout = timeout
            if read_timeout is not sentinel:
                raise ValueError(
                    "read_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.read"
                )
            if conn_timeout is not None:
                raise ValueError(
                    "conn_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.connect"
                )

        if connector is None:
            connector = TCPConnector(loop=loop)

        if connector._loop is not loop:
            raise RuntimeError("Session and connector has to use same event loop")

        self._loop = loop

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        if cookie_jar is None:
            cookie_jar = CookieJar(loop=loop)
        self._cookie_jar = cookie_jar

        if cookies:
            self._cookie_jar.update_cookies(cookies)

        self._connector = connector
        self._connector_owner = connector_owner
        self._default_auth = auth
        self._version = version
        self._json_serialize = json_serialize
        self._raise_for_status = raise_for_status
        self._auto_decompress = auto_decompress
        self._trust_env = trust_env
        self._requote_redirect_url = requote_redirect_url
        self._read_bufsize = read_bufsize
        self._max_line_size = max_line_size
        self._max_field_size = max_field_size

        # Convert to list of tuples
        if headers:
            real_headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            real_headers = CIMultiDict()
        self._default_headers: CIMultiDict[str] = real_headers
        if skip_auto_headers is not None:
            self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
        else:
            self._skip_auto_headers = frozenset()

        self._request_class = request_class
        self._response_class = response_class
        self._ws_response_class = ws_response_class

        self._trace_configs = trace_configs or []
        for trace_config in self._trace_configs:
            trace_config.freeze()

        self._resolve_charset = fallback_charset_resolver

        self._default_proxy = proxy
        self._default_proxy_auth = proxy_auth
        self._retry_connection: bool = True

    def __init_subclass__(cls: Type["ClientSession"]) -> None:
        warnings.warn(
            "Inheritance class {} from ClientSession "
            "is discouraged".format(cls.__name__),
            DeprecationWarning,
            stacklevel=2,
        )

    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom ClientSession.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    def __del__(self, _warnings: Any = warnings) -> None:
        if not self.closed:
            kwargs = {"source": self}
            _warnings.warn(
                f"Unclosed client session {self!r}", ResourceWarning, **kwargs
            )
            context = {"client_session": self, "message": "Unclosed client session"}
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    if sys.version_info >= (3, 11) and TYPE_CHECKING:

        def request(
            self,
            method: str,
            url: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> "_RequestContextManager": ...

    else:

        def request(
            self, method: str, url: StrOrURL, **kwargs: Any
        ) -> "_RequestContextManager":
            """Perform HTTP request."""
            return _RequestContextManager(self._request(method, url, **kwargs))

    def _build_url(self, str_or_url: StrOrURL) -> URL:
        url = URL(str_or_url)
        if self._base_url is None:
            return url
        else:
            assert not url.absolute
            return self._base_url.join(url)

    async def _request(
        self,
        method: str,
        str_or_url: StrOrURL,
        *,
        params: Query = None,
        data: Any = None,
        json: Any = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        allow_redirects: bool = True,
        max_redirects: int = 10,
        compress: Union[str, bool, None] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        raise_for_status: Union[
            None, bool, Callable[[ClientResponse], Awaitable[None]]
        ] = None,
        read_until_eof: bool = True,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[SSLContext, bool, Fingerprint] = True,
        server_hostname: Optional[str] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        trace_request_ctx: Optional[Mapping[str, Any]] = None,
        read_bufsize: Optional[int] = None,
        auto_decompress: Optional[bool] = None,
        max_line_size: Optional[int] = None,
        max_field_size: Optional[int] = None,
    ) -> ClientResponse:

        # NOTE: timeout clamps existing connect and read timeouts. We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if self.closed:
            raise RuntimeError("Session is closed")

        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

        if data is not None and json is not None:
            raise ValueError(
                "data and json parameters can not be used at the same time"
            )
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

        redirects = 0
        history: List[ClientResponse] = []
        version = self._version
        params = params or {}

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)

        try:
            url = self._build_url(str_or_url)
        except ValueError as e:
            raise InvalidUrlClientError(str_or_url) from e

        assert self._connector is not None
        if url.scheme not in self._connector.allowed_protocol_schema_set:
            raise NonHttpUrlClientError(url)

        skip_headers: Optional[Iterable[istr]]
        if skip_auto_headers is not None:
            skip_headers = {
                istr(i) for i in skip_auto_headers
            } | self._skip_auto_headers
        elif self._skip_auto_headers:
            skip_headers = self._skip_auto_headers
        else:
            skip_headers = None

        if proxy is None:
            proxy = self._default_proxy
        if proxy_auth is None:
            proxy_auth = self._default_proxy_auth

        if proxy is None:
            proxy_headers = None
        else:
            proxy_headers = self._prepare_headers(proxy_headers)
            try:
                proxy = URL(proxy)
            except ValueError as e:
                raise InvalidURL(proxy) from e

        if timeout is sentinel:
            real_timeout: ClientTimeout = self._timeout
        else:
            if not isinstance(timeout, ClientTimeout):
                real_timeout = ClientTimeout(total=timeout)
            else:
                real_timeout = timeout
        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(
            self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
        )
        handle = tm.start()

        if read_bufsize is None:
            read_bufsize = self._read_bufsize

        if auto_decompress is None:
            auto_decompress = self._auto_decompress

        if max_line_size is None:
            max_line_size = self._max_line_size

        if max_field_size is None:
            max_field_size = self._max_field_size

        traces = [
            Trace(
                self,
                trace_config,
                trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
            )
            for trace_config in self._trace_configs
        ]

        for trace in traces:
            await trace.send_request_start(method, url.update_query(params), headers)

        timer = tm.timer()
        try:
            with timer:
                # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests
                retry_persistent_connection = (
                    self._retry_connection and method in IDEMPOTENT_METHODS
                )
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if not url.raw_host:
                        # NOTE: Bail early, otherwise, causes `InvalidURL` through
                        # NOTE: `self._request_class()` below.
                        err_exc_cls = (
                            InvalidUrlRedirectClientError
                            if redirects
                            else InvalidUrlClientError
                        )
                        raise err_exc_cls(url)
                    # If `auth` was passed for an already authenticated URL,
                    # disallow only if this is the initial URL; this is to avoid issues
                    # with sketchy redirects that are not the caller's responsibility
                    if not history and (auth and auth_from_url):
                        raise ValueError(
                            "Cannot combine AUTH argument with "
                            "credentials encoded in URL"
                        )

                    # Override the auth with the one from the URL only if we
                    # have no auth, or if we got an auth from a redirect URL
                    if auth is None or (history and auth_from_url is not None):
                        auth = auth_from_url

                    if (
                        auth is None
                        and self._default_auth
                        and (
                            not self._base_url or self._base_url_origin == url.origin()
                        )
                    ):
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (
                        headers is not None
                        and auth is not None
                        and hdrs.AUTHORIZATION in headers
                    ):
                        raise ValueError(
                            "Cannot combine AUTHORIZATION header "
                            "with AUTH argument or credentials "
                            "encoded in URL"
                        )

                    all_cookies = self._cookie_jar.filter_cookies(url)

                    if cookies is not None:
                        tmp_cookie_jar = CookieJar(
                            quote_cookie=self._cookie_jar.quote_cookie
                        )
                        tmp_cookie_jar.update_cookies(cookies)
                        req_cookies = tmp_cookie_jar.filter_cookies(url)
                        if req_cookies:
                            all_cookies.load(req_cookies)

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        with suppress(LookupError):
                            proxy, proxy_auth = get_env_proxy_for_url(url)

                    req = self._request_class(
                        method,
                        url,
                        params=params,
                        headers=headers,
                        skip_auto_headers=skip_headers,
                        data=data,
                        cookies=all_cookies,
                        auth=auth,
                        version=version,
                        compress=compress,
                        chunked=chunked,
                        expect100=expect100,
                        loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy,
                        proxy_auth=proxy_auth,
                        timer=timer,
                        session=self,
                        ssl=ssl if ssl is not None else True,
                        server_hostname=server_hostname,
                        proxy_headers=proxy_headers,
                        traces=traces,
                        trust_env=self.trust_env,
                    )

                    # connection timeout
                    try:
                        conn = await self._connector.connect(
                            req, traces=traces, timeout=real_timeout
                        )
                    except asyncio.TimeoutError as exc:
                        raise ConnectionTimeoutError(
                            f"Connection timeout to host {url}"
                        ) from exc

                    assert conn.transport is not None

                    assert conn.protocol is not None
                    conn.protocol.set_response_params(
                        timer=timer,
                        skip_payload=method in EMPTY_BODY_METHODS,
                        read_until_eof=read_until_eof,
                        auto_decompress=auto_decompress,
                        read_timeout=real_timeout.sock_read,
                        read_bufsize=read_bufsize,
                        timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
                        max_line_size=max_line_size,
                        max_field_size=max_field_size,
                    )

                    try:
                        try:
                            resp = await req.send(conn)
                            try:
                                await resp.start(conn)
                            except BaseException:
                                resp.close()
                                raise
                        except BaseException:
                            conn.close()
                            raise
                    except (ClientOSError, ServerDisconnectedError):
                        if retry_persistent_connection:
                            retry_persistent_connection = False
                            continue
                        raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                            raise
                        raise ClientOSError(*exc.args) from exc

                    if cookies := resp._cookies:
                        self._cookie_jar.update_cookies(cookies, resp.url)

                    # redirects
                    if resp.status in (301, 302, 303, 307, 308) and allow_redirects:

                        for trace in traces:
                            await trace.send_request_redirect(
                                method, url.update_query(params), headers, resp
                            )

                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            raise TooManyRedirects(
                                history[0].request_info, tuple(history)
                            )

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
                            resp.status in (301, 302) and resp.method == hdrs.METH_POST
                        ):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
|
776 |
+
headers.pop(hdrs.CONTENT_LENGTH)
|
777 |
+
|
778 |
+
r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
|
779 |
+
hdrs.URI
|
780 |
+
)
|
781 |
+
if r_url is None:
|
782 |
+
# see github.com/aio-libs/aiohttp/issues/2022
|
783 |
+
break
|
784 |
+
else:
|
785 |
+
# reading from correct redirection
|
786 |
+
# response is forbidden
|
787 |
+
resp.release()
|
788 |
+
|
789 |
+
try:
|
790 |
+
parsed_redirect_url = URL(
|
791 |
+
r_url, encoded=not self._requote_redirect_url
|
792 |
+
)
|
793 |
+
except ValueError as e:
|
794 |
+
raise InvalidUrlRedirectClientError(
|
795 |
+
r_url,
|
796 |
+
"Server attempted redirecting to a location that does not look like a URL",
|
797 |
+
) from e
|
798 |
+
|
799 |
+
scheme = parsed_redirect_url.scheme
|
800 |
+
if scheme not in HTTP_AND_EMPTY_SCHEMA_SET:
|
801 |
+
resp.close()
|
802 |
+
raise NonHttpUrlRedirectClientError(r_url)
|
803 |
+
elif not scheme:
|
804 |
+
parsed_redirect_url = url.join(parsed_redirect_url)
|
805 |
+
|
806 |
+
try:
|
807 |
+
redirect_origin = parsed_redirect_url.origin()
|
808 |
+
except ValueError as origin_val_err:
|
809 |
+
raise InvalidUrlRedirectClientError(
|
810 |
+
parsed_redirect_url,
|
811 |
+
"Invalid redirect URL origin",
|
812 |
+
) from origin_val_err
|
813 |
+
|
814 |
+
if url.origin() != redirect_origin:
|
815 |
+
auth = None
|
816 |
+
headers.pop(hdrs.AUTHORIZATION, None)
|
817 |
+
|
818 |
+
url = parsed_redirect_url
|
819 |
+
params = {}
|
820 |
+
resp.release()
|
821 |
+
continue
|
822 |
+
|
823 |
+
break
|
824 |
+
|
825 |
+
# check response status
|
826 |
+
if raise_for_status is None:
|
827 |
+
raise_for_status = self._raise_for_status
|
828 |
+
|
829 |
+
if raise_for_status is None:
|
830 |
+
pass
|
831 |
+
elif callable(raise_for_status):
|
832 |
+
await raise_for_status(resp)
|
833 |
+
elif raise_for_status:
|
834 |
+
resp.raise_for_status()
|
835 |
+
|
836 |
+
# register connection
|
837 |
+
if handle is not None:
|
838 |
+
if resp.connection is not None:
|
839 |
+
resp.connection.add_callback(handle.cancel)
|
840 |
+
else:
|
841 |
+
handle.cancel()
|
842 |
+
|
843 |
+
resp._history = tuple(history)
|
844 |
+
|
845 |
+
for trace in traces:
|
846 |
+
await trace.send_request_end(
|
847 |
+
method, url.update_query(params), headers, resp
|
848 |
+
)
|
849 |
+
return resp
|
850 |
+
|
851 |
+
except BaseException as e:
|
852 |
+
# cleanup timer
|
853 |
+
tm.close()
|
854 |
+
if handle:
|
855 |
+
handle.cancel()
|
856 |
+
handle = None
|
857 |
+
|
858 |
+
for trace in traces:
|
859 |
+
await trace.send_request_exception(
|
860 |
+
method, url.update_query(params), headers, e
|
861 |
+
)
|
862 |
+
raise
|
863 |
+
|
864 |
+
def ws_connect(
|
865 |
+
self,
|
866 |
+
url: StrOrURL,
|
867 |
+
*,
|
868 |
+
method: str = hdrs.METH_GET,
|
869 |
+
protocols: Iterable[str] = (),
|
870 |
+
timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
|
871 |
+
receive_timeout: Optional[float] = None,
|
872 |
+
autoclose: bool = True,
|
873 |
+
autoping: bool = True,
|
874 |
+
heartbeat: Optional[float] = None,
|
875 |
+
auth: Optional[BasicAuth] = None,
|
876 |
+
origin: Optional[str] = None,
|
877 |
+
params: Query = None,
|
878 |
+
headers: Optional[LooseHeaders] = None,
|
879 |
+
proxy: Optional[StrOrURL] = None,
|
880 |
+
proxy_auth: Optional[BasicAuth] = None,
|
881 |
+
ssl: Union[SSLContext, bool, Fingerprint] = True,
|
882 |
+
verify_ssl: Optional[bool] = None,
|
883 |
+
fingerprint: Optional[bytes] = None,
|
884 |
+
ssl_context: Optional[SSLContext] = None,
|
885 |
+
server_hostname: Optional[str] = None,
|
886 |
+
proxy_headers: Optional[LooseHeaders] = None,
|
887 |
+
compress: int = 0,
|
888 |
+
max_msg_size: int = 4 * 1024 * 1024,
|
889 |
+
) -> "_WSRequestContextManager":
|
890 |
+
"""Initiate websocket connection."""
|
891 |
+
return _WSRequestContextManager(
|
892 |
+
self._ws_connect(
|
893 |
+
url,
|
894 |
+
method=method,
|
895 |
+
protocols=protocols,
|
896 |
+
timeout=timeout,
|
897 |
+
receive_timeout=receive_timeout,
|
898 |
+
autoclose=autoclose,
|
899 |
+
autoping=autoping,
|
900 |
+
heartbeat=heartbeat,
|
901 |
+
auth=auth,
|
902 |
+
origin=origin,
|
903 |
+
params=params,
|
904 |
+
headers=headers,
|
905 |
+
proxy=proxy,
|
906 |
+
proxy_auth=proxy_auth,
|
907 |
+
ssl=ssl,
|
908 |
+
verify_ssl=verify_ssl,
|
909 |
+
fingerprint=fingerprint,
|
910 |
+
ssl_context=ssl_context,
|
911 |
+
server_hostname=server_hostname,
|
912 |
+
proxy_headers=proxy_headers,
|
913 |
+
compress=compress,
|
914 |
+
max_msg_size=max_msg_size,
|
915 |
+
)
|
916 |
+
)
|
917 |
+
|
918 |
+
async def _ws_connect(
|
919 |
+
self,
|
920 |
+
url: StrOrURL,
|
921 |
+
*,
|
922 |
+
method: str = hdrs.METH_GET,
|
923 |
+
protocols: Iterable[str] = (),
|
924 |
+
timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
|
925 |
+
receive_timeout: Optional[float] = None,
|
926 |
+
autoclose: bool = True,
|
927 |
+
autoping: bool = True,
|
928 |
+
heartbeat: Optional[float] = None,
|
929 |
+
auth: Optional[BasicAuth] = None,
|
930 |
+
origin: Optional[str] = None,
|
931 |
+
params: Query = None,
|
932 |
+
headers: Optional[LooseHeaders] = None,
|
933 |
+
proxy: Optional[StrOrURL] = None,
|
934 |
+
proxy_auth: Optional[BasicAuth] = None,
|
935 |
+
ssl: Union[SSLContext, bool, Fingerprint] = True,
|
936 |
+
verify_ssl: Optional[bool] = None,
|
937 |
+
fingerprint: Optional[bytes] = None,
|
938 |
+
ssl_context: Optional[SSLContext] = None,
|
939 |
+
server_hostname: Optional[str] = None,
|
940 |
+
proxy_headers: Optional[LooseHeaders] = None,
|
941 |
+
compress: int = 0,
|
942 |
+
max_msg_size: int = 4 * 1024 * 1024,
|
943 |
+
) -> ClientWebSocketResponse:
|
944 |
+
if timeout is not sentinel:
|
945 |
+
if isinstance(timeout, ClientWSTimeout):
|
946 |
+
ws_timeout = timeout
|
947 |
+
else:
|
948 |
+
warnings.warn(
|
949 |
+
"parameter 'timeout' of type 'float' "
|
950 |
+
"is deprecated, please use "
|
951 |
+
"'timeout=ClientWSTimeout(ws_close=...)'",
|
952 |
+
DeprecationWarning,
|
953 |
+
stacklevel=2,
|
954 |
+
)
|
955 |
+
ws_timeout = ClientWSTimeout(ws_close=timeout)
|
956 |
+
else:
|
957 |
+
ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
|
958 |
+
if receive_timeout is not None:
|
959 |
+
warnings.warn(
|
960 |
+
"float parameter 'receive_timeout' "
|
961 |
+
"is deprecated, please use parameter "
|
962 |
+
"'timeout=ClientWSTimeout(ws_receive=...)'",
|
963 |
+
DeprecationWarning,
|
964 |
+
stacklevel=2,
|
965 |
+
)
|
966 |
+
ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout)
|
967 |
+
|
968 |
+
if headers is None:
|
969 |
+
real_headers: CIMultiDict[str] = CIMultiDict()
|
970 |
+
else:
|
971 |
+
real_headers = CIMultiDict(headers)
|
972 |
+
|
973 |
+
default_headers = {
|
974 |
+
hdrs.UPGRADE: "websocket",
|
975 |
+
hdrs.CONNECTION: "Upgrade",
|
976 |
+
hdrs.SEC_WEBSOCKET_VERSION: "13",
|
977 |
+
}
|
978 |
+
|
979 |
+
for key, value in default_headers.items():
|
980 |
+
real_headers.setdefault(key, value)
|
981 |
+
|
982 |
+
sec_key = base64.b64encode(os.urandom(16))
|
983 |
+
real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
|
984 |
+
|
985 |
+
if protocols:
|
986 |
+
real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
|
987 |
+
if origin is not None:
|
988 |
+
real_headers[hdrs.ORIGIN] = origin
|
989 |
+
if compress:
|
990 |
+
extstr = ws_ext_gen(compress=compress)
|
991 |
+
real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
|
992 |
+
|
993 |
+
# For the sake of backward compatibility, if user passes in None, convert it to True
|
994 |
+
if ssl is None:
|
995 |
+
warnings.warn(
|
996 |
+
"ssl=None is deprecated, please use ssl=True",
|
997 |
+
DeprecationWarning,
|
998 |
+
stacklevel=2,
|
999 |
+
)
|
1000 |
+
ssl = True
|
1001 |
+
ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
|
1002 |
+
|
1003 |
+
# send request
|
1004 |
+
resp = await self.request(
|
1005 |
+
method,
|
1006 |
+
url,
|
1007 |
+
params=params,
|
1008 |
+
headers=real_headers,
|
1009 |
+
read_until_eof=False,
|
1010 |
+
auth=auth,
|
1011 |
+
proxy=proxy,
|
1012 |
+
proxy_auth=proxy_auth,
|
1013 |
+
ssl=ssl,
|
1014 |
+
server_hostname=server_hostname,
|
1015 |
+
proxy_headers=proxy_headers,
|
1016 |
+
)
|
1017 |
+
|
1018 |
+
try:
|
1019 |
+
# check handshake
|
1020 |
+
if resp.status != 101:
|
1021 |
+
raise WSServerHandshakeError(
|
1022 |
+
resp.request_info,
|
1023 |
+
resp.history,
|
1024 |
+
message="Invalid response status",
|
1025 |
+
status=resp.status,
|
1026 |
+
headers=resp.headers,
|
1027 |
+
)
|
1028 |
+
|
1029 |
+
if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
|
1030 |
+
raise WSServerHandshakeError(
|
1031 |
+
resp.request_info,
|
1032 |
+
resp.history,
|
1033 |
+
message="Invalid upgrade header",
|
1034 |
+
status=resp.status,
|
1035 |
+
headers=resp.headers,
|
1036 |
+
)
|
1037 |
+
|
1038 |
+
if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
|
1039 |
+
raise WSServerHandshakeError(
|
1040 |
+
resp.request_info,
|
1041 |
+
resp.history,
|
1042 |
+
message="Invalid connection header",
|
1043 |
+
status=resp.status,
|
1044 |
+
headers=resp.headers,
|
1045 |
+
)
|
1046 |
+
|
1047 |
+
# key calculation
|
1048 |
+
r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
|
1049 |
+
match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
|
1050 |
+
if r_key != match:
|
1051 |
+
raise WSServerHandshakeError(
|
1052 |
+
resp.request_info,
|
1053 |
+
resp.history,
|
1054 |
+
message="Invalid challenge response",
|
1055 |
+
status=resp.status,
|
1056 |
+
headers=resp.headers,
|
1057 |
+
)
|
1058 |
+
|
1059 |
+
# websocket protocol
|
1060 |
+
protocol = None
|
1061 |
+
if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
|
1062 |
+
resp_protocols = [
|
1063 |
+
proto.strip()
|
1064 |
+
for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
|
1065 |
+
]
|
1066 |
+
|
1067 |
+
for proto in resp_protocols:
|
1068 |
+
if proto in protocols:
|
1069 |
+
protocol = proto
|
1070 |
+
break
|
1071 |
+
|
1072 |
+
# websocket compress
|
1073 |
+
notakeover = False
|
1074 |
+
if compress:
|
1075 |
+
compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
|
1076 |
+
if compress_hdrs:
|
1077 |
+
try:
|
1078 |
+
compress, notakeover = ws_ext_parse(compress_hdrs)
|
1079 |
+
except WSHandshakeError as exc:
|
1080 |
+
raise WSServerHandshakeError(
|
1081 |
+
resp.request_info,
|
1082 |
+
resp.history,
|
1083 |
+
message=exc.args[0],
|
1084 |
+
status=resp.status,
|
1085 |
+
headers=resp.headers,
|
1086 |
+
) from exc
|
1087 |
+
else:
|
1088 |
+
compress = 0
|
1089 |
+
notakeover = False
|
1090 |
+
|
1091 |
+
conn = resp.connection
|
1092 |
+
assert conn is not None
|
1093 |
+
conn_proto = conn.protocol
|
1094 |
+
assert conn_proto is not None
|
1095 |
+
|
1096 |
+
# For WS connection the read_timeout must be either receive_timeout or greater
|
1097 |
+
# None == no timeout, i.e. infinite timeout, so None is the max timeout possible
|
1098 |
+
if ws_timeout.ws_receive is None:
|
1099 |
+
# Reset regardless
|
1100 |
+
conn_proto.read_timeout = None
|
1101 |
+
elif conn_proto.read_timeout is not None:
|
1102 |
+
conn_proto.read_timeout = max(
|
1103 |
+
ws_timeout.ws_receive, conn_proto.read_timeout
|
1104 |
+
)
|
1105 |
+
|
1106 |
+
transport = conn.transport
|
1107 |
+
assert transport is not None
|
1108 |
+
reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop)
|
1109 |
+
conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
|
1110 |
+
writer = WebSocketWriter(
|
1111 |
+
conn_proto,
|
1112 |
+
transport,
|
1113 |
+
use_mask=True,
|
1114 |
+
compress=compress,
|
1115 |
+
notakeover=notakeover,
|
1116 |
+
)
|
1117 |
+
except BaseException:
|
1118 |
+
resp.close()
|
1119 |
+
raise
|
1120 |
+
else:
|
1121 |
+
return self._ws_response_class(
|
1122 |
+
reader,
|
1123 |
+
writer,
|
1124 |
+
protocol,
|
1125 |
+
resp,
|
1126 |
+
ws_timeout,
|
1127 |
+
autoclose,
|
1128 |
+
autoping,
|
1129 |
+
self._loop,
|
1130 |
+
heartbeat=heartbeat,
|
1131 |
+
compress=compress,
|
1132 |
+
client_notakeover=notakeover,
|
1133 |
+
)
|
1134 |
+
|
1135 |
+
def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
|
1136 |
+
"""Add default headers and transform it to CIMultiDict"""
|
1137 |
+
# Convert headers to MultiDict
|
1138 |
+
result = CIMultiDict(self._default_headers)
|
1139 |
+
if headers:
|
1140 |
+
if not isinstance(headers, (MultiDictProxy, MultiDict)):
|
1141 |
+
headers = CIMultiDict(headers)
|
1142 |
+
added_names: Set[str] = set()
|
1143 |
+
for key, value in headers.items():
|
1144 |
+
if key in added_names:
|
1145 |
+
result.add(key, value)
|
1146 |
+
else:
|
1147 |
+
result[key] = value
|
1148 |
+
added_names.add(key)
|
1149 |
+
return result
|
1150 |
+
|
1151 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
|
1152 |
+
|
1153 |
+
def get(
|
1154 |
+
self,
|
1155 |
+
url: StrOrURL,
|
1156 |
+
**kwargs: Unpack[_RequestOptions],
|
1157 |
+
) -> "_RequestContextManager": ...
|
1158 |
+
|
1159 |
+
def options(
|
1160 |
+
self,
|
1161 |
+
url: StrOrURL,
|
1162 |
+
**kwargs: Unpack[_RequestOptions],
|
1163 |
+
) -> "_RequestContextManager": ...
|
1164 |
+
|
1165 |
+
def head(
|
1166 |
+
self,
|
1167 |
+
url: StrOrURL,
|
1168 |
+
**kwargs: Unpack[_RequestOptions],
|
1169 |
+
) -> "_RequestContextManager": ...
|
1170 |
+
|
1171 |
+
def post(
|
1172 |
+
self,
|
1173 |
+
url: StrOrURL,
|
1174 |
+
**kwargs: Unpack[_RequestOptions],
|
1175 |
+
) -> "_RequestContextManager": ...
|
1176 |
+
|
1177 |
+
def put(
|
1178 |
+
self,
|
1179 |
+
url: StrOrURL,
|
1180 |
+
**kwargs: Unpack[_RequestOptions],
|
1181 |
+
) -> "_RequestContextManager": ...
|
1182 |
+
|
1183 |
+
def patch(
|
1184 |
+
self,
|
1185 |
+
url: StrOrURL,
|
1186 |
+
**kwargs: Unpack[_RequestOptions],
|
1187 |
+
) -> "_RequestContextManager": ...
|
1188 |
+
|
1189 |
+
def delete(
|
1190 |
+
self,
|
1191 |
+
url: StrOrURL,
|
1192 |
+
**kwargs: Unpack[_RequestOptions],
|
1193 |
+
) -> "_RequestContextManager": ...
|
1194 |
+
|
1195 |
+
else:
|
1196 |
+
|
1197 |
+
def get(
|
1198 |
+
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
|
1199 |
+
) -> "_RequestContextManager":
|
1200 |
+
"""Perform HTTP GET request."""
|
1201 |
+
return _RequestContextManager(
|
1202 |
+
self._request(
|
1203 |
+
hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs
|
1204 |
+
)
|
1205 |
+
)
|
1206 |
+
|
1207 |
+
def options(
|
1208 |
+
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
|
1209 |
+
) -> "_RequestContextManager":
|
1210 |
+
"""Perform HTTP OPTIONS request."""
|
1211 |
+
return _RequestContextManager(
|
1212 |
+
self._request(
|
1213 |
+
hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
|
1214 |
+
)
|
1215 |
+
)
|
1216 |
+
|
1217 |
+
def head(
|
1218 |
+
self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
|
1219 |
+
) -> "_RequestContextManager":
|
1220 |
+
"""Perform HTTP HEAD request."""
|
1221 |
+
return _RequestContextManager(
|
1222 |
+
self._request(
|
1223 |
+
hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
|
1224 |
+
)
|
1225 |
+
)
|
1226 |
+
|
1227 |
+
def post(
|
1228 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
1229 |
+
) -> "_RequestContextManager":
|
1230 |
+
"""Perform HTTP POST request."""
|
1231 |
+
return _RequestContextManager(
|
1232 |
+
self._request(hdrs.METH_POST, url, data=data, **kwargs)
|
1233 |
+
)
|
1234 |
+
|
1235 |
+
def put(
|
1236 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
1237 |
+
) -> "_RequestContextManager":
|
1238 |
+
"""Perform HTTP PUT request."""
|
1239 |
+
return _RequestContextManager(
|
1240 |
+
self._request(hdrs.METH_PUT, url, data=data, **kwargs)
|
1241 |
+
)
|
1242 |
+
|
1243 |
+
def patch(
|
1244 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
1245 |
+
) -> "_RequestContextManager":
|
1246 |
+
"""Perform HTTP PATCH request."""
|
1247 |
+
return _RequestContextManager(
|
1248 |
+
self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
|
1249 |
+
)
|
1250 |
+
|
1251 |
+
def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
|
1252 |
+
"""Perform HTTP DELETE request."""
|
1253 |
+
return _RequestContextManager(
|
1254 |
+
self._request(hdrs.METH_DELETE, url, **kwargs)
|
1255 |
+
)
|
1256 |
+
|
1257 |
+
async def close(self) -> None:
|
1258 |
+
"""Close underlying connector.
|
1259 |
+
|
1260 |
+
Release all acquired resources.
|
1261 |
+
"""
|
1262 |
+
if not self.closed:
|
1263 |
+
if self._connector is not None and self._connector_owner:
|
1264 |
+
await self._connector.close()
|
1265 |
+
self._connector = None
|
1266 |
+
|
1267 |
+
@property
|
1268 |
+
def closed(self) -> bool:
|
1269 |
+
"""Is client session closed.
|
1270 |
+
|
1271 |
+
A readonly property.
|
1272 |
+
"""
|
1273 |
+
return self._connector is None or self._connector.closed
|
1274 |
+
|
1275 |
+
@property
|
1276 |
+
def connector(self) -> Optional[BaseConnector]:
|
1277 |
+
"""Connector instance used for the session."""
|
1278 |
+
return self._connector
|
1279 |
+
|
1280 |
+
@property
|
1281 |
+
def cookie_jar(self) -> AbstractCookieJar:
|
1282 |
+
"""The session cookies."""
|
1283 |
+
return self._cookie_jar
|
1284 |
+
|
1285 |
+
@property
|
1286 |
+
def version(self) -> Tuple[int, int]:
|
1287 |
+
"""The session HTTP protocol version."""
|
1288 |
+
return self._version
|
1289 |
+
|
1290 |
+
@property
|
1291 |
+
def requote_redirect_url(self) -> bool:
|
1292 |
+
"""Do URL requoting on redirection handling."""
|
1293 |
+
return self._requote_redirect_url
|
1294 |
+
|
1295 |
+
@requote_redirect_url.setter
|
1296 |
+
def requote_redirect_url(self, val: bool) -> None:
|
1297 |
+
"""Do URL requoting on redirection handling."""
|
1298 |
+
warnings.warn(
|
1299 |
+
"session.requote_redirect_url modification is deprecated #2778",
|
1300 |
+
DeprecationWarning,
|
1301 |
+
stacklevel=2,
|
1302 |
+
)
|
1303 |
+
self._requote_redirect_url = val
|
1304 |
+
|
1305 |
+
@property
|
1306 |
+
def loop(self) -> asyncio.AbstractEventLoop:
|
1307 |
+
"""Session's loop."""
|
1308 |
+
warnings.warn(
|
1309 |
+
"client.loop property is deprecated", DeprecationWarning, stacklevel=2
|
1310 |
+
)
|
1311 |
+
return self._loop
|
1312 |
+
|
1313 |
+
@property
|
1314 |
+
def timeout(self) -> ClientTimeout:
|
1315 |
+
"""Timeout for the session."""
|
1316 |
+
return self._timeout
|
1317 |
+
|
1318 |
+
@property
|
1319 |
+
def headers(self) -> "CIMultiDict[str]":
|
1320 |
+
"""The default headers of the client session."""
|
1321 |
+
return self._default_headers
|
1322 |
+
|
1323 |
+
@property
|
1324 |
+
def skip_auto_headers(self) -> FrozenSet[istr]:
|
1325 |
+
"""Headers for which autogeneration should be skipped"""
|
1326 |
+
return self._skip_auto_headers
|
1327 |
+
|
1328 |
+
@property
|
1329 |
+
def auth(self) -> Optional[BasicAuth]:
|
1330 |
+
"""An object that represents HTTP Basic Authorization"""
|
1331 |
+
return self._default_auth
|
1332 |
+
|
1333 |
+
@property
|
1334 |
+
def json_serialize(self) -> JSONEncoder:
|
1335 |
+
"""Json serializer callable"""
|
1336 |
+
return self._json_serialize
|
1337 |
+
|
1338 |
+
@property
|
1339 |
+
def connector_owner(self) -> bool:
|
1340 |
+
"""Should connector be closed on session closing"""
|
1341 |
+
return self._connector_owner
|
1342 |
+
|
1343 |
+
@property
|
1344 |
+
def raise_for_status(
|
1345 |
+
self,
|
1346 |
+
) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
|
1347 |
+
"""Should `ClientResponse.raise_for_status()` be called for each response."""
|
1348 |
+
return self._raise_for_status
|
1349 |
+
|
1350 |
+
@property
|
1351 |
+
def auto_decompress(self) -> bool:
|
1352 |
+
"""Should the body response be automatically decompressed."""
|
1353 |
+
return self._auto_decompress
|
1354 |
+
|
1355 |
+
@property
|
1356 |
+
def trust_env(self) -> bool:
|
1357 |
+
"""
|
1358 |
+
Should proxies information from environment or netrc be trusted.
|
1359 |
+
|
1360 |
+
Information is from HTTP_PROXY / HTTPS_PROXY environment variables
|
1361 |
+
or ~/.netrc file if present.
|
1362 |
+
"""
|
1363 |
+
return self._trust_env
|
1364 |
+
|
1365 |
+
@property
|
1366 |
+
def trace_configs(self) -> List[TraceConfig]:
|
1367 |
+
"""A list of TraceConfig instances used for client tracing"""
|
1368 |
+
return self._trace_configs
|
1369 |
+
|
1370 |
+
def detach(self) -> None:
|
1371 |
+
"""Detach connector from session without closing the former.
|
1372 |
+
|
1373 |
+
Session is switched to closed state anyway.
|
1374 |
+
"""
|
1375 |
+
self._connector = None
|
1376 |
+
|
1377 |
+
def __enter__(self) -> None:
|
1378 |
+
raise TypeError("Use async with instead")
|
1379 |
+
|
1380 |
+
def __exit__(
|
1381 |
+
self,
|
1382 |
+
exc_type: Optional[Type[BaseException]],
|
1383 |
+
exc_val: Optional[BaseException],
|
1384 |
+
exc_tb: Optional[TracebackType],
|
1385 |
+
) -> None:
|
1386 |
+
# __exit__ should exist in pair with __enter__ but never executed
|
1387 |
+
pass # pragma: no cover
|
1388 |
+
|
1389 |
+
async def __aenter__(self) -> "ClientSession":
|
1390 |
+
return self
|
1391 |
+
|
1392 |
+
async def __aexit__(
|
1393 |
+
self,
|
1394 |
+
exc_type: Optional[Type[BaseException]],
|
1395 |
+
exc_val: Optional[BaseException],
|
1396 |
+
exc_tb: Optional[TracebackType],
|
1397 |
+
) -> None:
|
1398 |
+
await self.close()
|
1399 |
+
|
1400 |
+
|
1401 |
+
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
|
1402 |
+
|
1403 |
+
__slots__ = ("_coro", "_resp")
|
1404 |
+
|
1405 |
+
def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
|
1406 |
+
self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro
|
1407 |
+
|
1408 |
+
def send(self, arg: None) -> "asyncio.Future[Any]":
|
1409 |
+
return self._coro.send(arg)
|
1410 |
+
|
1411 |
+
def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
|
1412 |
+
return self._coro.throw(*args, **kwargs)
|
1413 |
+
|
1414 |
+
def close(self) -> None:
|
1415 |
+
return self._coro.close()
|
1416 |
+
|
1417 |
+
def __await__(self) -> Generator[Any, None, _RetType]:
|
1418 |
+
ret = self._coro.__await__()
|
1419 |
+
return ret
|
1420 |
+
|
1421 |
+
def __iter__(self) -> Generator[Any, None, _RetType]:
|
1422 |
+
return self.__await__()
|
1423 |
+
|
1424 |
+
async def __aenter__(self) -> _RetType:
|
1425 |
+
self._resp: _RetType = await self._coro
|
1426 |
+
return await self._resp.__aenter__()
|
1427 |
+
|
1428 |
+
async def __aexit__(
|
1429 |
+
self,
|
1430 |
+
exc_type: Optional[Type[BaseException]],
|
1431 |
+
exc: Optional[BaseException],
|
1432 |
+
tb: Optional[TracebackType],
|
1433 |
+
) -> None:
|
1434 |
+
await self._resp.__aexit__(exc_type, exc, tb)
|
1435 |
+
|
1436 |
+
|
1437 |
+
_RequestContextManager = _BaseRequestContextManager[ClientResponse]
|
1438 |
+
_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse]
|
1439 |
+
|
1440 |
+
|
1441 |
+
class _SessionRequestContextManager:
|
1442 |
+
|
1443 |
+
__slots__ = ("_coro", "_resp", "_session")
|
1444 |
+
|
1445 |
+
def __init__(
|
1446 |
+
self,
|
1447 |
+
coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
|
1448 |
+
session: ClientSession,
|
1449 |
+
) -> None:
|
1450 |
+
self._coro = coro
|
1451 |
+
self._resp: Optional[ClientResponse] = None
|
1452 |
+
self._session = session
|
1453 |
+
|
1454 |
+
async def __aenter__(self) -> ClientResponse:
|
1455 |
+
try:
|
1456 |
+
self._resp = await self._coro
|
1457 |
+
except BaseException:
|
1458 |
+
await self._session.close()
|
1459 |
+
raise
|
1460 |
+
else:
|
1461 |
+
return self._resp
|
1462 |
+
|
1463 |
+
async def __aexit__(
|
1464 |
+
self,
|
1465 |
+
exc_type: Optional[Type[BaseException]],
|
1466 |
+
exc: Optional[BaseException],
|
1467 |
+
tb: Optional[TracebackType],
|
1468 |
+
) -> None:
|
1469 |
+
assert self._resp is not None
|
1470 |
+
self._resp.close()
|
1471 |
+
await self._session.close()
|
1472 |
+
|
1473 |
+
|
1474 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
|
1475 |
+
|
1476 |
+
def request(
|
1477 |
+
method: str,
|
1478 |
+
url: StrOrURL,
|
1479 |
+
*,
|
1480 |
+
version: HttpVersion = http.HttpVersion11,
|
1481 |
+
connector: Optional[BaseConnector] = None,
|
1482 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
1483 |
+
**kwargs: Unpack[_RequestOptions],
|
1484 |
+
) -> _SessionRequestContextManager: ...
|
1485 |
+
|
1486 |
+
else:
|
1487 |
+
|
1488 |
+
def request(
|
1489 |
+
method: str,
|
1490 |
+
url: StrOrURL,
|
1491 |
+
*,
|
1492 |
+
version: HttpVersion = http.HttpVersion11,
|
1493 |
+
connector: Optional[BaseConnector] = None,
|
1494 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
1495 |
+
**kwargs: Any,
|
1496 |
+
) -> _SessionRequestContextManager:
|
1497 |
+
"""Constructs and sends a request.
|
1498 |
+
|
1499 |
+
Returns response object.
|
1500 |
+
method - HTTP method
|
1501 |
+
url - request url
|
1502 |
+
params - (optional) Dictionary or bytes to be sent in the query
|
1503 |
+
string of the new request
|
1504 |
+
data - (optional) Dictionary, bytes, or file-like object to
|
1505 |
+
send in the body of the request
|
1506 |
+
json - (optional) Any json compatible python object
|
1507 |
+
headers - (optional) Dictionary of HTTP Headers to send with
|
1508 |
+
the request
|
1509 |
+
cookies - (optional) Dict object to send with the request
|
1510 |
+
auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
|
1511 |
+
auth - aiohttp.helpers.BasicAuth
|
1512 |
+
allow_redirects - (optional) If set to False, do not follow
|
1513 |
+
redirects
|
1514 |
+
version - Request HTTP version.
|
1515 |
+
compress - Set to True if request has to be compressed
|
1516 |
+
with deflate encoding.
|
1517 |
+
chunked - Set to chunk size for chunked transfer encoding.
|
1518 |
+
expect100 - Expect 100-continue response from server.
|
1519 |
+
connector - BaseConnector sub-class instance to support
|
1520 |
+
connection pooling.
|
1521 |
+
read_until_eof - Read response until eof if response
|
1522 |
+
does not have Content-Length header.
|
1523 |
+
loop - Optional event loop.
|
1524 |
+
timeout - Optional ClientTimeout settings structure, 5min
|
1525 |
+
total timeout by default.
|
1526 |
+
Usage::
|
1527 |
+
>>> import aiohttp
|
1528 |
+
>>> async with aiohttp.request('GET', 'http://python.org/') as resp:
|
1529 |
+
... print(resp)
|
1530 |
+
... data = await resp.read()
|
1531 |
+
<ClientResponse(https://www.python.org/) [200 OK]>
|
1532 |
+
"""
|
1533 |
+
connector_owner = False
|
1534 |
+
if connector is None:
|
1535 |
+
connector_owner = True
|
1536 |
+
connector = TCPConnector(loop=loop, force_close=True)
|
1537 |
+
|
1538 |
+
session = ClientSession(
|
1539 |
+
loop=loop,
|
1540 |
+
cookies=kwargs.pop("cookies", None),
|
1541 |
+
version=version,
|
1542 |
+
timeout=kwargs.pop("timeout", sentinel),
|
1543 |
+
connector=connector,
|
1544 |
+
connector_owner=connector_owner,
|
1545 |
+
)
|
1546 |
+
|
1547 |
+
return _SessionRequestContextManager(
|
1548 |
+
session._request(method, url, **kwargs),
|
1549 |
+
session,
|
1550 |
+
)
|
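A minimal usage sketch of the session API above (the URL and timeout values are illustrative only; the class and method names come from this file):

import asyncio

import aiohttp


async def main() -> None:
    # ClientTimeout bounds the whole operation: connect, redirects, body read.
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        # get() returns a _RequestContextManager; the response is released
        # automatically when the `async with` block exits.
        async with session.get("https://example.com", allow_redirects=True) as resp:
            resp.raise_for_status()
            body = await resp.text()
            print(resp.status, len(body))


asyncio.run(main())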
venv/Lib/site-packages/aiohttp/client_exceptions.py
ADDED
@@ -0,0 +1,421 @@
"""HTTP related errors."""

import asyncio
import warnings
from typing import TYPE_CHECKING, Optional, Tuple, Union

from multidict import MultiMapping

from .typedefs import StrOrURL

if TYPE_CHECKING:
    import ssl

    SSLContext = ssl.SSLContext
else:
    try:
        import ssl

        SSLContext = ssl.SSLContext
    except ImportError:  # pragma: no cover
        ssl = SSLContext = None  # type: ignore[assignment]

if TYPE_CHECKING:
    from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
    from .http_parser import RawResponseMessage
else:
    RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None

__all__ = (
    "ClientError",
    "ClientConnectionError",
    "ClientConnectionResetError",
    "ClientOSError",
    "ClientConnectorError",
    "ClientProxyConnectionError",
    "ClientSSLError",
    "ClientConnectorDNSError",
    "ClientConnectorSSLError",
    "ClientConnectorCertificateError",
    "ConnectionTimeoutError",
    "SocketTimeoutError",
    "ServerConnectionError",
    "ServerTimeoutError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ClientResponseError",
    "ClientHttpProxyError",
    "WSServerHandshakeError",
    "ContentTypeError",
    "ClientPayloadError",
    "InvalidURL",
    "InvalidUrlClientError",
    "RedirectClientError",
    "NonHttpUrlClientError",
    "InvalidUrlRedirectClientError",
    "NonHttpUrlRedirectClientError",
    "WSMessageTypeError",
)


class ClientError(Exception):
    """Base class for client connection errors."""


class ClientResponseError(ClientError):
    """Base class for exceptions that occur after getting a response.

    request_info: An instance of RequestInfo.
    history: A sequence of responses, if redirects occurred.
    status: HTTP status code.
    message: Error message.
    headers: Response headers.
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[MultiMapping[str]] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        if status is not None:
            self.status = status
        elif code is not None:
            self.status = code
        else:
            self.status = 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return "{}, message={!r}, url={!r}".format(
            self.status,
            self.message,
            str(self.request_info.real_url),
        )

    def __repr__(self) -> str:
        args = f"{self.request_info!r}, {self.history!r}"
        if self.status != 0:
            args += f", status={self.status!r}"
        if self.message != "":
            args += f", message={self.message!r}"
        if self.headers is not None:
            args += f", headers={self.headers!r}"
        return f"{type(self).__name__}({args})"

    @property
    def code(self) -> int:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value


class ContentTypeError(ClientResponseError):
    """ContentType found is not valid."""


class WSServerHandshakeError(ClientResponseError):
    """websocket server handshake error."""


class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """


class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""


class ClientConnectionError(ClientError):
    """Base class for client socket errors."""


class ClientConnectionResetError(ClientConnectionError, ConnectionResetError):
    """ConnectionResetError"""


class ClientOSError(ClientConnectionError, OSError):
    """OSError error."""


class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    a connection can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, "default" if self.ssl is True else self.ssl, self.strerror
        )

    # OSError.__reduce__ does too much black magic
    __reduce__ = BaseException.__reduce__


class ClientConnectorDNSError(ClientConnectorError):
    """DNS resolution failed during client connection.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    DNS resolution fails.
    """


class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """


class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        return self._path

    def __str__(self) -> str:
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, "default" if self.ssl is True else self.ssl, self.strerror
        )


class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""


class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        if message is None:
            message = "Server disconnected"

        self.args = (message,)
        self.message = message


class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error."""


class ConnectionTimeoutError(ServerTimeoutError):
    """Connection timeout error."""


class SocketTimeoutError(ServerTimeoutError):
    """Socket timeout error."""


class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
            self.__class__.__name__, self.expected, self.got, self.host, self.port
        )


class ClientPayloadError(ClientError):
    """Response payload error."""


class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain a host
    part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        self._url = url
        self._description = description

        if description:
            super().__init__(url, description)
        else:
            super().__init__(url)

    @property
    def url(self) -> StrOrURL:
        return self._url

    @property
    def description(self) -> "str | None":
        return self._description

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self}>"

    def __str__(self) -> str:
        if self._description:
            return f"{self._url} - {self._description}"
        return str(self._url)


class InvalidUrlClientError(InvalidURL):
    """Invalid URL client error."""


class RedirectClientError(ClientError):
    """Client redirect error."""


class NonHttpUrlClientError(ClientError):
    """Non http URL client error."""


class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError):
    """Invalid URL redirect client error."""


class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError):
    """Non http URL redirect client error."""


class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""


if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)


class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Response ssl error."""


class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )


class WSMessageTypeError(TypeError):
    """WebSocket message type is not valid."""
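A short sketch of how this hierarchy is typically caught (the URL is illustrative; `ClientConnectorError` exposes the `host`/`port`/`os_error` properties defined above, and `ClientResponseError` carries `status` and `request_info`):

import asyncio

import aiohttp


async def fetch(url: str) -> str:
    async with aiohttp.ClientSession(raise_for_status=True) as session:
        try:
            async with session.get(url) as resp:
                return await resp.text()
        except aiohttp.ClientResponseError as exc:
            # Raised after a response arrived (e.g. 4xx/5xx with raise_for_status).
            print(f"HTTP {exc.status} for {exc.request_info.real_url}")
            raise
        except aiohttp.ClientConnectorError as exc:
            # Raised when no connection could be established at all.
            print(f"cannot reach {exc.host}:{exc.port}: {exc.os_error}")
            raise


asyncio.run(fetch("https://example.invalid/"))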
venv/Lib/site-packages/aiohttp/client_proto.py
ADDED
@@ -0,0 +1,308 @@
1 |
+
import asyncio
|
2 |
+
from contextlib import suppress
|
3 |
+
from typing import Any, Optional, Tuple
|
4 |
+
|
5 |
+
from .base_protocol import BaseProtocol
|
6 |
+
from .client_exceptions import (
|
7 |
+
ClientOSError,
|
8 |
+
ClientPayloadError,
|
9 |
+
ServerDisconnectedError,
|
10 |
+
SocketTimeoutError,
|
11 |
+
)
|
12 |
+
from .helpers import (
|
13 |
+
_EXC_SENTINEL,
|
14 |
+
EMPTY_BODY_STATUS_CODES,
|
15 |
+
BaseTimerContext,
|
16 |
+
set_exception,
|
17 |
+
)
|
18 |
+
from .http import HttpResponseParser, RawResponseMessage
|
19 |
+
from .http_exceptions import HttpProcessingError
|
20 |
+
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
21 |
+
|
22 |
+
|
23 |
+
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
|
24 |
+
"""Helper class to adapt between Protocol and StreamReader."""
|
25 |
+
|
26 |
+
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
27 |
+
BaseProtocol.__init__(self, loop=loop)
|
28 |
+
DataQueue.__init__(self, loop)
|
29 |
+
|
30 |
+
self._should_close = False
|
31 |
+
|
32 |
+
self._payload: Optional[StreamReader] = None
|
33 |
+
self._skip_payload = False
|
34 |
+
self._payload_parser = None
|
35 |
+
|
36 |
+
self._timer = None
|
37 |
+
|
38 |
+
self._tail = b""
|
39 |
+
self._upgraded = False
|
40 |
+
self._parser: Optional[HttpResponseParser] = None
|
41 |
+
|
42 |
+
self._read_timeout: Optional[float] = None
|
43 |
+
self._read_timeout_handle: Optional[asyncio.TimerHandle] = None
|
44 |
+
|
45 |
+
self._timeout_ceil_threshold: Optional[float] = 5
|
46 |
+
|
47 |
+
@property
|
48 |
+
def upgraded(self) -> bool:
|
49 |
+
return self._upgraded
|
50 |
+
|
51 |
+
@property
|
52 |
+
def should_close(self) -> bool:
|
53 |
+
return bool(
|
54 |
+
self._should_close
|
55 |
+
or (self._payload is not None and not self._payload.is_eof())
|
56 |
+
or self._upgraded
|
57 |
+
or self._exception is not None
|
58 |
+
or self._payload_parser is not None
|
59 |
+
or self._buffer
|
60 |
+
or self._tail
|
61 |
+
)
|
62 |
+
|
63 |
+
def force_close(self) -> None:
|
64 |
+
self._should_close = True
|
65 |
+
|
66 |
+
def close(self) -> None:
|
67 |
+
self._exception = None # Break cyclic references
|
68 |
+
transport = self.transport
|
69 |
+
if transport is not None:
|
70 |
+
transport.close()
|
71 |
+
self.transport = None
|
72 |
+
self._payload = None
|
73 |
+
self._drop_timeout()
|
74 |
+
|
75 |
+
def is_connected(self) -> bool:
|
76 |
+
return self.transport is not None and not self.transport.is_closing()
|
77 |
+
|
78 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
79 |
+
self._drop_timeout()
|
80 |
+
|
81 |
+
original_connection_error = exc
|
82 |
+
reraised_exc = original_connection_error
|
83 |
+
|
84 |
+
connection_closed_cleanly = original_connection_error is None
|
85 |
+
|
86 |
+
if self._payload_parser is not None:
|
87 |
+
with suppress(Exception): # FIXME: log this somehow?
|
88 |
+
self._payload_parser.feed_eof()
|
89 |
+
|
90 |
+
uncompleted = None
|
91 |
+
if self._parser is not None:
|
92 |
+
try:
|
93 |
+
uncompleted = self._parser.feed_eof()
|
94 |
+
except Exception as underlying_exc:
|
95 |
+
if self._payload is not None:
|
96 |
+
client_payload_exc_msg = (
|
97 |
+
f"Response payload is not completed: {underlying_exc !r}"
|
98 |
+
)
|
99 |
+
if not connection_closed_cleanly:
|
100 |
+
client_payload_exc_msg = (
|
101 |
+
f"{client_payload_exc_msg !s}. "
|
102 |
+
f"{original_connection_error !r}"
|
103 |
+
)
|
104 |
+
set_exception(
|
105 |
+
self._payload,
|
106 |
+
ClientPayloadError(client_payload_exc_msg),
|
107 |
+
underlying_exc,
|
108 |
+
)
|
109 |
+
|
110 |
+
if not self.is_eof():
|
111 |
+
if isinstance(original_connection_error, OSError):
|
112 |
+
reraised_exc = ClientOSError(*original_connection_error.args)
|
113 |
+
if connection_closed_cleanly:
|
114 |
+
reraised_exc = ServerDisconnectedError(uncompleted)
|
115 |
+
# assigns self._should_close to True as side effect,
|
116 |
+
# we do it anyway below
|
117 |
+
underlying_non_eof_exc = (
|
118 |
+
_EXC_SENTINEL
|
119 |
+
if connection_closed_cleanly
|
120 |
+
else original_connection_error
|
121 |
+
)
|
122 |
+
assert underlying_non_eof_exc is not None
|
123 |
+
assert reraised_exc is not None
|
124 |
+
self.set_exception(reraised_exc, underlying_non_eof_exc)
|
125 |
+
|
126 |
+
self._should_close = True
|
127 |
+
self._parser = None
|
128 |
+
self._payload = None
|
129 |
+
self._payload_parser = None
|
130 |
+
self._reading_paused = False
|
131 |
+
|
132 |
+
super().connection_lost(reraised_exc)
|
133 |
+
|
134 |
+
def eof_received(self) -> None:
|
135 |
+
# should call parser.feed_eof() most likely
|
136 |
+
self._drop_timeout()
|
137 |
+
|
138 |
+
def pause_reading(self) -> None:
|
139 |
+
super().pause_reading()
|
140 |
+
self._drop_timeout()
|
141 |
+
|
142 |
+
def resume_reading(self) -> None:
|
143 |
+
super().resume_reading()
|
144 |
+
self._reschedule_timeout()
|
145 |
+
|
146 |
+
def set_exception(
|
147 |
+
self,
|
148 |
+
exc: BaseException,
|
149 |
+
exc_cause: BaseException = _EXC_SENTINEL,
|
150 |
+
) -> None:
|
151 |
+
self._should_close = True
|
152 |
+
self._drop_timeout()
|
153 |
+
super().set_exception(exc, exc_cause)
|
154 |
+
|
155 |
+
def set_parser(self, parser: Any, payload: Any) -> None:
|
156 |
+
# TODO: actual types are:
|
157 |
+
# parser: WebSocketReader
|
158 |
+
# payload: WebSocketDataQueue
|
159 |
+
# but they are not generi enough
|
160 |
+
# Need an ABC for both types
|
161 |
+
self._payload = payload
|
162 |
+
self._payload_parser = parser
|
163 |
+
|
164 |
+
self._drop_timeout()
|
165 |
+
|
166 |
+
if self._tail:
|
167 |
+
data, self._tail = self._tail, b""
|
168 |
+
self.data_received(data)
|
169 |
+
|
170 |
+
def set_response_params(
|
171 |
+
self,
|
172 |
+
*,
|
173 |
+
timer: Optional[BaseTimerContext] = None,
|
174 |
+
skip_payload: bool = False,
|
175 |
+
read_until_eof: bool = False,
|
176 |
+
auto_decompress: bool = True,
|
177 |
+
read_timeout: Optional[float] = None,
|
178 |
+
read_bufsize: int = 2**16,
|
179 |
+
timeout_ceil_threshold: float = 5,
|
180 |
+
        max_line_size: int = 8190,
        max_field_size: int = 8190,
    ) -> None:
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout

        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        )

        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def start_timeout(self) -> None:
        self._reschedule_timeout()

    @property
    def read_timeout(self) -> Optional[float]:
        return self._read_timeout

    @read_timeout.setter
    def read_timeout(self, read_timeout: Optional[float]) -> None:
        self._read_timeout = read_timeout

    def _on_read_timeout(self) -> None:
        exc = SocketTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            set_exception(self._payload, exc)

    def data_received(self, data: bytes) -> None:
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser - currently always WebSocketReader
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return

        if self._upgraded or self._parser is None:
            # i.e. websocket connection, websocket parser is not set yet
            self._tail += data
            return

        # parse http messages
        try:
            messages, upgraded, tail = self._parser.feed_data(data)
        except BaseException as underlying_exc:
            if self.transport is not None:
                # connection.release() could be called BEFORE
                # data_received(), the transport is already
                # closed in this case
                self.transport.close()
            # should_close is True after the call
            if isinstance(underlying_exc, HttpProcessingError):
                exc = HttpProcessingError(
                    code=underlying_exc.code,
                    message=underlying_exc.message,
                    headers=underlying_exc.headers,
                )
            else:
                exc = HttpProcessingError()
            self.set_exception(exc, underlying_exc)
            return

        self._upgraded = upgraded

        payload: Optional[StreamReader] = None
        for message, payload in messages:
            if message.should_close:
                self._should_close = True

            self._payload = payload

            if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES:
                self.feed_data((message, EMPTY_PAYLOAD), 0)
            else:
                self.feed_data((message, payload), 0)

        if payload is not None:
            # new message(s) were processed
            # register timeout handler unsubscribing
            # either on end-of-stream or immediately for
            # EMPTY_PAYLOAD
            if payload is not EMPTY_PAYLOAD:
                payload.on_eof(self._drop_timeout)
            else:
                self._drop_timeout()

        if upgraded and tail:
            self.data_received(tail)
venv/Lib/site-packages/aiohttp/client_reqrep.py
ADDED
@@ -0,0 +1,1315 @@
import asyncio
import codecs
import contextlib
import functools
import io
import re
import sys
import traceback
import warnings
from hashlib import md5, sha1, sha256
from http.cookies import CookieError, Morsel, SimpleCookie
from types import MappingProxyType, TracebackType
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Iterable,
    List,
    Mapping,
    NamedTuple,
    Optional,
    Tuple,
    Type,
    Union,
)

import attr
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
from yarl import URL

from . import hdrs, helpers, http, multipart, payload
from .abc import AbstractStreamWriter
from .client_exceptions import (
    ClientConnectionError,
    ClientOSError,
    ClientResponseError,
    ContentTypeError,
    InvalidURL,
    ServerFingerprintMismatch,
)
from .compression_utils import HAS_BROTLI
from .formdata import FormData
from .helpers import (
    _SENTINEL,
    BaseTimerContext,
    BasicAuth,
    HeadersMixin,
    TimerNoop,
    basicauth_from_netrc,
    netrc_from_env,
    noop,
    reify,
    set_exception,
    set_result,
)
from .http import (
    SERVER_SOFTWARE,
    HttpVersion,
    HttpVersion10,
    HttpVersion11,
    StreamWriter,
)
from .log import client_logger
from .streams import StreamReader
from .typedefs import (
    DEFAULT_JSON_DECODER,
    JSONDecoder,
    LooseCookies,
    LooseHeaders,
    Query,
    RawHeaders,
)

if TYPE_CHECKING:
    import ssl
    from ssl import SSLContext
else:
    try:
        import ssl
        from ssl import SSLContext
    except ImportError:  # pragma: no cover
        ssl = None  # type: ignore[assignment]
        SSLContext = object  # type: ignore[misc,assignment]


__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")


if TYPE_CHECKING:
    from .client import ClientSession
    from .connector import Connection
    from .tracing import Trace


_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")


def _gen_default_accept_encoding() -> str:
    return "gzip, deflate, br" if HAS_BROTLI else "gzip, deflate"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    type: Optional[str]
    parameters: "MappingProxyType[str, str]"
    filename: Optional[str]


class _RequestInfo(NamedTuple):
    url: URL
    method: str
    headers: "CIMultiDictProxy[str]"
    real_url: URL


class RequestInfo(_RequestInfo):

    def __new__(
        cls,
        url: URL,
        method: str,
        headers: "CIMultiDictProxy[str]",
        real_url: URL = _SENTINEL,  # type: ignore[assignment]
    ) -> "RequestInfo":
        """Create a new RequestInfo instance.

        For backwards compatibility, the real_url parameter is optional.
        """
        return tuple.__new__(
            cls, (url, method, headers, url if real_url is _SENTINEL else real_url)
        )


class Fingerprint:
    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        digestlen = len(fingerprint)
        hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen)
        if not hashfunc:
            raise ValueError("fingerprint has invalid length")
        elif hashfunc is md5 or hashfunc is sha1:
            raise ValueError("md5 and sha1 are insecure and not supported. Use sha256.")
        self._hashfunc = hashfunc
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        if not transport.get_extra_info("sslcontext"):
            return
        sslobj = transport.get_extra_info("ssl_object")
        cert = sslobj.getpeercert(binary_form=True)
        got = self._hashfunc(cert).digest()
        if got != self._fingerprint:
            host, port, *_ = transport.get_extra_info("peername")
            raise ServerFingerprintMismatch(self._fingerprint, got, host, port)


if ssl is not None:
    SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else:  # pragma: no cover
    SSL_ALLOWED_TYPES = (bool, type(None))


def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint]:
    if ssl is None:
        ssl = True  # Double check for backwards compatibility
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            "got {!r} instead.".format(ssl)
        )
    return ssl


_SSL_SCHEMES = frozenset(("https", "wss"))


# ConnectionKey is a NamedTuple because it is used as a key in a dict
# and a set in the connector. Since a NamedTuple is a tuple it uses
# the fast native tuple __hash__ and __eq__ implementation in CPython.
class ConnectionKey(NamedTuple):
    # the key should contain an information about used proxy / TLS
    # to prevent reusing wrong connections from a pool
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)


def _is_expected_content_type(
    response_content_type: str, expected_content_type: str
) -> bool:
    if expected_content_type == "application/json":
        return json_re.match(response_content_type) is not None
    return expected_content_type in response_content_type


class ClientRequest:
    GET_METHODS = {
        hdrs.METH_GET,
        hdrs.METH_HEAD,
        hdrs.METH_OPTIONS,
        hdrs.METH_TRACE,
    }
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: "*/*",
        hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
    }

    # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
    body: Any = b""
    auth = None
    response = None

    __writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    _skip_auto_headers: Optional["CIMultiDict[None]"] = None

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        params: Query = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        data: Any = None,
        cookies: Optional[LooseCookies] = None,
        auth: Optional[BasicAuth] = None,
        version: http.HttpVersion = http.HttpVersion11,
        compress: Union[str, bool, None] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        response_class: Optional[Type["ClientResponse"]] = None,
        proxy: Optional[URL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timer: Optional[BaseTimerContext] = None,
        session: Optional["ClientSession"] = None,
        ssl: Union[SSLContext, bool, Fingerprint] = True,
        proxy_headers: Optional[LooseHeaders] = None,
        traces: Optional[List["Trace"]] = None,
        trust_env: bool = False,
        server_hostname: Optional[str] = None,
    ):
        if loop is None:
            loop = asyncio.get_event_loop()
        if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
            raise ValueError(
                f"Method cannot contain non-token characters {method!r} "
                f"(found at least {match.group()!r})"
            )
        # URL forbids subclasses, so a simple type check is enough.
        assert type(url) is URL, url
        if proxy is not None:
            assert type(proxy) is URL, proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        if TYPE_CHECKING:
            assert session is not None
        self._session = session
        if params:
            url = url.extend_query(params)
        self.original_url = url
        self.url = url.with_fragment(None) if url.raw_fragment else url
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class: Type[ClientResponse] = real_response_class
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl if ssl is not None else True
        self.server_hostname = server_hostname

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth, trust_env)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data is not None or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        self._traces = [] if traces is None else traces

    def __reset_writer(self, _: object = None) -> None:
        self.__writer = None

    @property
    def skip_auto_headers(self) -> CIMultiDict[None]:
        return self._skip_auto_headers or CIMultiDict()

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        return self.__writer

    @_writer.setter
    def _writer(self, writer: "asyncio.Task[None]") -> None:
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        writer.add_done_callback(self.__reset_writer)

    def is_ssl(self) -> bool:
        return self.url.scheme in _SSL_SCHEMES

    @property
    def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
        return self._ssl

    @property
    def connection_key(self) -> ConnectionKey:
        if proxy_headers := self.proxy_headers:
            h: Optional[int] = hash(tuple(proxy_headers.items()))
        else:
            h = None
        url = self.url
        return tuple.__new__(
            ConnectionKey,
            (
                url.raw_host or "",
                url.port,
                url.scheme in _SSL_SCHEMES,
                self._ssl,
                self.proxy,
                self.proxy_auth,
                h,
            ),
        )

    @property
    def host(self) -> str:
        ret = self.url.raw_host
        assert ret is not None
        return ret

    @property
    def port(self) -> Optional[int]:
        return self.url.port

    @property
    def request_info(self) -> RequestInfo:
        headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
        # These are created on every request, so we use a NamedTuple
        # for performance reasons. We don't use the RequestInfo.__new__
        # method because it has a different signature which is provided
        # for backwards compatibility only.
        return tuple.__new__(
            RequestInfo, (self.url, self.method, headers, self.original_url)
        )

    def update_host(self, url: URL) -> None:
        """Update destination host, port and connection type (ssl)."""
        # get host/port
        if not url.raw_host:
            raise InvalidURL(url)

        # basic auth info
        if url.raw_user or url.raw_password:
            self.auth = helpers.BasicAuth(url.user or "", url.password or "")

    def update_version(self, version: Union[http.HttpVersion, str]) -> None:
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [part.strip() for part in version.split(".", 1)]
            try:
                version = http.HttpVersion(int(v[0]), int(v[1]))
            except ValueError:
                raise ValueError(
                    f"Can not parse http version number: {version}"
                ) from None
        self.version = version

    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers."""
        self.headers: CIMultiDict[str] = CIMultiDict()

        # Build the host header
        host = self.url.host_port_subcomponent

        # host_port_subcomponent is None when the URL is a relative URL.
        # but we know we do not have a relative URL here.
        assert host is not None
        self.headers[hdrs.HOST] = host

        if not headers:
            return

        if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
            headers = headers.items()

        for key, value in headers:  # type: ignore[misc]
            # A special case for Host header
            if key in hdrs.HOST_ALL:
                self.headers[key] = value
            else:
                self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
        if skip_auto_headers is not None:
            self._skip_auto_headers = CIMultiDict(
                (hdr, None) for hdr in sorted(skip_auto_headers)
            )
            used_headers = self.headers.copy()
            used_headers.extend(self._skip_auto_headers)  # type: ignore[arg-type]
        else:
            # Fast path when there are no headers to skip
            # which is the most common case.
            used_headers = self.headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers[hdr] = val

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE

    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding."""
        if not data:
            # Don't compress an empty body.
            self.compress = None
            return

        if self.headers.get(hdrs.CONTENT_ENCODING):
            if self.compress:
                raise ValueError(
                    "compress can not be set if Content-Encoding header is set"
                )
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None and trust_env and self.url.host is not None:
            netrc_obj = netrc_from_env()
            with contextlib.suppress(LookupError):
                auth = basicauth_from_netrc(netrc_obj, self.url.host)
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError("BasicAuth() tuple is required instead")

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, body: Any) -> None:
        if body is None:
            return

        # FormData
        if isinstance(body, FormData):
            body = body()

        try:
            body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
        except payload.LookupError:
            body = FormData(body)()

        self.body = body

        # enable chunked encoding if needed
        if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
            if (size := body.size) is not None:
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
            else:
                self.chunked = True

        # copy payload headers
        assert body.headers
        headers = self.headers
        skip_headers = self._skip_auto_headers
        for key, value in body.headers.items():
            if key in headers or (skip_headers is not None and key in skip_headers):
                continue
            headers[key] = value

    def update_expect_continue(self, expect: bool = False) -> None:
        if expect:
            self.headers[hdrs.EXPECT] = "100-continue"
        elif (
            hdrs.EXPECT in self.headers
            and self.headers[hdrs.EXPECT].lower() == "100-continue"
        ):
            expect = True

        if expect:
            self._continue = self.loop.create_future()

    def update_proxy(
        self,
        proxy: Optional[URL],
        proxy_auth: Optional[BasicAuth],
        proxy_headers: Optional[LooseHeaders],
    ) -> None:
        self.proxy = proxy
        if proxy is None:
            self.proxy_auth = None
            self.proxy_headers = None
            return

        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy_auth = proxy_auth

        if proxy_headers is not None and not isinstance(
            proxy_headers, (MultiDict, MultiDictProxy)
        ):
            proxy_headers = CIMultiDict(proxy_headers)
        self.proxy_headers = proxy_headers

    async def write_bytes(
        self, writer: AbstractStreamWriter, conn: "Connection"
    ) -> None:
        """Support coroutines that yield bytes objects."""
        # 100 response
        if self._continue is not None:
            await writer.drain()
            await self._continue

        protocol = conn.protocol
        assert protocol is not None
        try:
            if isinstance(self.body, payload.Payload):
                await self.body.write(writer)
            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)

                for chunk in self.body:
                    await writer.write(chunk)
        except OSError as underlying_exc:
            reraised_exc = underlying_exc

            exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
                underlying_exc, asyncio.TimeoutError
            )
            if exc_is_not_timeout:
                reraised_exc = ClientOSError(
                    underlying_exc.errno,
                    f"Can not write request body for {self.url !s}",
                )

            set_exception(protocol, reraised_exc, underlying_exc)
        except asyncio.CancelledError:
            # Body hasn't been fully sent, so connection can't be reused.
            conn.close()
            raise
        except Exception as underlying_exc:
            set_exception(
                protocol,
                ClientConnectionError(
                    f"Failed to send bytes into the underlying connection {conn !s}",
                ),
                underlying_exc,
            )
        else:
            await writer.write_eof()
            protocol.start_timeout()

    async def send(self, conn: "Connection") -> "ClientResponse":
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            connect_host = self.url.host_subcomponent
            assert connect_host is not None
            path = f"{connect_host}:{self.url.port}"
        elif self.proxy and not self.is_ssl():
            path = str(self.url)
        else:
            path = self.url.raw_path_qs

        protocol = conn.protocol
        assert protocol is not None
        writer = StreamWriter(
            protocol,
            self.loop,
            on_chunk_sent=(
                functools.partial(self._on_chunk_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
            on_headers_sent=(
                functools.partial(self._on_headers_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
        )

        if self.compress:
            writer.enable_compression(self.compress)  # type: ignore[arg-type]

        if self.chunked is not None:
            writer.enable_chunking()

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and (
                self._skip_auto_headers is None
                or hdrs.CONTENT_TYPE not in self._skip_auto_headers
            )
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        v = self.version
        if hdrs.CONNECTION not in self.headers:
            if conn._connector.force_close:
                if v == HttpVersion11:
                    self.headers[hdrs.CONNECTION] = "close"
            elif v == HttpVersion10:
                self.headers[hdrs.CONNECTION] = "keep-alive"

        # status + headers
        status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
        await writer.write_headers(status_line, self.headers)
        task: Optional["asyncio.Task[None]"]
        if self.body or self._continue is not None or protocol.writing_paused:
            coro = self.write_bytes(writer, conn)
            if sys.version_info >= (3, 12):
                # Optimization for Python 3.12, try to write
                # bytes immediately to avoid having to schedule
                # the task on the event loop.
                task = asyncio.Task(coro, loop=self.loop, eager_start=True)
            else:
                task = self.loop.create_task(coro)
            if task.done():
                task = None
            else:
                self._writer = task
        else:
            # We have nothing to write because
            # - there is no body
            # - the protocol does not have writing paused
            # - we are not waiting for a 100-continue response
            protocol.start_timeout()
            writer.set_eof()
            task = None
        response_class = self.response_class
        assert response_class is not None
        self.response = response_class(
            self.method,
            self.original_url,
            writer=task,
            continue100=self._continue,
            timer=self._timer,
            request_info=self.request_info,
            traces=self._traces,
            loop=self.loop,
            session=self._session,
        )
        return self.response

    async def close(self) -> None:
        if self.__writer is not None:
            try:
                await self.__writer
            except asyncio.CancelledError:
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise

    def terminate(self) -> None:
        if self.__writer is not None:
            if not self.loop.is_closed():
                self.__writer.cancel()
            self.__writer.remove_done_callback(self.__reset_writer)
            self.__writer = None

    async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
        for trace in self._traces:
            await trace.send_request_chunk_sent(method, url, chunk)

    async def _on_headers_request_sent(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        for trace in self._traces:
            await trace.send_request_headers(method, url, headers)


_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")


class ClientResponse(HeadersMixin):

    # Some of these attributes are None when created,
    # but will be set by the start() method.
    # As the end user will likely never see the None values, we cheat the types below.
    # from the Status-Line of the response
    version: Optional[HttpVersion] = None  # HTTP-Version
    status: int = None  # type: ignore[assignment]  # Status-Code
    reason: Optional[str] = None  # Reason-Phrase

    content: StreamReader = None  # type: ignore[assignment]  # Payload stream
    _body: Optional[bytes] = None
    _headers: CIMultiDictProxy[str] = None  # type: ignore[assignment]
    _history: Tuple["ClientResponse", ...] = ()
    _raw_headers: RawHeaders = None  # type: ignore[assignment]

    _connection: Optional["Connection"] = None  # current connection
    _cookies: Optional[SimpleCookie] = None
    _continue: Optional["asyncio.Future[bool]"] = None
    _source_traceback: Optional[traceback.StackSummary] = None
    _session: Optional["ClientSession"] = None
    # set up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _closed = True  # to allow __del__ for non-initialized properly response
    _released = False
    _in_context = False

    _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"

    __writer: Optional["asyncio.Task[None]"] = None

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        writer: "Optional[asyncio.Task[None]]",
        continue100: Optional["asyncio.Future[bool]"],
        timer: BaseTimerContext,
        request_info: RequestInfo,
        traces: List["Trace"],
        loop: asyncio.AbstractEventLoop,
        session: "ClientSession",
    ) -> None:
        # URL forbids subclasses, so a simple type check is enough.
        assert type(url) is URL

        self.method = method

        self._real_url = url
        self._url = url.with_fragment(None) if url.raw_fragment else url
        if writer is not None:
            self._writer = writer
        if continue100 is not None:
            self._continue = continue100
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._cache: Dict[str, Any] = {}
        self._traces = traces
        self._loop = loop
        # Save reference to _resolve_charset, so that get_encoding() will still
        # work after the response has finished reading the body.
        # TODO: Fix session=None in tests (see ClientRequest.__init__).
        if session is not None:
            # store a reference to session #1985
            self._session = session
            self._resolve_charset = session._resolve_charset
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __reset_writer(self, _: object = None) -> None:
        self.__writer = None

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        """The writer task for streaming data.

        _writer is only provided for backwards compatibility
        for subclasses that may need to access it.
        """
        return self.__writer

    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        """Set the writer task for streaming data."""
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is None:
            return
        if writer.done():
            # The writer is already done, so we can clear it immediately.
            self.__writer = None
        else:
            writer.add_done_callback(self.__reset_writer)

    @property
    def cookies(self) -> SimpleCookie:
        if self._cookies is None:
            self._cookies = SimpleCookie()
        return self._cookies

    @cookies.setter
    def cookies(self, cookies: SimpleCookie) -> None:
        self._cookies = cookies

    @reify
    def url(self) -> URL:
        return self._url

    @reify
    def url_obj(self) -> URL:
        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url

    @reify
    def real_url(self) -> URL:
        return self._real_url

    @reify
    def host(self) -> str:
        assert self._url.host is not None
        return self._url.host

    @reify
    def headers(self) -> "CIMultiDictProxy[str]":
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        return self._raw_headers

    @reify
    def request_info(self) -> RequestInfo:
        return self._request_info

    @reify
    def content_disposition(self) -> Optional[ContentDisposition]:
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params_dct = multipart.parse_content_disposition(raw)
        params = MappingProxyType(params_dct)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)

    def __del__(self, _warnings: Any = warnings) -> None:
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

            if self._loop.get_debug():
                kwargs = {"source": self}
                _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
                context = {"client_response": self, "message": "Unclosed response"}
                if self._source_traceback:
                    context["source_traceback"] = self._source_traceback
                self._loop.call_exception_handler(context)

    def __repr__(self) -> str:
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            ascii_encodable_reason = self.reason.encode(
                "ascii", "backslashreplace"
            ).decode("ascii")
        else:
            ascii_encodable_reason = "None"
        print(
            "<ClientResponse({}) [{} {}]>".format(
                ascii_encodable_url, self.status, ascii_encodable_reason
            ),
            file=out,
        )
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self) -> Optional["Connection"]:
        return self._connection

    @reify
    def history(self) -> Tuple["ClientResponse", ...]:
        """A sequence of responses, if redirects occurred."""
        return self._history

    @reify
    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()

        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            params = params_str.split(";")[1:]

            link: MultiDict[Union[str, URL]] = MultiDict()

            for param in params:
                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            key = link.get("rel", url)

            link.add("url", self.url.join(URL(url)))

            links.add(str(key), MultiDictProxy(link))

        return MultiDictProxy(links)

    async def start(self, connection: "Connection") -> "ClientResponse":
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        with self._timer:
            while True:
                # read response
                try:
                    protocol = self._protocol
                    message, payload = await protocol.read()  # type: ignore[union-attr]
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info,
                        self.history,
                        status=exc.code,
                        message=exc.message,
                        headers=exc.headers,
                    ) from exc

                if message.code < 100 or message.code > 199 or message.code == 101:
                    break

                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self._headers = message.headers  # type is CIMultiDictProxy
        self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

        # payload
        self.content = payload

        # cookies
        if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
            cookies = SimpleCookie()
            for hdr in cookie_hdrs:
                try:
                    cookies.load(hdr)
                except CookieError as exc:
                    client_logger.warning("Can not load response cookies: %s", exc)
            self._cookies = cookies
        return self

    def _response_eof(self) -> None:
        if self._closed:
            return

        # protocol could be None because connection could be detached
        protocol = self._connection and self._connection.protocol
        if protocol is not None and protocol.upgraded:
            return

        self._closed = True
        self._cleanup_writer()
        self._release_connection()

    @property
    def closed(self) -> bool:
        return self._closed

    def close(self) -> None:
        if not self._released:
            self._notify_content()

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        self._cleanup_writer()
        if self._connection is not None:
            self._connection.close()
            self._connection = None

    def release(self) -> Any:
        if not self._released:
            self._notify_content()

        self._closed = True

        self._cleanup_writer()
        self._release_connection()
        return noop()

    @property
    def ok(self) -> bool:
        """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

        This is **not** a check for ``200 OK`` but a check that the response
        status is under 400.
        """
        return 400 > self.status

    def raise_for_status(self) -> None:
        if not self.ok:
            # reason should always be not None for a started response
            assert self.reason is not None

            # If we're in a context we can rely on __aexit__() to release as the
            # exception propagates.
            if not self._in_context:
                self.release()

            raise ClientResponseError(
                self.request_info,
                self.history,
                status=self.status,
                message=self.reason,
                headers=self.headers,
            )

    def _release_connection(self) -> None:
        if self._connection is not None:
            if self.__writer is None:
                self._connection.release()
                self._connection = None
            else:
                self.__writer.add_done_callback(lambda f: self._release_connection())

    async def _wait_released(self) -> None:
        if self.__writer is not None:
            try:
                await self.__writer
            except asyncio.CancelledError:
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        self._release_connection()

    def _cleanup_writer(self) -> None:
        if self.__writer is not None:
            self.__writer.cancel()
        self._session = None

    def _notify_content(self) -> None:
        content = self.content
        if content and content.exception() is None:
            set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
        self._released = True

    async def wait_for_close(self) -> None:
        if self.__writer is not None:
            try:
                await self.__writer
            except asyncio.CancelledError:
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        self.release()

    async def read(self) -> bytes:
        """Read response payload."""
        if self._body is None:
            try:
                self._body = await self.content.read()
                for trace in self._traces:
                    await trace.send_response_chunk_received(
                        self.method, self.url, self._body
                    )
            except BaseException:
                self.close()
                raise
        elif self._released:  # Response explicitly released
            raise ClientConnectionError("Connection closed")

        protocol = self._connection and self._connection.protocol
        if protocol is None or not protocol.upgraded:
            await self._wait_released()  # Underlying connection released
        return self._body

    def get_encoding(self) -> str:
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get("charset")
        if encoding:
            with contextlib.suppress(LookupError, ValueError):
                return codecs.lookup(encoding).name

        if mimetype.type == "application" and (
            mimetype.subtype == "json" or mimetype.subtype == "rdap"
        ):
            # RFC 7159 states that the default encoding is UTF-8.
            # RFC 7483 defines application/rdap+json
            return "utf-8"

        if self._body is None:
            raise RuntimeError(
                "Cannot compute fallback encoding of a not yet read body"
            )

        return self._resolve_charset(self, self._body)

    async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
        """Read response payload and decode."""
        if self._body is None:
            await self.read()

        if encoding is None:
            encoding = self.get_encoding()

        return self._body.decode(encoding, errors=errors)  # type: ignore[union-attr]

    async def json(
        self,
        *,
        encoding: Optional[str] = None,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Read and decode JSON response."""
        if self._body is None:
            await self.read()

        if content_type:
            ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
            if not _is_expected_content_type(ctype, content_type):
                raise ContentTypeError(
                    self.request_info,
                    self.history,
                    status=self.status,
                    message=(
                        "Attempt to decode JSON with unexpected mimetype: %s" % ctype
                    ),
                    headers=self.headers,
                )

        stripped = self._body.strip()  # type: ignore[union-attr]
        if not stripped:
            return None

        if encoding is None:
            encoding = self.get_encoding()

        return loads(stripped.decode(encoding))

    async def __aenter__(self) -> "ClientResponse":
        self._in_context = True
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        self._in_context = False
        # similar to _RequestContextManager, we do not need to check
        # for exceptions, response object can close connection
        # if state is broken
        self.release()
        await self.wait_for_close()
venv/Lib/site-packages/aiohttp/client_ws.py
ADDED
@@ -0,0 +1,428 @@
"""WebSocket client for asyncio."""

import asyncio
import sys
from types import TracebackType
from typing import Any, Optional, Type, cast

import attr

from ._websocket.reader import WebSocketDataQueue
from .client_exceptions import ClientError, ServerTimeoutError, WSMessageTypeError
from .client_reqrep import ClientResponse
from .helpers import calculate_timeout_when, set_result
from .http import (
    WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE,
    WebSocketError,
    WSCloseCode,
    WSMessage,
    WSMsgType,
)
from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter
from .streams import EofStream
from .typedefs import (
    DEFAULT_JSON_DECODER,
    DEFAULT_JSON_ENCODER,
    JSONDecoder,
    JSONEncoder,
)

if sys.version_info >= (3, 11):
    import asyncio as async_timeout
else:
    import async_timeout


@attr.s(frozen=True, slots=True)
class ClientWSTimeout:
    ws_receive = attr.ib(type=Optional[float], default=None)
    ws_close = attr.ib(type=Optional[float], default=None)


DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0)


class ClientWebSocketResponse:
    def __init__(
        self,
        reader: WebSocketDataQueue,
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: ClientWSTimeout,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        self._heartbeat_when: float = 0.0
        if heartbeat is not None:
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: bool = False
        self._close_wait: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover
        self._ping_task: Optional[asyncio.Task[None]] = None

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        self._cancel_pong_response_cb()
        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None
        if self._ping_task is not None:
            self._ping_task.cancel()
            self._ping_task = None

    def _cancel_pong_response_cb(self) -> None:
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

    def _reset_heartbeat(self) -> None:
        if self._heartbeat is None:
            return
        self._cancel_pong_response_cb()
        loop = self._loop
        assert loop is not None
        conn = self._conn
        timeout_ceil_threshold = (
            conn._connector._timeout_ceil_threshold if conn is not None else 5
        )
        now = loop.time()
        when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold)
        self._heartbeat_when = when
        if self._heartbeat_cb is None:
            # We do not cancel the previous heartbeat_cb here because
            # it generates a significant amount of TimerHandle churn
            # which causes asyncio to rebuild the heap frequently.
            # Instead _send_heartbeat() will reschedule the next
            # heartbeat if it fires too early.
            self._heartbeat_cb = loop.call_at(when, self._send_heartbeat)

    def _send_heartbeat(self) -> None:
        self._heartbeat_cb = None
        loop = self._loop
        now = loop.time()
        if now < self._heartbeat_when:
            # Heartbeat fired too early, reschedule
            self._heartbeat_cb = loop.call_at(
                self._heartbeat_when, self._send_heartbeat
            )
            return

        conn = self._conn
        timeout_ceil_threshold = (
            conn._connector._timeout_ceil_threshold if conn is not None else 5
        )
        when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold)
        self._cancel_pong_response_cb()
        self._pong_response_cb = loop.call_at(when, self._pong_not_received)

        coro = self._writer.send_frame(b"", WSMsgType.PING)
        if sys.version_info >= (3, 12):
            # Optimization for Python 3.12, try to send the ping
            # immediately to avoid having to schedule
            # the task on the event loop.
            ping_task = asyncio.Task(coro, loop=loop, eager_start=True)
        else:
            ping_task = loop.create_task(coro)

        if not ping_task.done():
            self._ping_task = ping_task
            ping_task.add_done_callback(self._ping_task_done)
        else:
            self._ping_task_done(ping_task)

    def _ping_task_done(self, task: "asyncio.Task[None]") -> None:
        """Callback for when the ping task completes."""
        if not task.cancelled() and (exc := task.exception()):
            self._handle_ping_pong_exception(exc)
        self._ping_task = None

    def _pong_not_received(self) -> None:
        self._handle_ping_pong_exception(
            ServerTimeoutError(f"No PONG received after {self._pong_heartbeat} seconds")
        )

    def _handle_ping_pong_exception(self, exc: BaseException) -> None:
        """Handle exceptions raised during ping/pong processing."""
        if self._closed:
            return
        self._set_closed()
        self._close_code = WSCloseCode.ABNORMAL_CLOSURE
        self._exception = exc
        self._response.close()
        if self._waiting and not self._closing:
            self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0)

    def _set_closed(self) -> None:
        """Set the connection to closed.

        Cancel any heartbeat timers and set the closed flag.
        """
        self._closed = True
        self._cancel_heartbeat()

    def _set_closing(self) -> None:
        """Set the connection to closing.

        Cancel any heartbeat timers and set the closing flag.
        """
        self._closing = True
        self._cancel_heartbeat()

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        return self._protocol

    @property
    def compress(self) -> int:
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        await self._writer.send_frame(message, WSMsgType.PING)

    async def pong(self, message: bytes = b"") -> None:
        await self._writer.send_frame(message, WSMsgType.PONG)

    async def send_frame(
        self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket."""
        await self._writer.send_frame(message, opcode, compress)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send_frame(
            data.encode("utf-8"), WSMsgType.TEXT, compress=compress
        )

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting and not self._closing:
            assert self._loop is not None
            self._close_wait = self._loop.create_future()
            self._set_closing()
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._close_wait

        if self._closed:
            return False

        self._set_closed()
        try:
            await self._writer.close(code, message)
        except asyncio.CancelledError:
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._response.close()
            raise
        except Exception as exc:
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = exc
            self._response.close()
            return True

        if self._close_code:
            self._response.close()
            return True

        while True:
            try:
                async with async_timeout.timeout(self._timeout.ws_close):
                    msg = await self._reader.read()
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if msg.type is WSMsgType.CLOSE:
                self._close_code = msg.data
                self._response.close()
                return True

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        receive_timeout = timeout or self._timeout.ws_receive

        while True:
            if self._waiting:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = True
                try:
                    if receive_timeout:
                        # Entering the context manager and creating
                        # Timeout() object can take almost 50% of the
                        # run time in this loop so we avoid it if
                        # there is no read timeout.
                        async with async_timeout.timeout(receive_timeout):
                            msg = await self._reader.read()
                    else:
                        msg = await self._reader.read()
                    self._reset_heartbeat()
                finally:
                    self._waiting = False
                    if self._close_wait:
                        set_result(self._close_wait, None)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                # Likely ServerDisconnectedError when connection is lost
                self._set_closed()
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._set_closing()
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type not in _INTERNAL_RECEIVE_TYPES:
                # If it's not a close/closing/ping/pong message
                # we can return it immediately
                return msg

            if msg.type is WSMsgType.CLOSE:
                self._set_closing()
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type is WSMsgType.CLOSING:
                self._set_closing()
            elif msg.type is WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type is WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        msg = await self.receive(timeout)
        if msg.type is not WSMsgType.TEXT:
            raise WSMessageTypeError(
                f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT"
            )
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        msg = await self.receive(timeout)
        if msg.type is not WSMsgType.BINARY:
            raise WSMessageTypeError(
                f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY"
            )
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg

    async def __aenter__(self) -> "ClientWebSocketResponse":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        await self.close()
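For orientation, a minimal usage sketch of the class above (not part of the diff). It assumes the public API of this aiohttp version: `ClientSession.ws_connect()` returns a `ClientWebSocketResponse`, and `ClientWSTimeout` (declared in client_ws.py above) is re-exported at the package top level; the URL is a placeholder.

import asyncio

import aiohttp


async def main() -> None:
    # ws_close=5.0 caps the closing handshake; ws_receive=None leaves
    # receive() unbounded, matching DEFAULT_WS_CLIENT_TIMEOUT above.
    ws_timeout = aiohttp.ClientWSTimeout(ws_receive=None, ws_close=5.0)
    async with aiohttp.ClientSession() as session:
        # heartbeat=30.0 exercises the ping/pong machinery above: a PING
        # frame is sent roughly every 30s, and _pong_not_received() tears
        # the connection down with ABNORMAL_CLOSURE if no PONG arrives
        # within heartbeat / 2 seconds.
        async with session.ws_connect(
            "wss://example.invalid/ws", timeout=ws_timeout, heartbeat=30.0
        ) as ws:
            await ws.send_str("hello")
            # __aiter__/__anext__ above stop iteration on CLOSE/CLOSING/CLOSED.
            async for msg in ws:
                if msg.type is aiohttp.WSMsgType.TEXT:
                    print(msg.data)
                elif msg.type is aiohttp.WSMsgType.ERROR:
                    break


if __name__ == "__main__":
    asyncio.run(main())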
venv/Lib/site-packages/aiohttp/compression_utils.py
ADDED
@@ -0,0 +1,173 @@
import asyncio
import zlib
from concurrent.futures import Executor
from typing import Optional, cast

try:
    try:
        import brotlicffi as brotli
    except ImportError:
        import brotli

    HAS_BROTLI = True
except ImportError:  # pragma: no cover
    HAS_BROTLI = False

MAX_SYNC_CHUNK_SIZE = 1024


def encoding_to_mode(
    encoding: Optional[str] = None,
    suppress_deflate_header: bool = False,
) -> int:
    if encoding == "gzip":
        return 16 + zlib.MAX_WBITS

    return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS


class ZlibBaseHandler:
    def __init__(
        self,
        mode: int,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        self._mode = mode
        self._executor = executor
        self._max_sync_chunk_size = max_sync_chunk_size


class ZLibCompressor(ZlibBaseHandler):
    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        level: Optional[int] = None,
        wbits: Optional[int] = None,
        strategy: int = zlib.Z_DEFAULT_STRATEGY,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        super().__init__(
            mode=(
                encoding_to_mode(encoding, suppress_deflate_header)
                if wbits is None
                else wbits
            ),
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        if level is None:
            self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
        else:
            self._compressor = zlib.compressobj(
                wbits=self._mode, strategy=strategy, level=level
            )
        self._compress_lock = asyncio.Lock()

    def compress_sync(self, data: bytes) -> bytes:
        return self._compressor.compress(data)

    async def compress(self, data: bytes) -> bytes:
        """Compress the data and return the compressed bytes.

        Note that flush() must be called after the last call to compress()

        If the data size is larger than the max_sync_chunk_size, the compression
        will be done in the executor. Otherwise, the compression will be done
        in the event loop.
        """
        async with self._compress_lock:
            # To ensure the stream is consistent in the event
            # there are multiple writers, we need to lock
            # the compressor so that only one writer can
            # compress at a time.
            if (
                self._max_sync_chunk_size is not None
                and len(data) > self._max_sync_chunk_size
            ):
                return await asyncio.get_running_loop().run_in_executor(
                    self._executor, self._compressor.compress, data
                )
            return self.compress_sync(data)

    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
        return self._compressor.flush(mode)


class ZLibDecompressor(ZlibBaseHandler):
    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        super().__init__(
            mode=encoding_to_mode(encoding, suppress_deflate_header),
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        self._decompressor = zlib.decompressobj(wbits=self._mode)

    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
        return self._decompressor.decompress(data, max_length)

    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress the data and return the decompressed bytes.

        If the data size is larger than the max_sync_chunk_size, the decompression
        will be done in the executor. Otherwise, the decompression will be done
        in the event loop.
        """
        if (
            self._max_sync_chunk_size is not None
            and len(data) > self._max_sync_chunk_size
        ):
            return await asyncio.get_running_loop().run_in_executor(
                self._executor, self._decompressor.decompress, data, max_length
            )
        return self.decompress_sync(data, max_length)

    def flush(self, length: int = 0) -> bytes:
        return (
            self._decompressor.flush(length)
            if length > 0
            else self._decompressor.flush()
        )

    @property
    def eof(self) -> bool:
        return self._decompressor.eof

    @property
    def unconsumed_tail(self) -> bytes:
        return self._decompressor.unconsumed_tail

    @property
    def unused_data(self) -> bytes:
        return self._decompressor.unused_data


class BrotliDecompressor:
    # Supports both 'brotlipy' and 'Brotli' packages
    # since they share an import name. The top branches
    # are for 'brotlipy' and bottom branches for 'Brotli'
    def __init__(self) -> None:
        if not HAS_BROTLI:
            raise RuntimeError(
                "The brotli decompression is not available. "
                "Please install `Brotli` module"
            )
        self._obj = brotli.Decompressor()

    def decompress_sync(self, data: bytes) -> bytes:
        if hasattr(self._obj, "decompress"):
            return cast(bytes, self._obj.decompress(data))
        return cast(bytes, self._obj.process(data))

    def flush(self) -> bytes:
        if hasattr(self._obj, "flush"):
            return cast(bytes, self._obj.flush())
        return b""
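A hedged round-trip sketch of the two zlib wrappers above (not part of the diff; it imports the module by its in-package path, which is internal aiohttp API and may change). Note the wbits mapping from encoding_to_mode(): "gzip" maps to 16 + MAX_WBITS = 31, plain deflate to 15, and -15 when the deflate header is suppressed.

import asyncio

# Internal module: this import path is not a stable public API.
from aiohttp.compression_utils import ZLibCompressor, ZLibDecompressor


async def main() -> None:
    # The payload is larger than MAX_SYNC_CHUNK_SIZE (1024 bytes), so the
    # compress()/decompress() awaits below take the run_in_executor path.
    payload = b"hello world " * 1024

    compressor = ZLibCompressor(encoding="gzip")  # wbits = 16 + MAX_WBITS = 31
    body = await compressor.compress(payload) + compressor.flush()

    decompressor = ZLibDecompressor(encoding="gzip")
    restored = await decompressor.decompress(body) + decompressor.flush()
    assert restored == payload


if __name__ == "__main__":
    asyncio.run(main())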
venv/Lib/site-packages/aiohttp/connector.py
ADDED
@@ -0,0 +1,1658 @@
1 |
+
import asyncio
|
2 |
+
import functools
|
3 |
+
import random
|
4 |
+
import socket
|
5 |
+
import sys
|
6 |
+
import traceback
|
7 |
+
import warnings
|
8 |
+
from collections import OrderedDict, defaultdict, deque
|
9 |
+
from contextlib import suppress
|
10 |
+
from http import HTTPStatus
|
11 |
+
from itertools import chain, cycle, islice
|
12 |
+
from time import monotonic
|
13 |
+
from types import TracebackType
|
14 |
+
from typing import (
|
15 |
+
TYPE_CHECKING,
|
16 |
+
Any,
|
17 |
+
Awaitable,
|
18 |
+
Callable,
|
19 |
+
DefaultDict,
|
20 |
+
Deque,
|
21 |
+
Dict,
|
22 |
+
Iterator,
|
23 |
+
List,
|
24 |
+
Literal,
|
25 |
+
Optional,
|
26 |
+
Sequence,
|
27 |
+
Set,
|
28 |
+
Tuple,
|
29 |
+
Type,
|
30 |
+
Union,
|
31 |
+
cast,
|
32 |
+
)
|
33 |
+
|
34 |
+
import aiohappyeyeballs
|
35 |
+
|
36 |
+
from . import hdrs, helpers
|
37 |
+
from .abc import AbstractResolver, ResolveResult
|
38 |
+
from .client_exceptions import (
|
39 |
+
ClientConnectionError,
|
40 |
+
ClientConnectorCertificateError,
|
41 |
+
ClientConnectorDNSError,
|
42 |
+
ClientConnectorError,
|
43 |
+
ClientConnectorSSLError,
|
44 |
+
ClientHttpProxyError,
|
45 |
+
ClientProxyConnectionError,
|
46 |
+
ServerFingerprintMismatch,
|
47 |
+
UnixClientConnectorError,
|
48 |
+
cert_errors,
|
49 |
+
ssl_errors,
|
50 |
+
)
|
51 |
+
from .client_proto import ResponseHandler
|
52 |
+
from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
|
53 |
+
from .helpers import (
|
54 |
+
ceil_timeout,
|
55 |
+
is_ip_address,
|
56 |
+
noop,
|
57 |
+
sentinel,
|
58 |
+
set_exception,
|
59 |
+
set_result,
|
60 |
+
)
|
61 |
+
from .resolver import DefaultResolver
|
62 |
+
|
63 |
+
if TYPE_CHECKING:
|
64 |
+
import ssl
|
65 |
+
|
66 |
+
SSLContext = ssl.SSLContext
|
67 |
+
else:
|
68 |
+
try:
|
69 |
+
import ssl
|
70 |
+
|
71 |
+
SSLContext = ssl.SSLContext
|
72 |
+
except ImportError: # pragma: no cover
|
73 |
+
ssl = None # type: ignore[assignment]
|
74 |
+
SSLContext = object # type: ignore[misc,assignment]
|
75 |
+
|
76 |
+
EMPTY_SCHEMA_SET = frozenset({""})
|
77 |
+
HTTP_SCHEMA_SET = frozenset({"http", "https"})
|
78 |
+
WS_SCHEMA_SET = frozenset({"ws", "wss"})
|
79 |
+
|
80 |
+
HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET
|
81 |
+
HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET
|
82 |
+
|
83 |
+
NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < (
|
84 |
+
3,
|
85 |
+
13,
|
86 |
+
1,
|
87 |
+
) or sys.version_info < (3, 12, 7)
|
88 |
+
# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960
|
89 |
+
# which first appeared in Python 3.12.7 and 3.13.1
|
90 |
+
|
91 |
+
|
92 |
+
__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
|
93 |
+
|
94 |
+
|
95 |
+
if TYPE_CHECKING:
|
96 |
+
from .client import ClientTimeout
|
97 |
+
from .client_reqrep import ConnectionKey
|
98 |
+
from .tracing import Trace
|
99 |
+
|
100 |
+
|
101 |
+
class _DeprecationWaiter:
|
102 |
+
__slots__ = ("_awaitable", "_awaited")
|
103 |
+
|
104 |
+
def __init__(self, awaitable: Awaitable[Any]) -> None:
|
105 |
+
self._awaitable = awaitable
|
106 |
+
self._awaited = False
|
107 |
+
|
108 |
+
def __await__(self) -> Any:
|
109 |
+
self._awaited = True
|
110 |
+
return self._awaitable.__await__()
|
111 |
+
|
112 |
+
def __del__(self) -> None:
|
113 |
+
if not self._awaited:
|
114 |
+
warnings.warn(
|
115 |
+
"Connector.close() is a coroutine, "
|
116 |
+
"please use await connector.close()",
|
117 |
+
DeprecationWarning,
|
118 |
+
)
|
119 |
+
|
120 |
+
|
121 |
+
class Connection:
|
122 |
+
|
123 |
+
_source_traceback = None
|
124 |
+
|
125 |
+
def __init__(
|
126 |
+
self,
|
127 |
+
connector: "BaseConnector",
|
128 |
+
key: "ConnectionKey",
|
129 |
+
protocol: ResponseHandler,
|
130 |
+
loop: asyncio.AbstractEventLoop,
|
131 |
+
) -> None:
|
132 |
+
self._key = key
|
133 |
+
self._connector = connector
|
134 |
+
self._loop = loop
|
135 |
+
self._protocol: Optional[ResponseHandler] = protocol
|
136 |
+
self._callbacks: List[Callable[[], None]] = []
|
137 |
+
|
138 |
+
if loop.get_debug():
|
139 |
+
self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
140 |
+
|
141 |
+
def __repr__(self) -> str:
|
142 |
+
return f"Connection<{self._key}>"
|
143 |
+
|
144 |
+
def __del__(self, _warnings: Any = warnings) -> None:
|
145 |
+
if self._protocol is not None:
|
146 |
+
kwargs = {"source": self}
|
147 |
+
_warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
|
148 |
+
if self._loop.is_closed():
|
149 |
+
return
|
150 |
+
|
151 |
+
self._connector._release(self._key, self._protocol, should_close=True)
|
152 |
+
|
153 |
+
context = {"client_connection": self, "message": "Unclosed connection"}
|
154 |
+
if self._source_traceback is not None:
|
155 |
+
context["source_traceback"] = self._source_traceback
|
156 |
+
self._loop.call_exception_handler(context)
|
157 |
+
|
158 |
+
def __bool__(self) -> Literal[True]:
|
159 |
+
"""Force subclasses to not be falsy, to make checks simpler."""
|
160 |
+
return True
|
161 |
+
|
162 |
+
@property
|
163 |
+
def loop(self) -> asyncio.AbstractEventLoop:
|
164 |
+
warnings.warn(
|
165 |
+
"connector.loop property is deprecated", DeprecationWarning, stacklevel=2
|
166 |
+
)
|
167 |
+
return self._loop
|
168 |
+
|
169 |
+
@property
|
170 |
+
def transport(self) -> Optional[asyncio.Transport]:
|
171 |
+
if self._protocol is None:
|
172 |
+
return None
|
173 |
+
return self._protocol.transport
|
174 |
+
|
175 |
+
@property
|
176 |
+
def protocol(self) -> Optional[ResponseHandler]:
|
177 |
+
return self._protocol
|
178 |
+
|
179 |
+
def add_callback(self, callback: Callable[[], None]) -> None:
|
180 |
+
if callback is not None:
|
181 |
+
self._callbacks.append(callback)
|
182 |
+
|
183 |
+
def _notify_release(self) -> None:
|
184 |
+
callbacks, self._callbacks = self._callbacks[:], []
|
185 |
+
|
186 |
+
for cb in callbacks:
|
187 |
+
with suppress(Exception):
|
188 |
+
cb()
|
189 |
+
|
190 |
+
def close(self) -> None:
|
191 |
+
self._notify_release()
|
192 |
+
|
193 |
+
if self._protocol is not None:
|
194 |
+
self._connector._release(self._key, self._protocol, should_close=True)
|
195 |
+
self._protocol = None
|
196 |
+
|
197 |
+
def release(self) -> None:
|
198 |
+
self._notify_release()
|
199 |
+
|
200 |
+
if self._protocol is not None:
|
201 |
+
self._connector._release(self._key, self._protocol)
|
202 |
+
self._protocol = None
|
203 |
+
|
204 |
+
@property
|
205 |
+
def closed(self) -> bool:
|
206 |
+
return self._protocol is None or not self._protocol.is_connected()
|
207 |
+
|
208 |
+
|
209 |
+
class _TransportPlaceholder:
|
210 |
+
"""placeholder for BaseConnector.connect function"""
|
211 |
+
|
212 |
+
__slots__ = ()
|
213 |
+
|
214 |
+
def close(self) -> None:
|
215 |
+
"""Close the placeholder transport."""
|
216 |
+
|
217 |
+
|
218 |
+
class BaseConnector:
|
219 |
+
"""Base connector class.
|
220 |
+
|
221 |
+
keepalive_timeout - (optional) Keep-alive timeout.
|
222 |
+
force_close - Set to True to force close and do reconnect
|
223 |
+
after each request (and between redirects).
|
224 |
+
limit - The total number of simultaneous connections.
|
225 |
+
limit_per_host - Number of simultaneous connections to one host.
|
226 |
+
enable_cleanup_closed - Enables clean-up closed ssl transports.
|
227 |
+
Disabled by default.
|
228 |
+
timeout_ceil_threshold - Trigger ceiling of timeout values when
|
229 |
+
it's above timeout_ceil_threshold.
|
230 |
+
loop - Optional event loop.
|
231 |
+
"""
|
232 |
+
|
233 |
+
_closed = True # prevent AttributeError in __del__ if ctor was failed
|
234 |
+
_source_traceback = None
|
235 |
+
|
236 |
+
# abort transport after 2 seconds (cleanup broken connections)
|
237 |
+
_cleanup_closed_period = 2.0
|
238 |
+
|
239 |
+
allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET
|
240 |
+
|
241 |
+
def __init__(
|
242 |
+
self,
|
243 |
+
*,
|
244 |
+
keepalive_timeout: Union[object, None, float] = sentinel,
|
245 |
+
force_close: bool = False,
|
246 |
+
limit: int = 100,
|
247 |
+
limit_per_host: int = 0,
|
248 |
+
enable_cleanup_closed: bool = False,
|
249 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
250 |
+
timeout_ceil_threshold: float = 5,
|
251 |
+
) -> None:
|
252 |
+
|
253 |
+
if force_close:
|
254 |
+
if keepalive_timeout is not None and keepalive_timeout is not sentinel:
|
255 |
+
raise ValueError(
|
256 |
+
"keepalive_timeout cannot be set if force_close is True"
|
257 |
+
)
|
258 |
+
else:
|
259 |
+
if keepalive_timeout is sentinel:
|
260 |
+
keepalive_timeout = 15.0
|
261 |
+
|
262 |
+
loop = loop or asyncio.get_running_loop()
|
263 |
+
self._timeout_ceil_threshold = timeout_ceil_threshold
|
264 |
+
|
265 |
+
self._closed = False
|
266 |
+
if loop.get_debug():
|
267 |
+
self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
268 |
+
|
269 |
+
# Connection pool of reusable connections.
|
270 |
+
# We use a deque to store connections because it has O(1) popleft()
|
271 |
+
# and O(1) append() operations to implement a FIFO queue.
|
272 |
+
self._conns: DefaultDict[
|
273 |
+
ConnectionKey, Deque[Tuple[ResponseHandler, float]]
|
274 |
+
] = defaultdict(deque)
|
275 |
+
self._limit = limit
|
276 |
+
self._limit_per_host = limit_per_host
|
277 |
+
self._acquired: Set[ResponseHandler] = set()
|
278 |
+
self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = (
|
279 |
+
defaultdict(set)
|
280 |
+
)
|
281 |
+
self._keepalive_timeout = cast(float, keepalive_timeout)
|
282 |
+
self._force_close = force_close
|
283 |
+
|
284 |
+
# {host_key: FIFO list of waiters}
|
285 |
+
# The FIFO is implemented with an OrderedDict with None keys because
|
286 |
+
# python does not have an ordered set.
|
287 |
+
self._waiters: DefaultDict[
|
288 |
+
ConnectionKey, OrderedDict[asyncio.Future[None], None]
|
289 |
+
] = defaultdict(OrderedDict)
|
290 |
+
|
291 |
+
self._loop = loop
|
292 |
+
self._factory = functools.partial(ResponseHandler, loop=loop)
|
293 |
+
|
294 |
+
# start keep-alive connection cleanup task
|
295 |
+
self._cleanup_handle: Optional[asyncio.TimerHandle] = None
|
296 |
+
|
297 |
+
# start cleanup closed transports task
|
298 |
+
self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
|
299 |
+
|
300 |
+
if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED:
|
301 |
+
warnings.warn(
|
302 |
+
"enable_cleanup_closed ignored because "
|
303 |
+
"https://github.com/python/cpython/pull/118960 is fixed "
|
304 |
+
f"in Python version {sys.version_info}",
|
305 |
+
DeprecationWarning,
|
306 |
+
stacklevel=2,
|
307 |
+
)
|
308 |
+
enable_cleanup_closed = False
|
309 |
+
|
310 |
+
self._cleanup_closed_disabled = not enable_cleanup_closed
|
311 |
+
self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
|
312 |
+
self._cleanup_closed()
|
313 |
+
|
314 |
+
def __del__(self, _warnings: Any = warnings) -> None:
|
315 |
+
if self._closed:
|
316 |
+
return
|
317 |
+
if not self._conns:
|
318 |
+
return
|
319 |
+
|
320 |
+
conns = [repr(c) for c in self._conns.values()]
|
321 |
+
|
322 |
+
self._close()
|
323 |
+
|
324 |
+
kwargs = {"source": self}
|
325 |
+
_warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
|
326 |
+
context = {
|
327 |
+
"connector": self,
|
328 |
+
"connections": conns,
|
329 |
+
"message": "Unclosed connector",
|
330 |
+
}
|
331 |
+
if self._source_traceback is not None:
|
332 |
+
context["source_traceback"] = self._source_traceback
|
333 |
+
self._loop.call_exception_handler(context)
|
334 |
+
|
335 |
+
def __enter__(self) -> "BaseConnector":
|
336 |
+
warnings.warn(
|
337 |
+
'"with Connector():" is deprecated, '
|
338 |
+
'use "async with Connector():" instead',
|
339 |
+
DeprecationWarning,
|
340 |
+
)
|
341 |
+
return self
|
342 |
+
|
343 |
+
def __exit__(self, *exc: Any) -> None:
|
344 |
+
self._close()
|
345 |
+
|
346 |
+
async def __aenter__(self) -> "BaseConnector":
|
347 |
+
return self
|
348 |
+
|
349 |
+
async def __aexit__(
|
350 |
+
self,
|
351 |
+
exc_type: Optional[Type[BaseException]] = None,
|
352 |
+
exc_value: Optional[BaseException] = None,
|
353 |
+
exc_traceback: Optional[TracebackType] = None,
|
354 |
+
) -> None:
|
355 |
+
await self.close()
|
356 |
+
|
357 |
+
@property
|
358 |
+
def force_close(self) -> bool:
|
359 |
+
"""Ultimately close connection on releasing if True."""
|
360 |
+
return self._force_close
|
361 |
+
|
362 |
+
@property
|
363 |
+
def limit(self) -> int:
|
364 |
+
"""The total number for simultaneous connections.
|
365 |
+
|
366 |
+
If limit is 0 the connector has no limit.
|
367 |
+
The default limit size is 100.
|
368 |
+
"""
|
369 |
+
return self._limit
|
370 |
+
|
371 |
+
@property
|
372 |
+
def limit_per_host(self) -> int:
|
373 |
+
"""The limit for simultaneous connections to the same endpoint.
|
374 |
+
|
375 |
+
Endpoints are the same if they are have equal
|
376 |
+
(host, port, is_ssl) triple.
|
377 |
+
"""
|
378 |
+
return self._limit_per_host
|
379 |
+
|
380 |
+
def _cleanup(self) -> None:
|
381 |
+
"""Cleanup unused transports."""
|
382 |
+
if self._cleanup_handle:
|
383 |
+
self._cleanup_handle.cancel()
|
384 |
+
# _cleanup_handle should be unset, otherwise _release() will not
|
385 |
+
# recreate it ever!
|
386 |
+
self._cleanup_handle = None
|
387 |
+
|
388 |
+
now = monotonic()
|
389 |
+
timeout = self._keepalive_timeout
|
390 |
+
|
391 |
+
if self._conns:
|
392 |
+
connections = defaultdict(deque)
|
393 |
+
deadline = now - timeout
|
394 |
+
for key, conns in self._conns.items():
|
395 |
+
alive: Deque[Tuple[ResponseHandler, float]] = deque()
|
396 |
+
for proto, use_time in conns:
|
397 |
+
if proto.is_connected() and use_time - deadline >= 0:
|
398 |
+
alive.append((proto, use_time))
|
399 |
+
continue
|
400 |
+
transport = proto.transport
|
401 |
+
proto.close()
|
402 |
+
if not self._cleanup_closed_disabled and key.is_ssl:
|
403 |
+
self._cleanup_closed_transports.append(transport)
|
404 |
+
|
405 |
+
if alive:
|
406 |
+
connections[key] = alive
|
407 |
+
|
408 |
+
self._conns = connections
|
409 |
+
|
410 |
+
if self._conns:
|
411 |
+
self._cleanup_handle = helpers.weakref_handle(
|
412 |
+
self,
|
413 |
+
"_cleanup",
|
414 |
+
timeout,
|
415 |
+
self._loop,
|
416 |
+
timeout_ceil_threshold=self._timeout_ceil_threshold,
|
417 |
+
)
|
418 |
+
|
419 |
+
def _cleanup_closed(self) -> None:
|
420 |
+
"""Double confirmation for transport close.
|
421 |
+
|
422 |
+
Some broken ssl servers may leave socket open without proper close.
|
423 |
+
"""
|
424 |
+
if self._cleanup_closed_handle:
|
425 |
+
self._cleanup_closed_handle.cancel()
|
426 |
+
|
427 |
+
for transport in self._cleanup_closed_transports:
|
428 |
+
if transport is not None:
|
429 |
+
transport.abort()
|
430 |
+
|
431 |
+
self._cleanup_closed_transports = []
|
432 |
+
|
433 |
+
if not self._cleanup_closed_disabled:
|
434 |
+
self._cleanup_closed_handle = helpers.weakref_handle(
|
435 |
+
self,
|
436 |
+
"_cleanup_closed",
|
437 |
+
self._cleanup_closed_period,
|
438 |
+
self._loop,
|
439 |
+
timeout_ceil_threshold=self._timeout_ceil_threshold,
|
440 |
+
)
|
441 |
+
|
442 |
+
def close(self) -> Awaitable[None]:
|
443 |
+
"""Close all opened transports."""
|
444 |
+
self._close()
|
445 |
+
return _DeprecationWaiter(noop())
|
446 |
+
|
447 |
+
def _close(self) -> None:
|
448 |
+
if self._closed:
|
449 |
+
return
|
450 |
+
|
451 |
+
self._closed = True
|
452 |
+
|
453 |
+
try:
|
454 |
+
if self._loop.is_closed():
|
455 |
+
return
|
456 |
+
|
457 |
+
# cancel cleanup task
|
458 |
+
if self._cleanup_handle:
|
459 |
+
self._cleanup_handle.cancel()
|
460 |
+
|
461 |
+
# cancel cleanup close task
|
462 |
+
if self._cleanup_closed_handle:
|
463 |
+
self._cleanup_closed_handle.cancel()
|
464 |
+
|
465 |
+
for data in self._conns.values():
|
466 |
+
for proto, t0 in data:
|
467 |
+
proto.close()
|
468 |
+
|
469 |
+
for proto in self._acquired:
|
470 |
+
proto.close()
|
471 |
+
|
472 |
+
for transport in self._cleanup_closed_transports:
|
473 |
+
if transport is not None:
|
474 |
+
transport.abort()
|
475 |
+
|
476 |
+
finally:
|
477 |
+
self._conns.clear()
|
478 |
+
self._acquired.clear()
|
479 |
+
for keyed_waiters in self._waiters.values():
|
480 |
+
for keyed_waiter in keyed_waiters:
|
481 |
+
keyed_waiter.cancel()
|
482 |
+
self._waiters.clear()
|
483 |
+
self._cleanup_handle = None
|
484 |
+
self._cleanup_closed_transports.clear()
|
485 |
+
self._cleanup_closed_handle = None
|
486 |
+
|
487 |
+
@property
|
488 |
+
def closed(self) -> bool:
|
489 |
+
"""Is connector closed.
|
490 |
+
|
491 |
+
A readonly property.
|
492 |
+
"""
|
493 |
+
return self._closed
|
494 |
+
|
495 |
+
def _available_connections(self, key: "ConnectionKey") -> int:
|
496 |
+
"""
|
497 |
+
Return number of available connections.
|
498 |
+
|
499 |
+
The limit, limit_per_host and the connection key are taken into account.
|
500 |
+
|
501 |
+
If it returns less than 1 means that there are no connections
|
502 |
+
available.
|
503 |
+
"""
|
504 |
+
# check total available connections
|
505 |
+
# If there are no limits, this will always return 1
|
506 |
+
total_remain = 1
|
507 |
+
|
508 |
+
if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0:
|
509 |
+
return total_remain
|
510 |
+
|
511 |
+
# check limit per host
|
512 |
+
if host_remain := self._limit_per_host:
|
513 |
+
if acquired := self._acquired_per_host.get(key):
|
514 |
+
host_remain -= len(acquired)
|
515 |
+
if total_remain > host_remain:
|
516 |
+
return host_remain
|
517 |
+
|
518 |
+
return total_remain
|
519 |
+
|
520 |
+
async def connect(
|
521 |
+
self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
|
522 |
+
) -> Connection:
|
523 |
+
"""Get from pool or create new connection."""
|
524 |
+
key = req.connection_key
|
525 |
+
if (conn := await self._get(key, traces)) is not None:
|
526 |
+
# If we do not have to wait and we can get a connection from the pool
|
527 |
+
# we can avoid the timeout ceil logic and directly return the connection
|
528 |
+
return conn
|
529 |
+
|
530 |
+
async with ceil_timeout(timeout.connect, timeout.ceil_threshold):
|
531 |
+
if self._available_connections(key) <= 0:
|
532 |
+
await self._wait_for_available_connection(key, traces)
|
533 |
+
if (conn := await self._get(key, traces)) is not None:
|
534 |
+
return conn
|
535 |
+
|
536 |
+
placeholder = cast(ResponseHandler, _TransportPlaceholder())
|
537 |
+
self._acquired.add(placeholder)
|
538 |
+
if self._limit_per_host:
|
539 |
+
self._acquired_per_host[key].add(placeholder)
|
540 |
+
|
541 |
+
try:
|
542 |
+
# Traces are done inside the try block to ensure that the
|
543 |
+
# that the placeholder is still cleaned up if an exception
|
544 |
+
# is raised.
|
545 |
+
if traces:
|
546 |
+
for trace in traces:
|
547 |
+
await trace.send_connection_create_start()
|
548 |
+
proto = await self._create_connection(req, traces, timeout)
|
549 |
+
if traces:
|
550 |
+
for trace in traces:
|
551 |
+
await trace.send_connection_create_end()
|
552 |
+
except BaseException:
|
553 |
+
self._release_acquired(key, placeholder)
|
554 |
+
raise
|
555 |
+
else:
|
556 |
+
if self._closed:
|
557 |
+
proto.close()
|
558 |
+
raise ClientConnectionError("Connector is closed.")
|
559 |
+
|
560 |
+
# The connection was successfully created, drop the placeholder
|
561 |
+
# and add the real connection to the acquired set. There should
|
562 |
+
# be no awaits after the proto is added to the acquired set
|
563 |
+
# to ensure that the connection is not left in the acquired set
|
564 |
+
# on cancellation.
|
565 |
+
self._acquired.remove(placeholder)
|
566 |
+
self._acquired.add(proto)
|
567 |
+
if self._limit_per_host:
|
568 |
+
acquired_per_host = self._acquired_per_host[key]
|
569 |
+
acquired_per_host.remove(placeholder)
|
570 |
+
acquired_per_host.add(proto)
|
571 |
+
return Connection(self, key, proto, self._loop)
|
572 |
+
|
573 |
+
async def _wait_for_available_connection(
|
574 |
+
self, key: "ConnectionKey", traces: List["Trace"]
|
575 |
+
) -> None:
|
576 |
+
"""Wait for an available connection slot."""
|
577 |
+
# We loop here because there is a race between
|
578 |
+
# the connection limit check and the connection
|
579 |
+
# being acquired. If the connection is acquired
|
580 |
+
# between the check and the await statement, we
|
581 |
+
# need to loop again to check if the connection
|
582 |
+
# slot is still available.
|
583 |
+
attempts = 0
|
584 |
+
while True:
|
585 |
+
fut: asyncio.Future[None] = self._loop.create_future()
|
586 |
+
keyed_waiters = self._waiters[key]
|
587 |
+
keyed_waiters[fut] = None
|
588 |
+
if attempts:
|
589 |
+
# If we have waited before, we need to move the waiter
|
590 |
+
# to the front of the queue as otherwise we might get
|
591 |
+
# starved and hit the timeout.
|
592 |
+
keyed_waiters.move_to_end(fut, last=False)
|
593 |
+
|
594 |
+
try:
|
595 |
+
# Traces happen in the try block to ensure that the
|
596 |
+
# the waiter is still cleaned up if an exception is raised.
|
597 |
+
if traces:
|
598 |
+
for trace in traces:
|
599 |
+
await trace.send_connection_queued_start()
|
600 |
+
await fut
|
601 |
+
if traces:
|
602 |
+
for trace in traces:
|
603 |
+
await trace.send_connection_queued_end()
|
604 |
+
finally:
|
605 |
+
# pop the waiter from the queue if its still
|
606 |
+
# there and not already removed by _release_waiter
|
607 |
+
keyed_waiters.pop(fut, None)
|
608 |
+
if not self._waiters.get(key, True):
|
609 |
+
del self._waiters[key]
|
610 |
+
|
611 |
+
if self._available_connections(key) > 0:
|
612 |
+
break
|
613 |
+
attempts += 1
|
614 |
+
|
615 |
+
async def _get(
|
616 |
+
self, key: "ConnectionKey", traces: List["Trace"]
|
617 |
+
) -> Optional[Connection]:
|
618 |
+
"""Get next reusable connection for the key or None.
|
619 |
+
|
620 |
+
The connection will be marked as acquired.
|
621 |
+
"""
|
622 |
+
if (conns := self._conns.get(key)) is None:
|
623 |
+
return None
|
624 |
+
|
625 |
+
t1 = monotonic()
|
626 |
+
while conns:
|
627 |
+
proto, t0 = conns.popleft()
|
628 |
+
# We will we reuse the connection if its connected and
|
629 |
+
# the keepalive timeout has not been exceeded
|
630 |
+
if proto.is_connected() and t1 - t0 <= self._keepalive_timeout:
|
631 |
+
if not conns:
|
632 |
+
# The very last connection was reclaimed: drop the key
|
633 |
+
del self._conns[key]
|
634 |
+
self._acquired.add(proto)
|
635 |
+
if self._limit_per_host:
|
636 |
+
self._acquired_per_host[key].add(proto)
|
637 |
+
if traces:
|
638 |
+
for trace in traces:
|
639 |
+
try:
|
640 |
+
await trace.send_connection_reuseconn()
|
641 |
+
except BaseException:
|
642 |
+
self._release_acquired(key, proto)
|
643 |
+
raise
|
644 |
+
return Connection(self, key, proto, self._loop)
|
645 |
+
|
646 |
+
# Connection cannot be reused, close it
|
647 |
+
transport = proto.transport
|
648 |
+
proto.close()
|
649 |
+
# only for SSL transports
|
650 |
+
if not self._cleanup_closed_disabled and key.is_ssl:
|
651 |
+
self._cleanup_closed_transports.append(transport)
|
652 |
+
|
653 |
+
# No more connections: drop the key
|
654 |
+
del self._conns[key]
|
655 |
+
return None
|
656 |
+
|
657 |
+
def _release_waiter(self) -> None:
|
658 |
+
"""
|
659 |
+
Iterates over all waiters until one to be released is found.
|
660 |
+
|
661 |
+
The one to be released is not finished and
|
662 |
+
belongs to a host that has available connections.
|
663 |
+
"""
|
664 |
+
if not self._waiters:
|
665 |
+
return
|
666 |
+
|
667 |
+
# Having the dict keys ordered this avoids to iterate
|
668 |
+
# at the same order at each call.
|
669 |
+
queues = list(self._waiters)
|
670 |
+
random.shuffle(queues)
|
671 |
+
|
672 |
+
for key in queues:
|
673 |
+
if self._available_connections(key) < 1:
|
674 |
+
continue
|
675 |
+
|
676 |
+
waiters = self._waiters[key]
|
677 |
+
while waiters:
|
678 |
+
waiter, _ = waiters.popitem(last=False)
|
679 |
+
if not waiter.done():
|
680 |
+
waiter.set_result(None)
|
681 |
+
return
|
682 |
+
|
683 |
+
def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
|
684 |
+
"""Release acquired connection."""
|
685 |
+
if self._closed:
|
686 |
+
# acquired connection is already released on connector closing
|
687 |
+
return
|
688 |
+
|
689 |
+
self._acquired.discard(proto)
|
690 |
+
if self._limit_per_host and (conns := self._acquired_per_host.get(key)):
|
691 |
+
conns.discard(proto)
|
692 |
+
if not conns:
|
693 |
+
del self._acquired_per_host[key]
|
694 |
+
self._release_waiter()
|
695 |
+
|
696 |
+
def _release(
|
697 |
+
self,
|
698 |
+
key: "ConnectionKey",
|
699 |
+
protocol: ResponseHandler,
|
700 |
+
*,
|
701 |
+
should_close: bool = False,
|
702 |
+
) -> None:
|
703 |
+
if self._closed:
|
704 |
+
# acquired connection is already released on connector closing
|
705 |
+
return
|
706 |
+
|
707 |
+
self._release_acquired(key, protocol)
|
708 |
+
|
709 |
+
if self._force_close or should_close or protocol.should_close:
|
710 |
+
transport = protocol.transport
|
711 |
+
protocol.close()
|
712 |
+
|
713 |
+
if key.is_ssl and not self._cleanup_closed_disabled:
|
714 |
+
self._cleanup_closed_transports.append(transport)
|
715 |
+
return
|
716 |
+
|
717 |
+
self._conns[key].append((protocol, monotonic()))
|
718 |
+
|
719 |
+
if self._cleanup_handle is None:
|
720 |
+
self._cleanup_handle = helpers.weakref_handle(
|
721 |
+
self,
|
722 |
+
"_cleanup",
|
723 |
+
self._keepalive_timeout,
|
724 |
+
self._loop,
|
725 |
+
timeout_ceil_threshold=self._timeout_ceil_threshold,
|
726 |
+
)
|
727 |
+
|
728 |
+
async def _create_connection(
|
729 |
+
self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
|
730 |
+
) -> ResponseHandler:
|
731 |
+
raise NotImplementedError()
|
732 |
+
|
733 |
+
|
734 |
+
class _DNSCacheTable:
|
735 |
+
def __init__(self, ttl: Optional[float] = None) -> None:
|
736 |
+
self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {}
|
737 |
+
self._timestamps: Dict[Tuple[str, int], float] = {}
|
738 |
+
self._ttl = ttl
|
739 |
+
|
740 |
+
def __contains__(self, host: object) -> bool:
|
741 |
+
return host in self._addrs_rr
|
742 |
+
|
743 |
+
def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None:
|
744 |
+
self._addrs_rr[key] = (cycle(addrs), len(addrs))
|
745 |
+
|
746 |
+
if self._ttl is not None:
|
747 |
+
self._timestamps[key] = monotonic()
|
748 |
+
|
749 |
+
def remove(self, key: Tuple[str, int]) -> None:
|
750 |
+
self._addrs_rr.pop(key, None)
|
751 |
+
|
752 |
+
if self._ttl is not None:
|
753 |
+
self._timestamps.pop(key, None)
|
754 |
+
|
755 |
+
def clear(self) -> None:
|
756 |
+
self._addrs_rr.clear()
|
757 |
+
self._timestamps.clear()
|
758 |
+
|
759 |
+
def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]:
|
760 |
+
loop, length = self._addrs_rr[key]
|
761 |
+
addrs = list(islice(loop, length))
|
762 |
+
# Consume one more element to shift internal state of `cycle`
|
763 |
+
next(loop)
|
764 |
+
return addrs
|
765 |
+
|
766 |
+
def expired(self, key: Tuple[str, int]) -> bool:
|
767 |
+
if self._ttl is None:
|
768 |
+
return False
|
769 |
+
|
770 |
+
return self._timestamps[key] + self._ttl < monotonic()
|
771 |
+
|
772 |
+
|
773 |
+
def _make_ssl_context(verified: bool) -> SSLContext:
|
774 |
+
"""Create SSL context.
|
775 |
+
|
776 |
+
This method is not async-friendly and should be called from a thread
|
777 |
+
because it will load certificates from disk and do other blocking I/O.
|
778 |
+
"""
|
779 |
+
if ssl is None:
|
780 |
+
# No ssl support
|
781 |
+
return None
|
782 |
+
if verified:
|
783 |
+
sslcontext = ssl.create_default_context()
|
784 |
+
else:
|
785 |
+
sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
786 |
+
sslcontext.options |= ssl.OP_NO_SSLv2
|
787 |
+
sslcontext.options |= ssl.OP_NO_SSLv3
|
788 |
+
sslcontext.check_hostname = False
|
789 |
+
sslcontext.verify_mode = ssl.CERT_NONE
|
790 |
+
sslcontext.options |= ssl.OP_NO_COMPRESSION
|
791 |
+
sslcontext.set_default_verify_paths()
|
792 |
+
sslcontext.set_alpn_protocols(("http/1.1",))
|
793 |
+
return sslcontext
|
794 |
+
|
795 |
+
|
796 |
+
# The default SSLContext objects are created at import time
|
797 |
+
# since they do blocking I/O to load certificates from disk,
|
798 |
+
# and imports should always be done before the event loop starts
|
799 |
+
# or in a thread.
|
800 |
+
_SSL_CONTEXT_VERIFIED = _make_ssl_context(True)
|
801 |
+
_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False)
|
802 |
+
|
803 |
+
|
804 |
+
class TCPConnector(BaseConnector):
    """TCP connector.

    verify_ssl - Set to True to check ssl certifications.
    fingerprint - Pass the binary sha256
        digest of the expected certificate in DER format to verify
        that the certificate the server presents matches. See also
        https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning
    resolver - Enable DNS lookups and use this
        resolver
    use_dns_cache - Use memory cache for DNS lookups.
    ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
    family - socket address family
    local_addr - local tuple of (host, port) to bind socket to

    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    enable_cleanup_closed - Enables clean-up closed ssl transports.
                            Disabled by default.
    happy_eyeballs_delay - This is the “Connection Attempt Delay”
                           as defined in RFC 8305. To disable
                           the happy eyeballs algorithm, set to None.
    interleave - “First Address Family Count” as defined in RFC 8305
    loop - Optional event loop.
    """

    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"})

    def __init__(
        self,
        *,
        verify_ssl: bool = True,
        fingerprint: Optional[bytes] = None,
        use_dns_cache: bool = True,
        ttl_dns_cache: Optional[int] = 10,
        family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[bool, Fingerprint, SSLContext] = True,
        local_addr: Optional[Tuple[str, int]] = None,
        resolver: Optional[AbstractResolver] = None,
        keepalive_timeout: Union[None, float, object] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout_ceil_threshold: float = 5,
        happy_eyeballs_delay: Optional[float] = 0.25,
        interleave: Optional[int] = None,
    ):
        super().__init__(
            keepalive_timeout=keepalive_timeout,
            force_close=force_close,
            limit=limit,
            limit_per_host=limit_per_host,
            enable_cleanup_closed=enable_cleanup_closed,
            loop=loop,
            timeout_ceil_threshold=timeout_ceil_threshold,
        )

        self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
        if resolver is None:
            resolver = DefaultResolver(loop=self._loop)
        self._resolver = resolver

        self._use_dns_cache = use_dns_cache
        self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
        self._throttle_dns_futures: Dict[
            Tuple[str, int], Set["asyncio.Future[None]"]
        ] = {}
        self._family = family
        self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr)
        self._happy_eyeballs_delay = happy_eyeballs_delay
        self._interleave = interleave
        self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set()

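For orientation, a minimal usage sketch of this connector with aiohttp's `ClientSession`; the URL and the limit values are placeholders, not library recommendations:

```python
import asyncio
import socket

import aiohttp

async def main() -> None:
    connector = aiohttp.TCPConnector(
        limit=50,               # total simultaneous connections
        limit_per_host=10,      # per (host, port) pair
        ttl_dns_cache=300,      # keep resolved addresses for 5 minutes
        family=socket.AF_INET,  # restrict lookups to IPv4
    )
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get("https://example.com") as resp:
            print(resp.status)

asyncio.run(main())
```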
    def close(self) -> Awaitable[None]:
        """Close all ongoing DNS calls."""
        for fut in chain.from_iterable(self._throttle_dns_futures.values()):
            fut.cancel()

        for t in self._resolve_host_tasks:
            t.cancel()

        return super().close()

    @property
    def family(self) -> int:
        """Socket family like AF_INET."""
        return self._family

    @property
    def use_dns_cache(self) -> bool:
        """True if local DNS caching is enabled."""
        return self._use_dns_cache

    def clear_dns_cache(
        self, host: Optional[str] = None, port: Optional[int] = None
    ) -> None:
        """Remove specified host/port or clear all dns local cache."""
        if host is not None and port is not None:
            self._cached_hosts.remove((host, port))
        elif host is not None or port is not None:
            raise ValueError("either both host and port or none of them are allowed")
        else:
            self._cached_hosts.clear()

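The both-or-none rule above is easy to trip over; assuming `connector` is a `TCPConnector`, the calls behave like this:

```python
connector.clear_dns_cache("example.com", 443)  # drop one cached (host, port) entry
connector.clear_dns_cache()                    # drop everything
# connector.clear_dns_cache(host="example.com")  # ValueError: needs both host and port
```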
    async def _resolve_host(
        self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None
    ) -> List[ResolveResult]:
        """Resolve host and return list of addresses."""
        if is_ip_address(host):
            return [
                {
                    "hostname": host,
                    "host": host,
                    "port": port,
                    "family": self._family,
                    "proto": 0,
                    "flags": 0,
                }
            ]

        if not self._use_dns_cache:

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            res = await self._resolver.resolve(host, port, family=self._family)

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            return res

        key = (host, port)
        if key in self._cached_hosts and not self._cached_hosts.expired(key):
            # get result early, before any await (#4014)
            result = self._cached_hosts.next_addrs(key)

            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            return result

        futures: Set["asyncio.Future[None]"]
        #
        # If multiple connectors are resolving the same host, we wait
        # for the first one to resolve and then use the result for all of them.
        # We use a throttle to ensure that we only resolve the host once
        # and then use the result for all the waiters.
        #
        if key in self._throttle_dns_futures:
            # get futures early, before any await (#4014)
            futures = self._throttle_dns_futures[key]
            future: asyncio.Future[None] = self._loop.create_future()
            futures.add(future)
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            try:
                await future
            finally:
                futures.discard(future)
            return self._cached_hosts.next_addrs(key)

        # update dict early, before any await (#4014)
        self._throttle_dns_futures[key] = futures = set()
        # In this case we need to create a task to ensure that we can shield
        # the task from cancellation as cancelling this lookup should not cancel
        # the underlying lookup or else the cancel event will get broadcast to
        # all the waiters across all connections.
        #
        coro = self._resolve_host_with_throttle(key, host, port, futures, traces)
        loop = asyncio.get_running_loop()
        if sys.version_info >= (3, 12):
            # Optimization for Python 3.12, try to send immediately
            resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True)
        else:
            resolved_host_task = loop.create_task(coro)

        if not resolved_host_task.done():
            self._resolve_host_tasks.add(resolved_host_task)
            resolved_host_task.add_done_callback(self._resolve_host_tasks.discard)

        try:
            return await asyncio.shield(resolved_host_task)
        except asyncio.CancelledError:

            def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None:
                with suppress(Exception, asyncio.CancelledError):
                    fut.result()

            resolved_host_task.add_done_callback(drop_exception)
            raise

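`_resolve_host` wraps the shared lookup in `asyncio.shield` so that a cancelled waiter does not take down the task other waiters depend on. A standalone sketch of that pattern, with a sleep standing in for the real DNS call:

```python
import asyncio

async def shared_lookup() -> str:
    await asyncio.sleep(1)  # stand-in for the real DNS resolution
    return "10.0.0.1"       # hypothetical result

async def main() -> None:
    task = asyncio.ensure_future(shared_lookup())
    waiter = asyncio.ensure_future(asyncio.shield(task))
    await asyncio.sleep(0.1)
    waiter.cancel()  # cancels only this waiter ...
    try:
        await waiter
    except asyncio.CancelledError:
        pass
    print(await task)  # ... the underlying task still completes: 10.0.0.1

asyncio.run(main())
```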
    async def _resolve_host_with_throttle(
        self,
        key: Tuple[str, int],
        host: str,
        port: int,
        futures: Set["asyncio.Future[None]"],
        traces: Optional[Sequence["Trace"]],
    ) -> List[ResolveResult]:
        """Resolve host and set result for all waiters.

        This method must be run in a task and shielded from cancellation
        to avoid cancelling the underlying lookup.
        """
        try:
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_miss(host)

                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            addrs = await self._resolver.resolve(host, port, family=self._family)
            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            self._cached_hosts.add(key, addrs)
            for fut in futures:
                set_result(fut, None)
        except BaseException as e:
            # any DNS exception is set for the waiters to raise the same exception.
            # This coro is always run in a task that is shielded from cancellation so
            # we should never be propagating cancellation here.
            for fut in futures:
                set_exception(fut, e)
            raise
        finally:
            self._throttle_dns_futures.pop(key)

        return self._cached_hosts.next_addrs(key)

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Create connection.

        Has same keyword arguments as BaseEventLoop.create_connection.
        """
        if req.proxy:
            _, proto = await self._create_proxy_connection(req, traces, timeout)
        else:
            _, proto = await self._create_direct_connection(req, traces, timeout)

        return proto

    def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
        """Logic to get the correct SSL context

        0. if req.ssl is false, return None

        1. if ssl_context is specified in req, use it
        2. if _ssl_context is specified in self, use it
        3. otherwise:
            1. if verify_ssl is not specified in req, use self.ssl_context
               (will generate a default context according to self.verify_ssl)
            2. if verify_ssl is True in req, generate a default SSL context
            3. if verify_ssl is False in req, generate a SSL context that
               won't verify
        """
        if not req.is_ssl():
            return None

        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")
        sslcontext = req.ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return _SSL_CONTEXT_UNVERIFIED
        sslcontext = self._ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return _SSL_CONTEXT_UNVERIFIED
        return _SSL_CONTEXT_VERIFIED

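In practice these branches are selected by what the caller passes as the per-request `ssl` argument; a hedged sketch (hostnames and the CA bundle path are made up):

```python
import ssl

import aiohttp

async def fetch(session: aiohttp.ClientSession) -> None:
    # ssl=True (the default): the shared verified context is used.
    await session.get("https://example.com")

    # ssl=False: certificate verification is skipped (unverified context).
    await session.get("https://self-signed.example.com", ssl=False)

    # An explicit SSLContext short-circuits both module-level defaults.
    ctx = ssl.create_default_context(cafile="my-ca.pem")  # hypothetical CA bundle
    await session.get("https://internal.example.com", ssl=ctx)
```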
    def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
        ret = req.ssl
        if isinstance(ret, Fingerprint):
            return ret
        ret = self._ssl
        if isinstance(ret, Fingerprint):
            return ret
        return None

    async def _wrap_create_connection(
        self,
        *args: Any,
        addr_infos: List[aiohappyeyeballs.AddrInfoType],
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
        **kwargs: Any,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                sock = await aiohappyeyeballs.start_connection(
                    addr_infos=addr_infos,
                    local_addr_infos=self._local_addr_infos,
                    happy_eyeballs_delay=self._happy_eyeballs_delay,
                    interleave=self._interleave,
                    loop=self._loop,
                )
                return await self._loop.create_connection(*args, **kwargs, sock=sock)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc

    async def _wrap_existing_connection(
        self,
        *args: Any,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
        **kwargs: Any,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                return await self._loop.create_connection(*args, **kwargs)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc

    def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
        """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.

        It is necessary for TLS-in-TLS so that it is possible to
        send HTTPS queries through HTTPS proxies.

        This doesn't affect regular HTTP requests, though.
        """
        if not req.is_ssl():
            return

        proxy_url = req.proxy
        assert proxy_url is not None
        if proxy_url.scheme != "https":
            return

        self._check_loop_for_start_tls()

    def _check_loop_for_start_tls(self) -> None:
        try:
            self._loop.start_tls
        except AttributeError as attr_exc:
            raise RuntimeError(
                "An HTTPS request is being sent through an HTTPS proxy. "
                "This needs support for TLS in TLS but it is not implemented "
                "in your runtime for the stdlib asyncio.\n\n"
                "Please upgrade to Python 3.11 or higher. For more details, "
                "please see:\n"
                "* https://bugs.python.org/issue37179\n"
                "* https://github.com/python/cpython/pull/28073\n"
                "* https://docs.aiohttp.org/en/stable/"
                "client_advanced.html#proxy-support\n"
                "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            ) from attr_exc

    def _loop_supports_start_tls(self) -> bool:
        try:
            self._check_loop_for_start_tls()
        except RuntimeError:
            return False
        else:
            return True

    def _warn_about_tls_in_tls(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
    ) -> None:
        """Issue a warning if the requested URL has HTTPS scheme."""
        if req.request_info.url.scheme != "https":
            return

        # Check if uvloop is being used, which supports TLS in TLS,
        # otherwise assume that asyncio's native transport is being used.
        if type(underlying_transport).__module__.startswith("uvloop"):
            return

        # Support in asyncio was added in Python 3.11 (bpo-44011)
        asyncio_supports_tls_in_tls = sys.version_info >= (3, 11) or getattr(
            underlying_transport,
            "_start_tls_compatible",
            False,
        )

        if asyncio_supports_tls_in_tls:
            return

        warnings.warn(
            "An HTTPS request is being sent through an HTTPS proxy. "
            "This support for TLS in TLS is known to be disabled "
            "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
            "an error in the log below.\n\n"
            "It is possible to enable it via monkeypatching. "
            "For more details, see:\n"
            "* https://bugs.python.org/issue37179\n"
            "* https://github.com/python/cpython/pull/28073\n\n"
            "You can temporarily patch this as follows:\n"
            "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
            "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            RuntimeWarning,
            source=self,
            # Why `4`? At least 3 of the calls in the stack originate
            # from the methods in this class.
            stacklevel=3,
        )

    async def _start_tls_connection(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        """Wrap the raw TCP transport with TLS."""
        tls_proto = self._factory()  # Create a brand new proto for TLS
        sslcontext = self._get_ssl_context(req)
        if TYPE_CHECKING:
            # _start_tls_connection is unreachable in the current code path
            # if sslcontext is None.
            assert sslcontext is not None

        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                try:
                    tls_transport = await self._loop.start_tls(
                        underlying_transport,
                        tls_proto,
                        sslcontext,
                        server_hostname=req.server_hostname or req.host,
                        ssl_handshake_timeout=timeout.total,
                    )
                except BaseException:
                    # We need to close the underlying transport since
                    # `start_tls()` probably failed before it had a
                    # chance to do this:
                    underlying_transport.close()
                    raise
                if isinstance(tls_transport, asyncio.Transport):
                    fingerprint = self._get_fingerprint(req)
                    if fingerprint:
                        try:
                            fingerprint.check(tls_transport)
                        except ServerFingerprintMismatch:
                            tls_transport.close()
                            if not self._cleanup_closed_disabled:
                                self._cleanup_closed_transports.append(tls_transport)
                            raise
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
        except TypeError as type_err:
            # Example cause looks like this:
            # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
            # object at 0x7f760615e460> is not supported by start_tls()

            raise ClientConnectionError(
                "Cannot initialize a TLS-in-TLS connection to host "
                f"{req.host!s}:{req.port:d} through an underlying connection "
                f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
                f"[{type_err!s}]"
            ) from type_err
        else:
            if tls_transport is None:
                msg = "Failed to start TLS (possibly caused by closing transport)"
                raise client_error(req.connection_key, OSError(msg))
            tls_proto.connection_made(
                tls_transport
            )  # Kick the state machine of the new TLS protocol

        return tls_transport, tls_proto

    def _convert_hosts_to_addr_infos(
        self, hosts: List[ResolveResult]
    ) -> List[aiohappyeyeballs.AddrInfoType]:
        """Converts the list of hosts to a list of addr_infos.

        The list of hosts is the result of a DNS lookup. The list of
        addr_infos is the result of a call to `socket.getaddrinfo()`.
        """
        addr_infos: List[aiohappyeyeballs.AddrInfoType] = []
        for hinfo in hosts:
            host = hinfo["host"]
            is_ipv6 = ":" in host
            family = socket.AF_INET6 if is_ipv6 else socket.AF_INET
            if self._family and self._family != family:
                continue
            addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"])
            addr_infos.append(
                (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)
            )
        return addr_infos

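The tuples appended above mirror what `socket.getaddrinfo()` returns. For illustration, one hypothetical entry of each family (the addresses are placeholders):

```python
import socket

# IPv4: (family, type, proto, canonname, (host, port))
ipv4 = (socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, "",
        ("93.184.216.34", 443))

# IPv6 sockaddrs carry two extra fields: (host, port, flowinfo, scope_id)
ipv6 = (socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP, "",
        ("2606:2800:220:1::", 443, 0, 0))
```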
    async def _create_direct_connection(
        self,
        req: ClientRequest,
        traces: List["Trace"],
        timeout: "ClientTimeout",
        *,
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        sslcontext = self._get_ssl_context(req)
        fingerprint = self._get_fingerprint(req)

        host = req.url.raw_host
        assert host is not None
        # Replace multiple trailing dots with a single one.
        # A trailing dot is only present for fully-qualified domain names.
        # See https://github.com/aio-libs/aiohttp/pull/7364.
        if host.endswith(".."):
            host = host.rstrip(".") + "."
        port = req.port
        assert port is not None
        try:
            # Cancelling this lookup should not cancel the underlying lookup
            # or else the cancel event will get broadcast to all the waiters
            # across all connections.
            hosts = await self._resolve_host(host, port, traces=traces)
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            # in case of proxy it is not ClientProxyConnectionError
            # it is problem of resolving proxy ip itself
            raise ClientConnectorDNSError(req.connection_key, exc) from exc

        last_exc: Optional[Exception] = None
        addr_infos = self._convert_hosts_to_addr_infos(hosts)
        while addr_infos:
            # Strip trailing dots, certificates contain FQDN without dots.
            # See https://github.com/aio-libs/aiohttp/issues/3636
            server_hostname = (
                (req.server_hostname or host).rstrip(".") if sslcontext else None
            )

            try:
                transp, proto = await self._wrap_create_connection(
                    self._factory,
                    timeout=timeout,
                    ssl=sslcontext,
                    addr_infos=addr_infos,
                    server_hostname=server_hostname,
                    req=req,
                    client_error=client_error,
                )
            except (ClientConnectorError, asyncio.TimeoutError) as exc:
                last_exc = exc
                aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave)
                continue

            if req.is_ssl() and fingerprint:
                try:
                    fingerprint.check(transp)
                except ServerFingerprintMismatch as exc:
                    transp.close()
                    if not self._cleanup_closed_disabled:
                        self._cleanup_closed_transports.append(transp)
                    last_exc = exc
                    # Remove the bad peer from the list of addr_infos
                    sock: socket.socket = transp.get_extra_info("socket")
                    bad_peer = sock.getpeername()
                    aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer)
                    continue

            return transp, proto
        else:
            assert last_exc is not None
            raise last_exc

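The fingerprint check in the loop above is driven by passing `aiohttp.Fingerprint` as the request's `ssl` argument. A sketch, assuming a locally saved PEM certificate at a hypothetical path:

```python
import hashlib
import ssl

import aiohttp

async def fetch_pinned() -> None:
    # SHA-256 digest of the DER-encoded certificate; "server.pem" is a
    # hypothetical path.
    der_cert = ssl.PEM_cert_to_DER_cert(open("server.pem").read())
    digest = hashlib.sha256(der_cert).digest()
    async with aiohttp.ClientSession() as session:
        # On mismatch the connector raises ServerFingerprintMismatch and,
        # as in the loop above, drops the offending peer from addr_infos.
        await session.get("https://example.com", ssl=aiohttp.Fingerprint(digest))
```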
    async def _create_proxy_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        self._fail_on_no_start_tls(req)
        runtime_has_start_tls = self._loop_supports_start_tls()

        headers: Dict[str, str] = {}
        if req.proxy_headers is not None:
            headers = req.proxy_headers  # type: ignore[assignment]
        headers[hdrs.HOST] = req.headers[hdrs.HOST]

        url = req.proxy
        assert url is not None
        proxy_req = ClientRequest(
            hdrs.METH_GET,
            url,
            headers=headers,
            auth=req.proxy_auth,
            loop=self._loop,
            ssl=req.ssl,
        )

        # create connection to proxy server
        transport, proto = await self._create_direct_connection(
            proxy_req, [], timeout, client_error=ClientProxyConnectionError
        )

        auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
        if auth is not None:
            if not req.is_ssl():
                req.headers[hdrs.PROXY_AUTHORIZATION] = auth
            else:
                proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth

        if req.is_ssl():
            if runtime_has_start_tls:
                self._warn_about_tls_in_tls(transport, req)

            # For HTTPS requests over HTTP proxy
            # we must notify proxy to tunnel connection
            # so we send CONNECT command:
            #   CONNECT www.python.org:443 HTTP/1.1
            #   Host: www.python.org
            #
            # next we must do TLS handshake and so on
            # to do this we must wrap raw socket into secure one
            # asyncio handles this perfectly
            proxy_req.method = hdrs.METH_CONNECT
            proxy_req.url = req.url
            key = req.connection_key._replace(
                proxy=None, proxy_auth=None, proxy_headers_hash=None
            )
            conn = Connection(self, key, proto, self._loop)
            proxy_resp = await proxy_req.send(conn)
            try:
                protocol = conn._protocol
                assert protocol is not None

                # read_until_eof=True will ensure the connection isn't closed
                # once the response is received and processed allowing
                # START_TLS to work on the connection below.
                protocol.set_response_params(
                    read_until_eof=runtime_has_start_tls,
                    timeout_ceil_threshold=self._timeout_ceil_threshold,
                )
                resp = await proxy_resp.start(conn)
            except BaseException:
                proxy_resp.close()
                conn.close()
                raise
            else:
                conn._protocol = None
                try:
                    if resp.status != 200:
                        message = resp.reason
                        if message is None:
                            message = HTTPStatus(resp.status).phrase
                        raise ClientHttpProxyError(
                            proxy_resp.request_info,
                            resp.history,
                            status=resp.status,
                            message=message,
                            headers=resp.headers,
                        )
                    if not runtime_has_start_tls:
                        rawsock = transport.get_extra_info("socket", default=None)
                        if rawsock is None:
                            raise RuntimeError(
                                "Transport does not expose socket instance"
                            )
                        # Duplicate the socket, so now we can close proxy transport
                        rawsock = rawsock.dup()
                except BaseException:
                    # It shouldn't be closed in `finally` because it's fed to
                    # `loop.start_tls()` and the docs say not to touch it after
                    # passing there.
                    transport.close()
                    raise
                finally:
                    if not runtime_has_start_tls:
                        transport.close()

                if not runtime_has_start_tls:
                    # HTTP proxy with support for upgrade to HTTPS
                    sslcontext = self._get_ssl_context(req)
                    return await self._wrap_existing_connection(
                        self._factory,
                        timeout=timeout,
                        ssl=sslcontext,
                        sock=rawsock,
                        server_hostname=req.host,
                        req=req,
                    )

                return await self._start_tls_connection(
                    # Access the old transport for the last time before it's
                    # closed and forgotten forever:
                    transport,
                    req=req,
                    timeout=timeout,
                )
            finally:
                proxy_resp.close()

        return transport, proto


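From the caller's side, the CONNECT tunnelling above is triggered simply by requesting an `https://` URL with a `proxy` argument; a sketch with placeholder proxy details:

```python
import aiohttp

async def fetch_via_proxy() -> None:
    async with aiohttp.ClientSession() as session:
        # For an https:// target behind an http:// proxy, the connector sends
        # CONNECT and then upgrades the tunnelled socket to TLS.
        async with session.get(
            "https://example.com",
            proxy="http://proxy.example.com:3128",           # hypothetical proxy
            proxy_auth=aiohttp.BasicAuth("user", "secret"),  # optional credentials
        ) as resp:
            print(resp.status)
```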
class UnixConnector(BaseConnector):
    """Unix socket connector.

    path - Unix socket path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"})

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        self._path = path

    @property
    def path(self) -> str:
        """Path to unix socket."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_unix_connection(
                    self._factory, self._path
                )
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc

        return proto


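A short usage sketch for this connector, assuming a Docker-style daemon listening on a local unix socket (the socket path and endpoint are illustrative):

```python
import aiohttp

async def query_daemon() -> None:
    connector = aiohttp.UnixConnector(path="/var/run/docker.sock")
    async with aiohttp.ClientSession(connector=connector) as session:
        # The URL's host part is nominal; the connection goes to the socket.
        async with session.get("http://localhost/version") as resp:
            print(await resp.json())
```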
class NamedPipeConnector(BaseConnector):
    """Named pipe connector.

    Only supported by the proactor event loop.
    See also: https://docs.python.org/3/library/asyncio-eventloop.html

    path - Windows named pipe path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"})

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        if not isinstance(
            self._loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        self._path = path

    @property
    def path(self) -> str:
        """Path to the named pipe."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_pipe_connection(  # type: ignore[attr-defined]
                    self._factory, self._path
                )
                # the drain is required so that connection_made is called
                # and the transport is set; otherwise it is not set before the
                # `assert conn.transport is not None`
                # in client.py's _request method
                await asyncio.sleep(0)
                # other option is to manually set transport like
                # `proto.transport = trans`
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise ClientConnectorError(req.connection_key, exc) from exc

        return cast(ResponseHandler, proto)
venv/Lib/site-packages/aiohttp/cookiejar.py
ADDED
@@ -0,0 +1,495 @@
import asyncio
import calendar
import contextlib
import datetime
import heapq
import itertools
import os  # noqa
import pathlib
import pickle
import re
import time
import warnings
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie
from typing import (
    DefaultDict,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Union,
    cast,
)

from yarl import URL

from .abc import AbstractCookieJar, ClearCookiePredicate
from .helpers import is_ip_address
from .typedefs import LooseCookies, PathLike, StrOrURL

__all__ = ("CookieJar", "DummyCookieJar")


CookieItem = Union[str, "Morsel[str]"]

# We cache these string methods here as their use is in performance critical code.
_FORMAT_PATH = "{}/{}".format
_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format

# The minimum number of scheduled cookie expirations before we start cleaning up
# the expiration heap. This is a performance optimization to avoid cleaning up the
# heap too often when there are only a few scheduled expirations.
_MIN_SCHEDULED_COOKIE_EXPIRATION = 100


class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    # calendar.timegm() fails for timestamps after datetime.datetime.max
    # Minus one as a loss of precision occurs when timestamp() is called.
    MAX_TIME = (
        int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
    )
    try:
        calendar.timegm(time.gmtime(MAX_TIME))
    except (OSError, ValueError):
        # Hit the maximum representable time on Windows
        # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
        # Throws ValueError on PyPy 3.9, OSError elsewhere
        MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
    except OverflowError:
        # #4515: datetime.max may not be representable on 32-bit platforms
        MAX_TIME = 2**31 - 1
    # Avoid minuses in the future, 3x faster
    SUB_MAX_TIME = MAX_TIME - 1

    def __init__(
        self,
        *,
        unsafe: bool = False,
        quote_cookie: bool = True,
        treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(loop=loop)
        self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
            SimpleCookie
        )
        self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = (
            defaultdict(dict)
        )
        self._host_only_cookies: Set[Tuple[str, str]] = set()
        self._unsafe = unsafe
        self._quote_cookie = quote_cookie
        if treat_as_secure_origin is None:
            treat_as_secure_origin = []
        elif isinstance(treat_as_secure_origin, URL):
            treat_as_secure_origin = [treat_as_secure_origin.origin()]
        elif isinstance(treat_as_secure_origin, str):
            treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
        else:
            treat_as_secure_origin = [
                URL(url).origin() if isinstance(url, str) else url.origin()
                for url in treat_as_secure_origin
            ]
        self._treat_as_secure_origin = treat_as_secure_origin
        self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = []
        self._expirations: Dict[Tuple[str, str, str], float] = {}

    @property
    def quote_cookie(self) -> bool:
        return self._quote_cookie

    def save(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="wb") as f:
            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path: PathLike) -> None:
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="rb") as f:
            self._cookies = pickle.load(f)

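A sketch of persisting a jar across runs with the `save`/`load` pair above; note they use `pickle`, so only load files your own process wrote (the URL and file name are placeholders):

```python
import asyncio

import aiohttp

async def main() -> None:
    jar = aiohttp.CookieJar()
    async with aiohttp.ClientSession(cookie_jar=jar) as session:
        await session.get("https://example.com")
    jar.save("cookies.pickle")  # pickles the internal SimpleCookie mapping

    fresh_jar = aiohttp.CookieJar()
    fresh_jar.load("cookies.pickle")  # trust only files you wrote yourself

asyncio.run(main())
```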
    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        if predicate is None:
            self._expire_heap.clear()
            self._cookies.clear()
            self._morsel_cache.clear()
            self._host_only_cookies.clear()
            self._expirations.clear()
            return

        now = time.time()
        to_del = [
            key
            for (domain, path), cookie in self._cookies.items()
            for name, morsel in cookie.items()
            if (
                (key := (domain, path, name)) in self._expirations
                and self._expirations[key] <= now
            )
            or predicate(morsel)
        ]
        if to_del:
            self._delete_cookies(to_del)

    def clear_domain(self, domain: str) -> None:
        self.clear(lambda x: self._is_domain_match(domain, x["domain"]))

    def __iter__(self) -> "Iterator[Morsel[str]]":
        self._do_expiration()
        for val in self._cookies.values():
            yield from val.values()

    def __len__(self) -> int:
        """Return number of cookies.

        This function does not iterate self to avoid unnecessary expiration
        checks.
        """
        return sum(len(cookie.values()) for cookie in self._cookies.values())

    def _do_expiration(self) -> None:
        """Remove expired cookies."""
        if not (expire_heap_len := len(self._expire_heap)):
            return

        # If the expiration heap grows larger than the number of expirations
        # times two, we clean it up to avoid keeping expired entries in
        # the heap and consuming memory. We guard this with a minimum
        # threshold to avoid cleaning up the heap too often when there are
        # only a few scheduled expirations.
        if (
            expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION
            and expire_heap_len > len(self._expirations) * 2
        ):
            # Remove any expired entries from the expiration heap
            # that do not match the expiration time in the expirations
            # as it means the cookie has been re-added to the heap
            # with a different expiration time.
            self._expire_heap = [
                entry
                for entry in self._expire_heap
                if self._expirations.get(entry[1]) == entry[0]
            ]
            heapq.heapify(self._expire_heap)

        now = time.time()
        to_del: List[Tuple[str, str, str]] = []
        # Find any expired cookies and add them to the to-delete list
        while self._expire_heap:
            when, cookie_key = self._expire_heap[0]
            if when > now:
                break
            heapq.heappop(self._expire_heap)
            # Check if the cookie hasn't been re-added to the heap
            # with a different expiration time as it will be removed
            # later when it reaches the top of the heap and its
            # expiration time is met.
            if self._expirations.get(cookie_key) == when:
                to_del.append(cookie_key)

        if to_del:
            self._delete_cookies(to_del)

    def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None:
        for domain, path, name in to_del:
            self._host_only_cookies.discard((domain, name))
            self._cookies[(domain, path)].pop(name, None)
            self._morsel_cache[(domain, path)].pop(name, None)
            self._expirations.pop((domain, path, name), None)

    def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
        cookie_key = (domain, path, name)
        if self._expirations.get(cookie_key) == when:
            # Avoid adding duplicates to the heap
            return
        heapq.heappush(self._expire_heap, (when, cookie_key))
        self._expirations[cookie_key] = when

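`_expire_cookie` never removes a superseded heap entry eagerly: it pushes a new `(when, key)` pair and records the authoritative time in `_expirations`, and `_do_expiration` discards popped entries whose timestamp no longer matches. The same lazy-invalidation idea in miniature:

```python
import heapq

heap: list = []
authoritative: dict = {}

def schedule(key: str, when: float) -> None:
    # Re-scheduling just pushes a new entry; the old one becomes stale.
    heapq.heappush(heap, (when, key))
    authoritative[key] = when

def pop_due(now: float) -> list:
    due = []
    while heap and heap[0][0] <= now:
        when, key = heapq.heappop(heap)
        if authoritative.get(key) == when:  # ignore superseded entries
            due.append(key)
            del authoritative[key]
    return due

schedule("session-id", 10.0)
schedule("session-id", 20.0)  # supersedes the first entry
print(pop_due(15.0))          # [] - the 10.0 entry is stale and skipped
print(pop_due(25.0))          # ['session-id']
```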
    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                tmp = SimpleCookie()
                tmp[name] = cookie  # type: ignore[assignment]
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain and domain[-1] == ".":
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain and domain[0] == ".":
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or path[0] != "/":
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path
            path = path.rstrip("/")

            if max_age := cookie["max-age"]:
                try:
                    delta_seconds = int(max_age)
                    max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
                    self._expire_cookie(max_age_expiration, domain, path, name)
                except ValueError:
                    cookie["max-age"] = ""

            elif expires := cookie["expires"]:
                if expire_time := self._parse_date(expires):
                    self._expire_cookie(expire_time, domain, path, name)
                else:
                    cookie["expires"] = ""

            key = (domain, path)
            if self._cookies[key].get(name) != cookie:
                # Don't blow away the cache if the same
                # cookie gets set again
                self._cookies[key][name] = cookie
                self._morsel_cache[key].pop(name, None)

        self._do_expiration()

    def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
        """Returns this jar's cookies filtered by their attributes."""
        filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
            SimpleCookie() if self._quote_cookie else BaseCookie()
        )
        if not self._cookies:
            # Skip do_expiration() if there are no cookies.
            return filtered
        self._do_expiration()
        if not self._cookies:
            # Skip rest of function if no non-expired cookies.
            return filtered
        if type(request_url) is not URL:
            warnings.warn(
                "filter_cookies expects yarl.URL instances only,"
                f"and will stop working in 4.x, got {type(request_url)}",
                DeprecationWarning,
                stacklevel=2,
            )
            request_url = URL(request_url)
        hostname = request_url.raw_host or ""

        is_not_secure = request_url.scheme not in ("https", "wss")
        if is_not_secure and self._treat_as_secure_origin:
            request_origin = URL()
            with contextlib.suppress(ValueError):
                request_origin = request_url.origin()
            is_not_secure = request_origin not in self._treat_as_secure_origin

        # Send shared cookie
        for c in self._cookies[("", "")].values():
            filtered[c.key] = c.value

        if is_ip_address(hostname):
            if not self._unsafe:
                return filtered
            domains: Iterable[str] = (hostname,)
        else:
            # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com")
            domains = itertools.accumulate(
                reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED
            )

        # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar")
        paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH)
        # Create every combination of (domain, path) pairs.
        pairs = itertools.product(domains, paths)

        path_len = len(request_url.path)
        # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
        for p in pairs:
            for name, cookie in self._cookies[p].items():
                domain = cookie["domain"]

                if (domain, name) in self._host_only_cookies and domain != hostname:
                    continue

                # Skip edge case when the cookie has a trailing slash but request doesn't.
                if len(cookie["path"]) > path_len:
                    continue

                if is_not_secure and cookie["secure"]:
                    continue

                # We already built the Morsel so reuse it here
                if name in self._morsel_cache[p]:
                    filtered[name] = self._morsel_cache[p][name]
                    continue

                # It's critical we use the Morsel so the coded_value
                # (based on cookie version) is preserved
                mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
                mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
                self._morsel_cache[p][name] = mrsl_val
                filtered[name] = mrsl_val

        return filtered

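The two `itertools.accumulate` calls above generate every candidate domain suffix and path prefix. Run in isolation they produce (the hostname and path are examples):

```python
import itertools

_FORMAT_PATH = "{}/{}".format
_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format

hostname = "foo.bar.example.com"
print(list(itertools.accumulate(reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED)))
# ['com', 'example.com', 'bar.example.com', 'foo.bar.example.com']

path = "/a/b/c"
print(list(itertools.accumulate(path.split("/"), _FORMAT_PATH)))
# ['', '/a', '/a/b', '/a/b/c']
```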
    @staticmethod
    def _is_domain_match(domain: str, hostname: str) -> bool:
        """Implements domain matching adhering to RFC 6265."""
        if hostname == domain:
            return True

        if not hostname.endswith(domain):
            return False

        non_matching = hostname[: -len(domain)]

        if not non_matching.endswith("."):
            return False

        return not is_ip_address(hostname)

    @classmethod
    def _parse_date(cls, date_str: str) -> Optional[int]:
        """Implements date string parsing adhering to RFC 6265."""
        if not date_str:
            return None

        found_time = False
        found_day = False
        found_month = False
        found_year = False

        hour = minute = second = 0
        day = 0
        month = 0
        year = 0

        for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

            token = token_match.group("token")

            if not found_time:
                time_match = cls.DATE_HMS_TIME_RE.match(token)
                if time_match:
                    found_time = True
                    hour, minute, second = (int(s) for s in time_match.groups())
                    continue

            if not found_day:
                day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
                if day_match:
                    found_day = True
                    day = int(day_match.group())
                    continue

            if not found_month:
                month_match = cls.DATE_MONTH_RE.match(token)
                if month_match:
                    found_month = True
                    assert month_match.lastindex is not None
                    month = month_match.lastindex
                    continue

            if not found_year:
                year_match = cls.DATE_YEAR_RE.match(token)
                if year_match:
                    found_year = True
                    year = int(year_match.group())

        if 70 <= year <= 99:
            year += 1900
        elif 0 <= year <= 69:
            year += 2000

        if False in (found_day, found_month, found_year, found_time):
            return None

        if not 1 <= day <= 31:
            return None

        if year < 1601 or hour > 23 or minute > 59 or second > 59:
            return None

        return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))


class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        while False:
            yield None

    def __len__(self) -> int:
        return 0

    @property
    def quote_cookie(self) -> bool:
        return True

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        pass

    def clear_domain(self, domain: str) -> None:
        pass

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        pass

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        return SimpleCookie()
venv/Lib/site-packages/aiohttp/formdata.py
ADDED
@@ -0,0 +1,182 @@
import io
import warnings
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode

from multidict import MultiDict, MultiDictProxy

from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload

__all__ = ("FormData",)


class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
        *,
        default_to_multipart: bool = False,
    ) -> None:
        self._writer = multipart.MultipartWriter("form-data")
        self._fields: List[Any] = []
        self._is_multipart = default_to_multipart
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:

        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            msg = (
                "In v4, passing bytes will no longer create a file field. "
                "Please explicitly use the filename parameter or pass a BytesIO object."
            )
            if filename is None and content_transfer_encoding is None:
                warnings.warn(msg, DeprecationWarning)
                filename = name

        type_options: MultiDict[str] = MultiDict({"name": name})
        if filename is not None and not isinstance(filename, str):
            raise TypeError("filename must be an instance of str. Got: %s" % filename)
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            msg = (
                "content_transfer_encoding is deprecated. "
                "To maintain compatibility with v4 please pass a BytesPayload."
            )
            warnings.warn(msg, DeprecationWarning)
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

def _gen_form_urlencoded(self) -> payload.BytesPayload:
|
124 |
+
# form data (x-www-form-urlencoded)
|
125 |
+
data = []
|
126 |
+
for type_options, _, value in self._fields:
|
127 |
+
data.append((type_options["name"], value))
|
128 |
+
|
129 |
+
charset = self._charset if self._charset is not None else "utf-8"
|
130 |
+
|
131 |
+
if charset == "utf-8":
|
132 |
+
content_type = "application/x-www-form-urlencoded"
|
133 |
+
else:
|
134 |
+
content_type = "application/x-www-form-urlencoded; charset=%s" % charset
|
135 |
+
|
136 |
+
return payload.BytesPayload(
|
137 |
+
urlencode(data, doseq=True, encoding=charset).encode(),
|
138 |
+
content_type=content_type,
|
139 |
+
)
|
140 |
+
|
141 |
+
def _gen_form_data(self) -> multipart.MultipartWriter:
|
142 |
+
"""Encode a list of fields using the multipart/form-data MIME format"""
|
143 |
+
if self._is_processed:
|
144 |
+
raise RuntimeError("Form data has been processed already")
|
145 |
+
for dispparams, headers, value in self._fields:
|
146 |
+
try:
|
147 |
+
if hdrs.CONTENT_TYPE in headers:
|
148 |
+
part = payload.get_payload(
|
149 |
+
value,
|
150 |
+
content_type=headers[hdrs.CONTENT_TYPE],
|
151 |
+
headers=headers,
|
152 |
+
encoding=self._charset,
|
153 |
+
)
|
154 |
+
else:
|
155 |
+
part = payload.get_payload(
|
156 |
+
value, headers=headers, encoding=self._charset
|
157 |
+
)
|
158 |
+
except Exception as exc:
|
159 |
+
raise TypeError(
|
160 |
+
"Can not serialize value type: %r\n "
|
161 |
+
"headers: %r\n value: %r" % (type(value), headers, value)
|
162 |
+
) from exc
|
163 |
+
|
164 |
+
if dispparams:
|
165 |
+
part.set_content_disposition(
|
166 |
+
"form-data", quote_fields=self._quote_fields, **dispparams
|
167 |
+
)
|
168 |
+
# FIXME cgi.FieldStorage doesn't likes body parts with
|
169 |
+
# Content-Length which were sent via chunked transfer encoding
|
170 |
+
assert part.headers is not None
|
171 |
+
part.headers.popall(hdrs.CONTENT_LENGTH, None)
|
172 |
+
|
173 |
+
self._writer.append_payload(part)
|
174 |
+
|
175 |
+
self._is_processed = True
|
176 |
+
return self._writer
|
177 |
+
|
178 |
+
def __call__(self) -> Payload:
|
179 |
+
if self._is_multipart:
|
180 |
+
return self._gen_form_data()
|
181 |
+
else:
|
182 |
+
return self._gen_form_urlencoded()
|
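A minimal usage sketch for the FormData class above (the field names and file path are illustrative):

form = FormData()
form.add_field("user", "alice")          # with only simple fields, __call__() would urlencode
form.add_field(
    "avatar",
    open("avatar.png", "rb"),            # an io.IOBase value switches the form to multipart
    filename="avatar.png",
    content_type="image/png",
)
assert form.is_multipart                 # flipped by the file field above
writer = form()                          # a MultipartWriter, ready to be sent as a request body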
venv/Lib/site-packages/aiohttp/hdrs.py
ADDED
@@ -0,0 +1,121 @@
+"""HTTP Headers constants."""
+
+# After changing the file content call ./tools/gen.py
+# to regenerate the headers parser
+import itertools
+from typing import Final, Set
+
+from multidict import istr
+
+METH_ANY: Final[str] = "*"
+METH_CONNECT: Final[str] = "CONNECT"
+METH_HEAD: Final[str] = "HEAD"
+METH_GET: Final[str] = "GET"
+METH_DELETE: Final[str] = "DELETE"
+METH_OPTIONS: Final[str] = "OPTIONS"
+METH_PATCH: Final[str] = "PATCH"
+METH_POST: Final[str] = "POST"
+METH_PUT: Final[str] = "PUT"
+METH_TRACE: Final[str] = "TRACE"
+
+METH_ALL: Final[Set[str]] = {
+    METH_CONNECT,
+    METH_HEAD,
+    METH_GET,
+    METH_DELETE,
+    METH_OPTIONS,
+    METH_PATCH,
+    METH_POST,
+    METH_PUT,
+    METH_TRACE,
+}
+
+ACCEPT: Final[istr] = istr("Accept")
+ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
+ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
+ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
+ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
+ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
+ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
+ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
+ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
+ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
+ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
+ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
+ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
+AGE: Final[istr] = istr("Age")
+ALLOW: Final[istr] = istr("Allow")
+AUTHORIZATION: Final[istr] = istr("Authorization")
+CACHE_CONTROL: Final[istr] = istr("Cache-Control")
+CONNECTION: Final[istr] = istr("Connection")
+CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
+CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
+CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
+CONTENT_LENGTH: Final[istr] = istr("Content-Length")
+CONTENT_LOCATION: Final[istr] = istr("Content-Location")
+CONTENT_MD5: Final[istr] = istr("Content-MD5")
+CONTENT_RANGE: Final[istr] = istr("Content-Range")
+CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
+CONTENT_TYPE: Final[istr] = istr("Content-Type")
+COOKIE: Final[istr] = istr("Cookie")
+DATE: Final[istr] = istr("Date")
+DESTINATION: Final[istr] = istr("Destination")
+DIGEST: Final[istr] = istr("Digest")
+ETAG: Final[istr] = istr("Etag")
+EXPECT: Final[istr] = istr("Expect")
+EXPIRES: Final[istr] = istr("Expires")
+FORWARDED: Final[istr] = istr("Forwarded")
+FROM: Final[istr] = istr("From")
+HOST: Final[istr] = istr("Host")
+IF_MATCH: Final[istr] = istr("If-Match")
+IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
+IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
+IF_RANGE: Final[istr] = istr("If-Range")
+IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
+KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
+LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
+LAST_MODIFIED: Final[istr] = istr("Last-Modified")
+LINK: Final[istr] = istr("Link")
+LOCATION: Final[istr] = istr("Location")
+MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
+ORIGIN: Final[istr] = istr("Origin")
+PRAGMA: Final[istr] = istr("Pragma")
+PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
+PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
+RANGE: Final[istr] = istr("Range")
+REFERER: Final[istr] = istr("Referer")
+RETRY_AFTER: Final[istr] = istr("Retry-After")
+SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
+SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
+SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
+SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
+SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
+SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
+SERVER: Final[istr] = istr("Server")
+SET_COOKIE: Final[istr] = istr("Set-Cookie")
+TE: Final[istr] = istr("TE")
+TRAILER: Final[istr] = istr("Trailer")
+TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
+UPGRADE: Final[istr] = istr("Upgrade")
+URI: Final[istr] = istr("URI")
+USER_AGENT: Final[istr] = istr("User-Agent")
+VARY: Final[istr] = istr("Vary")
+VIA: Final[istr] = istr("Via")
+WANT_DIGEST: Final[istr] = istr("Want-Digest")
+WARNING: Final[istr] = istr("Warning")
+WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
+X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
+X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
+X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
+
+# These are the upper/lower case variants of the headers/methods
+# Example: {'hOst', 'host', 'HoST', 'HOSt', 'hOsT', 'HosT', 'hoSt', ...}
+METH_HEAD_ALL: Final = frozenset(
+    map("".join, itertools.product(*zip(METH_HEAD.upper(), METH_HEAD.lower())))
+)
+METH_CONNECT_ALL: Final = frozenset(
+    map("".join, itertools.product(*zip(METH_CONNECT.upper(), METH_CONNECT.lower())))
+)
+HOST_ALL: Final = frozenset(
+    map("".join, itertools.product(*zip(HOST.upper(), HOST.lower())))
+)
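The point of the *_ALL frozensets at the end is that a plain `in` test becomes case-insensitive without normalising the probe string on the hot path:

"HeAd" in METH_HEAD_ALL        # True: every mixed-case spelling is precomputed
"head".upper() in METH_ALL     # also True, but pays for the .upper() call
len(METH_HEAD_ALL)             # 2 ** 4 == 16 spellings of "HEAD"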
venv/Lib/site-packages/aiohttp/helpers.py
ADDED
@@ -0,0 +1,958 @@
+"""Various helper functions"""
+
+import asyncio
+import base64
+import binascii
+import contextlib
+import datetime
+import enum
+import functools
+import inspect
+import netrc
+import os
+import platform
+import re
+import sys
+import time
+import weakref
+from collections import namedtuple
+from contextlib import suppress
+from email.parser import HeaderParser
+from email.utils import parsedate
+from math import ceil
+from pathlib import Path
+from types import MappingProxyType, TracebackType
+from typing import (
+    Any,
+    Callable,
+    ContextManager,
+    Dict,
+    Generator,
+    Generic,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Protocol,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    get_args,
+    overload,
+)
+from urllib.parse import quote
+from urllib.request import getproxies, proxy_bypass
+
+import attr
+from multidict import MultiDict, MultiDictProxy, MultiMapping
+from propcache.api import under_cached_property as reify
+from yarl import URL
+
+from . import hdrs
+from .log import client_logger
+
+if sys.version_info >= (3, 11):
+    import asyncio as async_timeout
+else:
+    import async_timeout
+
+__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify")
+
+IS_MACOS = platform.system() == "Darwin"
+IS_WINDOWS = platform.system() == "Windows"
+
+PY_310 = sys.version_info >= (3, 10)
+PY_311 = sys.version_info >= (3, 11)
+
+
+_T = TypeVar("_T")
+_S = TypeVar("_S")
+
+_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
+sentinel = _SENTINEL.sentinel
+
+NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
+
+# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200)))
+# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
+EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL
+
+DEBUG = sys.flags.dev_mode or (
+    not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
+)
+
+
+CHAR = {chr(i) for i in range(0, 128)}
+CTL = {chr(i) for i in range(0, 32)} | {
+    chr(127),
+}
+SEPARATORS = {
+    "(",
+    ")",
+    "<",
+    ">",
+    "@",
+    ",",
+    ";",
+    ":",
+    "\\",
+    '"',
+    "/",
+    "[",
+    "]",
+    "?",
+    "=",
+    "{",
+    "}",
+    " ",
+    chr(9),
+}
+TOKEN = CHAR ^ CTL ^ SEPARATORS
+
+
+class noop:
+    def __await__(self) -> Generator[None, None, None]:
+        yield
+
+
+class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
+    """Http basic authentication helper."""
+
+    def __new__(
+        cls, login: str, password: str = "", encoding: str = "latin1"
+    ) -> "BasicAuth":
+        if login is None:
+            raise ValueError("None is not allowed as login value")
+
+        if password is None:
+            raise ValueError("None is not allowed as password value")
+
+        if ":" in login:
+            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
+
+        return super().__new__(cls, login, password, encoding)
+
+    @classmethod
+    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
+        """Create a BasicAuth object from an Authorization HTTP header."""
+        try:
+            auth_type, encoded_credentials = auth_header.split(" ", 1)
+        except ValueError:
+            raise ValueError("Could not parse authorization header.")
+
+        if auth_type.lower() != "basic":
+            raise ValueError("Unknown authorization method %s" % auth_type)
+
+        try:
+            decoded = base64.b64decode(
+                encoded_credentials.encode("ascii"), validate=True
+            ).decode(encoding)
+        except binascii.Error:
+            raise ValueError("Invalid base64 encoding.")
+
+        try:
+            # RFC 2617 HTTP Authentication
+            # https://www.ietf.org/rfc/rfc2617.txt
+            # the colon must be present, but the username and password may be
+            # otherwise blank.
+            username, password = decoded.split(":", 1)
+        except ValueError:
+            raise ValueError("Invalid credentials.")
+
+        return cls(username, password, encoding=encoding)
+
+    @classmethod
+    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
+        """Create BasicAuth from url."""
+        if not isinstance(url, URL):
+            raise TypeError("url should be yarl.URL instance")
+        # Check raw_user and raw_password first as yarl is likely
+        # to already have these values parsed from the netloc in the cache.
+        if url.raw_user is None and url.raw_password is None:
+            return None
+        return cls(url.user or "", url.password or "", encoding=encoding)
+
+    def encode(self) -> str:
+        """Encode credentials."""
+        creds = (f"{self.login}:{self.password}").encode(self.encoding)
+        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
+
+
+def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
+    """Remove user and password from URL if present and return BasicAuth object."""
+    # Check raw_user and raw_password first as yarl is likely
+    # to already have these values parsed from the netloc in the cache.
+    if url.raw_user is None and url.raw_password is None:
+        return url, None
+    return url.with_user(None), BasicAuth(url.user or "", url.password or "")
+
+
def netrc_from_env() -> Optional[netrc.netrc]:
|
195 |
+
"""Load netrc from file.
|
196 |
+
|
197 |
+
Attempt to load it from the path specified by the env-var
|
198 |
+
NETRC or in the default location in the user's home directory.
|
199 |
+
|
200 |
+
Returns None if it couldn't be found or fails to parse.
|
201 |
+
"""
|
202 |
+
netrc_env = os.environ.get("NETRC")
|
203 |
+
|
204 |
+
if netrc_env is not None:
|
205 |
+
netrc_path = Path(netrc_env)
|
206 |
+
else:
|
207 |
+
try:
|
208 |
+
home_dir = Path.home()
|
209 |
+
except RuntimeError as e: # pragma: no cover
|
210 |
+
# if pathlib can't resolve home, it may raise a RuntimeError
|
211 |
+
client_logger.debug(
|
212 |
+
"Could not resolve home directory when "
|
213 |
+
"trying to look for .netrc file: %s",
|
214 |
+
e,
|
215 |
+
)
|
216 |
+
return None
|
217 |
+
|
218 |
+
netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")
|
219 |
+
|
220 |
+
try:
|
221 |
+
return netrc.netrc(str(netrc_path))
|
222 |
+
except netrc.NetrcParseError as e:
|
223 |
+
client_logger.warning("Could not parse .netrc file: %s", e)
|
224 |
+
except OSError as e:
|
225 |
+
netrc_exists = False
|
226 |
+
with contextlib.suppress(OSError):
|
227 |
+
netrc_exists = netrc_path.is_file()
|
228 |
+
# we couldn't read the file (doesn't exist, permissions, etc.)
|
229 |
+
if netrc_env or netrc_exists:
|
230 |
+
# only warn if the environment wanted us to load it,
|
231 |
+
# or it appears like the default file does actually exist
|
232 |
+
client_logger.warning("Could not read .netrc file: %s", e)
|
233 |
+
|
234 |
+
return None
|
235 |
+
|
236 |
+
|
237 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
238 |
+
class ProxyInfo:
|
239 |
+
proxy: URL
|
240 |
+
proxy_auth: Optional[BasicAuth]
|
241 |
+
|
242 |
+
|
243 |
+
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
|
244 |
+
"""
|
245 |
+
Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.
|
246 |
+
|
247 |
+
:raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
|
248 |
+
entry is found for the ``host``.
|
249 |
+
"""
|
250 |
+
if netrc_obj is None:
|
251 |
+
raise LookupError("No .netrc file found")
|
252 |
+
auth_from_netrc = netrc_obj.authenticators(host)
|
253 |
+
|
254 |
+
if auth_from_netrc is None:
|
255 |
+
raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
|
256 |
+
login, account, password = auth_from_netrc
|
257 |
+
|
258 |
+
# TODO(PY311): username = login or account
|
259 |
+
# Up to python 3.10, account could be None if not specified,
|
260 |
+
# and login will be empty string if not specified. From 3.11,
|
261 |
+
# login and account will be empty string if not specified.
|
262 |
+
username = login if (login or account is None) else account
|
263 |
+
|
264 |
+
# TODO(PY311): Remove this, as password will be empty string
|
265 |
+
# if not specified
|
266 |
+
if password is None:
|
267 |
+
password = ""
|
268 |
+
|
269 |
+
return BasicAuth(username, password)
|
270 |
+
|
271 |
+
|
272 |
+
def proxies_from_env() -> Dict[str, ProxyInfo]:
|
273 |
+
proxy_urls = {
|
274 |
+
k: URL(v)
|
275 |
+
for k, v in getproxies().items()
|
276 |
+
if k in ("http", "https", "ws", "wss")
|
277 |
+
}
|
278 |
+
netrc_obj = netrc_from_env()
|
279 |
+
stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
|
280 |
+
ret = {}
|
281 |
+
for proto, val in stripped.items():
|
282 |
+
proxy, auth = val
|
283 |
+
if proxy.scheme in ("https", "wss"):
|
284 |
+
client_logger.warning(
|
285 |
+
"%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
|
286 |
+
)
|
287 |
+
continue
|
288 |
+
if netrc_obj and auth is None:
|
289 |
+
if proxy.host is not None:
|
290 |
+
try:
|
291 |
+
auth = basicauth_from_netrc(netrc_obj, proxy.host)
|
292 |
+
except LookupError:
|
293 |
+
auth = None
|
294 |
+
ret[proto] = ProxyInfo(proxy, auth)
|
295 |
+
return ret
|
296 |
+
|
297 |
+
|
298 |
+
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
|
299 |
+
"""Get a permitted proxy for the given URL from the env."""
|
300 |
+
if url.host is not None and proxy_bypass(url.host):
|
301 |
+
raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
|
302 |
+
|
303 |
+
proxies_in_env = proxies_from_env()
|
304 |
+
try:
|
305 |
+
proxy_info = proxies_in_env[url.scheme]
|
306 |
+
except KeyError:
|
307 |
+
raise LookupError(f"No proxies found for `{url!s}` in the env")
|
308 |
+
else:
|
309 |
+
return proxy_info.proxy, proxy_info.proxy_auth
|
310 |
+
|
311 |
+
|
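A sketch of how the environment scan above behaves; the proxy host and credentials are made up, and getproxies() may also consult platform settings (e.g. the Windows registry) in addition to environment variables:

import os
os.environ["HTTP_PROXY"] = "http://scott:tiger@proxy.local:3128"   # illustrative proxy
for proto, info in proxies_from_env().items():
    print(proto, info.proxy, info.proxy_auth)
# http http://proxy.local:3128 BasicAuth(login='scott', password='tiger', encoding='latin1')
# note: https/wss scheme proxies would be skipped with a warning, per the code above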
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class MimeType:
+    type: str
+    subtype: str
+    suffix: str
+    parameters: "MultiDictProxy[str]"
+
+
+@functools.lru_cache(maxsize=56)
+def parse_mimetype(mimetype: str) -> MimeType:
+    """Parses a MIME type into its components.
+
+    mimetype is a MIME type string.
+
+    Returns a MimeType object.
+
+    Example:
+
+    >>> parse_mimetype('text/html; charset=utf-8')
+    MimeType(type='text', subtype='html', suffix='',
+             parameters={'charset': 'utf-8'})
+
+    """
+    if not mimetype:
+        return MimeType(
+            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
+        )
+
+    parts = mimetype.split(";")
+    params: MultiDict[str] = MultiDict()
+    for item in parts[1:]:
+        if not item:
+            continue
+        key, _, value = item.partition("=")
+        params.add(key.lower().strip(), value.strip(' "'))
+
+    fulltype = parts[0].strip().lower()
+    if fulltype == "*":
+        fulltype = "*/*"
+
+    mtype, _, stype = fulltype.partition("/")
+    stype, _, suffix = stype.partition("+")
+
+    return MimeType(
+        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
+    )
+
+
+@functools.lru_cache(maxsize=56)
+def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]:
+    """Parse Content-Type header.
+
+    Returns a tuple of the parsed content type and a
+    MappingProxyType of parameters.
+    """
+    msg = HeaderParser().parsestr(f"Content-Type: {raw}")
+    content_type = msg.get_content_type()
+    params = msg.get_params(())
+    content_dict = dict(params[1:])  # First element is content type again
+    return content_type, MappingProxyType(content_dict)
+
+
+def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
+    name = getattr(obj, "name", None)
+    if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
+        return Path(name).name
+    return default
+
+
+not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
+QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
+
+
+def quoted_string(content: str) -> str:
+    """Return 7-bit content as quoted-string.
+
+    Format content into a quoted-string as defined in RFC5322 for
+    Internet Message Format. Notice that this is not the 8-bit HTTP
+    format, but the 7-bit email format. Content must be in usascii or
+    a ValueError is raised.
+    """
+    if not (QCONTENT > set(content)):
+        raise ValueError(f"bad content for quoted-string {content!r}")
+    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
+
+
+def content_disposition_header(
+    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
+) -> str:
+    """Sets ``Content-Disposition`` header for MIME.
+
+    This is the MIME payload Content-Disposition header from RFC 2183
+    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
+    RFC 6266.
+
+    disptype is a disposition type: inline, attachment, form-data.
+    Should be valid extension token (see RFC 2183)
+
+    quote_fields performs value quoting to 7-bit MIME headers
+    according to RFC 7578. Set quote_fields to False if the recipient
+    can take 8-bit file names and field values.
+
+    _charset specifies the charset to use when quote_fields is True.
+
+    params is a dict with disposition params.
+    """
+    if not disptype or not (TOKEN > set(disptype)):
+        raise ValueError(f"bad content disposition type {disptype!r}")
+
+    value = disptype
+    if params:
+        lparams = []
+        for key, val in params.items():
+            if not key or not (TOKEN > set(key)):
+                raise ValueError(f"bad content disposition parameter {key!r}={val!r}")
+            if quote_fields:
+                if key.lower() == "filename":
+                    qval = quote(val, "", encoding=_charset)
+                    lparams.append((key, '"%s"' % qval))
+                else:
+                    try:
+                        qval = quoted_string(val)
+                    except ValueError:
+                        qval = "".join(
+                            (_charset, "''", quote(val, "", encoding=_charset))
+                        )
+                        lparams.append((key + "*", qval))
+                    else:
+                        lparams.append((key, '"%s"' % qval))
+            else:
+                qval = val.replace("\\", "\\\\").replace('"', '\\"')
+                lparams.append((key, '"%s"' % qval))
+        sparams = "; ".join("=".join(pair) for pair in lparams)
+        value = "; ".join((value, sparams))
+    return value
+
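Two quick illustrations of the parsers above (inputs are illustrative):

mt = parse_mimetype("application/vnd.api+json; charset=utf-8")
(mt.type, mt.subtype, mt.suffix)    # ('application', 'vnd.api', 'json')
mt.parameters["charset"]            # 'utf-8'
content_disposition_header("form-data", name="file", filename="r\u00e9sum\u00e9.txt")
# 'form-data; name="file"; filename="r%C3%A9sum%C3%A9.txt"'  (non-ASCII percent-quoted)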
+def is_ip_address(host: Optional[str]) -> bool:
+    """Check if host looks like an IP Address.
+
+    This check is only meant as a heuristic to ensure that
+    a host is not a domain name.
+    """
+    if not host:
+        return False
+    # For a host to be an ipv4 address, it must be all numeric.
+    # The host must contain a colon to be an IPv6 address.
+    return ":" in host or host.replace(".", "").isdigit()
+
+
+_cached_current_datetime: Optional[int] = None
+_cached_formatted_datetime = ""
+
+
+def rfc822_formatted_time() -> str:
+    global _cached_current_datetime
+    global _cached_formatted_datetime
+
+    now = int(time.time())
+    if now != _cached_current_datetime:
+        # Weekday and month names for HTTP date/time formatting;
+        # always English!
+        # Tuples are constants stored in codeobject!
+        _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
+        _monthname = (
+            "",  # Dummy so we can use 1-based month numbers
+            "Jan",
+            "Feb",
+            "Mar",
+            "Apr",
+            "May",
+            "Jun",
+            "Jul",
+            "Aug",
+            "Sep",
+            "Oct",
+            "Nov",
+            "Dec",
+        )
+
+        year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
+        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
+            _weekdayname[wd],
+            day,
+            _monthname[month],
+            year,
+            hh,
+            mm,
+            ss,
+        )
+        _cached_current_datetime = now
+    return _cached_formatted_datetime
+
+
+def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
+    ref, name = info
+    ob = ref()
+    if ob is not None:
+        with suppress(Exception):
+            getattr(ob, name)()
+
+
+def weakref_handle(
+    ob: object,
+    name: str,
+    timeout: float,
+    loop: asyncio.AbstractEventLoop,
+    timeout_ceil_threshold: float = 5,
+) -> Optional[asyncio.TimerHandle]:
+    if timeout is not None and timeout > 0:
+        when = loop.time() + timeout
+        if timeout >= timeout_ceil_threshold:
+            when = ceil(when)
+
+        return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
+    return None
+
+
+def call_later(
+    cb: Callable[[], Any],
+    timeout: float,
+    loop: asyncio.AbstractEventLoop,
+    timeout_ceil_threshold: float = 5,
+) -> Optional[asyncio.TimerHandle]:
+    if timeout is None or timeout <= 0:
+        return None
+    now = loop.time()
+    when = calculate_timeout_when(now, timeout, timeout_ceil_threshold)
+    return loop.call_at(when, cb)
+
+
+def calculate_timeout_when(
+    loop_time: float,
+    timeout: float,
+    timeout_ceiling_threshold: float,
+) -> float:
+    """Calculate when to execute a timeout."""
+    when = loop_time + timeout
+    if timeout > timeout_ceiling_threshold:
+        return ceil(when)
+    return when
+
+
+class TimeoutHandle:
+    """Timeout handle"""
+
+    __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks")
+
+    def __init__(
+        self,
+        loop: asyncio.AbstractEventLoop,
+        timeout: Optional[float],
+        ceil_threshold: float = 5,
+    ) -> None:
+        self._timeout = timeout
+        self._loop = loop
+        self._ceil_threshold = ceil_threshold
+        self._callbacks: List[
+            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
+        ] = []
+
+    def register(
+        self, callback: Callable[..., None], *args: Any, **kwargs: Any
+    ) -> None:
+        self._callbacks.append((callback, args, kwargs))
+
+    def close(self) -> None:
+        self._callbacks.clear()
+
+    def start(self) -> Optional[asyncio.TimerHandle]:
+        timeout = self._timeout
+        if timeout is not None and timeout > 0:
+            when = self._loop.time() + timeout
+            if timeout >= self._ceil_threshold:
+                when = ceil(when)
+            return self._loop.call_at(when, self.__call__)
+        else:
+            return None
+
+    def timer(self) -> "BaseTimerContext":
+        if self._timeout is not None and self._timeout > 0:
+            timer = TimerContext(self._loop)
+            self.register(timer.timeout)
+            return timer
+        else:
+            return TimerNoop()
+
+    def __call__(self) -> None:
+        for cb, args, kwargs in self._callbacks:
+            with suppress(Exception):
+                cb(*args, **kwargs)
+
+        self._callbacks.clear()
+
+
+class BaseTimerContext(ContextManager["BaseTimerContext"]):
+
+    __slots__ = ()
+
+    def assert_timeout(self) -> None:
+        """Raise TimeoutError if timeout has been exceeded."""
+
+
+class TimerNoop(BaseTimerContext):
+
+    __slots__ = ()
+
+    def __enter__(self) -> BaseTimerContext:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
+        return
+
+
+class TimerContext(BaseTimerContext):
+    """Low resolution timeout context manager"""
+
+    __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling")
+
+    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+        self._loop = loop
+        self._tasks: List[asyncio.Task[Any]] = []
+        self._cancelled = False
+        self._cancelling = 0
+
+    def assert_timeout(self) -> None:
+        """Raise TimeoutError if timer has already been cancelled."""
+        if self._cancelled:
+            raise asyncio.TimeoutError from None
+
+    def __enter__(self) -> BaseTimerContext:
+        task = asyncio.current_task(loop=self._loop)
+        if task is None:
+            raise RuntimeError("Timeout context manager should be used inside a task")
+
+        if sys.version_info >= (3, 11):
+            # Remember if the task was already cancelling
+            # so when we __exit__ we can decide if we should
+            # raise asyncio.TimeoutError or let the cancellation propagate
+            self._cancelling = task.cancelling()
+
+        if self._cancelled:
+            raise asyncio.TimeoutError from None
+
+        self._tasks.append(task)
+        return self
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> Optional[bool]:
+        enter_task: Optional[asyncio.Task[Any]] = None
+        if self._tasks:
+            enter_task = self._tasks.pop()
+
+        if exc_type is asyncio.CancelledError and self._cancelled:
+            assert enter_task is not None
+            # The timeout was hit, and the task was cancelled
+            # so we need to uncancel the last task that entered the context manager
+            # since the cancellation should not leak out of the context manager
+            if sys.version_info >= (3, 11):
+                # If the task was already cancelling don't raise
+                # asyncio.TimeoutError and instead return None
+                # to allow the cancellation to propagate
+                if enter_task.uncancel() > self._cancelling:
+                    return None
+            raise asyncio.TimeoutError from exc_val
+        return None
+
+    def timeout(self) -> None:
+        if not self._cancelled:
+            for task in set(self._tasks):
+                task.cancel()
+
+            self._cancelled = True
+
+
+def ceil_timeout(
+    delay: Optional[float], ceil_threshold: float = 5
+) -> async_timeout.Timeout:
+    if delay is None or delay <= 0:
+        return async_timeout.timeout(None)
+
+    loop = asyncio.get_running_loop()
+    now = loop.time()
+    when = now + delay
+    if delay > ceil_threshold:
+        when = ceil(when)
+    return async_timeout.timeout_at(when)
+
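The ceiling behaviour in calculate_timeout_when rounds long deadlines up to whole seconds, so many timers collapse onto the same loop.call_at() slot; for example:

calculate_timeout_when(100.2, 1.5, 5)    # 101.7  (below the 5 s threshold: exact deadline)
calculate_timeout_when(100.2, 10.0, 5)   # 111    (above threshold: ceil(110.2))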
+class HeadersMixin:
+    """Mixin for handling headers."""
+
+    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
+
+    _headers: MultiMapping[str]
+    _content_type: Optional[str] = None
+    _content_dict: Optional[Dict[str, str]] = None
+    _stored_content_type: Union[str, None, _SENTINEL] = sentinel
+
+    def _parse_content_type(self, raw: Optional[str]) -> None:
+        self._stored_content_type = raw
+        if raw is None:
+            # default value according to RFC 2616
+            self._content_type = "application/octet-stream"
+            self._content_dict = {}
+        else:
+            content_type, content_mapping_proxy = parse_content_type(raw)
+            self._content_type = content_type
+            # _content_dict needs to be mutable so we can update it
+            self._content_dict = content_mapping_proxy.copy()
+
+    @property
+    def content_type(self) -> str:
+        """The value of content part for Content-Type HTTP header."""
+        raw = self._headers.get(hdrs.CONTENT_TYPE)
+        if self._stored_content_type != raw:
+            self._parse_content_type(raw)
+        assert self._content_type is not None
+        return self._content_type
+
+    @property
+    def charset(self) -> Optional[str]:
+        """The value of charset part for Content-Type HTTP header."""
+        raw = self._headers.get(hdrs.CONTENT_TYPE)
+        if self._stored_content_type != raw:
+            self._parse_content_type(raw)
+        assert self._content_dict is not None
+        return self._content_dict.get("charset")
+
+    @property
+    def content_length(self) -> Optional[int]:
+        """The value of Content-Length HTTP header."""
+        content_length = self._headers.get(hdrs.CONTENT_LENGTH)
+        return None if content_length is None else int(content_length)
+
+
+def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
+    if not fut.done():
+        fut.set_result(result)
+
+
+_EXC_SENTINEL = BaseException()
+
+
+class ErrorableProtocol(Protocol):
+    def set_exception(
+        self,
+        exc: BaseException,
+        exc_cause: BaseException = ...,
+    ) -> None: ...  # pragma: no cover
+
+
+def set_exception(
+    fut: "asyncio.Future[_T] | ErrorableProtocol",
+    exc: BaseException,
+    exc_cause: BaseException = _EXC_SENTINEL,
+) -> None:
+    """Set future exception.
+
+    If the future is marked as complete, this function is a no-op.
+
+    :param exc_cause: An exception that is a direct cause of ``exc``.
+        Only set if provided.
+    """
+    if asyncio.isfuture(fut) and fut.done():
+        return
+
+    exc_is_sentinel = exc_cause is _EXC_SENTINEL
+    exc_causes_itself = exc is exc_cause
+    if not exc_is_sentinel and not exc_causes_itself:
+        exc.__cause__ = exc_cause
+
+    fut.set_exception(exc)
+
+
@functools.total_ordering
|
797 |
+
class AppKey(Generic[_T]):
|
798 |
+
"""Keys for static typing support in Application."""
|
799 |
+
|
800 |
+
__slots__ = ("_name", "_t", "__orig_class__")
|
801 |
+
|
802 |
+
# This may be set by Python when instantiating with a generic type. We need to
|
803 |
+
# support this, in order to support types that are not concrete classes,
|
804 |
+
# like Iterable, which can't be passed as the second parameter to __init__.
|
805 |
+
__orig_class__: Type[object]
|
806 |
+
|
807 |
+
def __init__(self, name: str, t: Optional[Type[_T]] = None):
|
808 |
+
# Prefix with module name to help deduplicate key names.
|
809 |
+
frame = inspect.currentframe()
|
810 |
+
while frame:
|
811 |
+
if frame.f_code.co_name == "<module>":
|
812 |
+
module: str = frame.f_globals["__name__"]
|
813 |
+
break
|
814 |
+
frame = frame.f_back
|
815 |
+
|
816 |
+
self._name = module + "." + name
|
817 |
+
self._t = t
|
818 |
+
|
819 |
+
def __lt__(self, other: object) -> bool:
|
820 |
+
if isinstance(other, AppKey):
|
821 |
+
return self._name < other._name
|
822 |
+
return True # Order AppKey above other types.
|
823 |
+
|
824 |
+
def __repr__(self) -> str:
|
825 |
+
t = self._t
|
826 |
+
if t is None:
|
827 |
+
with suppress(AttributeError):
|
828 |
+
# Set to type arg.
|
829 |
+
t = get_args(self.__orig_class__)[0]
|
830 |
+
|
831 |
+
if t is None:
|
832 |
+
t_repr = "<<Unknown>>"
|
833 |
+
elif isinstance(t, type):
|
834 |
+
if t.__module__ == "builtins":
|
835 |
+
t_repr = t.__qualname__
|
836 |
+
else:
|
837 |
+
t_repr = f"{t.__module__}.{t.__qualname__}"
|
838 |
+
else:
|
839 |
+
t_repr = repr(t)
|
840 |
+
return f"<AppKey({self._name}, type={t_repr})>"
|
841 |
+
|
842 |
+
|
843 |
+
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
|
844 |
+
__slots__ = ("_maps",)
|
845 |
+
|
846 |
+
def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
|
847 |
+
self._maps = tuple(maps)
|
848 |
+
|
849 |
+
def __init_subclass__(cls) -> None:
|
850 |
+
raise TypeError(
|
851 |
+
"Inheritance class {} from ChainMapProxy "
|
852 |
+
"is forbidden".format(cls.__name__)
|
853 |
+
)
|
854 |
+
|
855 |
+
@overload # type: ignore[override]
|
856 |
+
def __getitem__(self, key: AppKey[_T]) -> _T: ...
|
857 |
+
|
858 |
+
@overload
|
859 |
+
def __getitem__(self, key: str) -> Any: ...
|
860 |
+
|
861 |
+
def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
|
862 |
+
for mapping in self._maps:
|
863 |
+
try:
|
864 |
+
return mapping[key]
|
865 |
+
except KeyError:
|
866 |
+
pass
|
867 |
+
raise KeyError(key)
|
868 |
+
|
869 |
+
@overload # type: ignore[override]
|
870 |
+
def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ...
|
871 |
+
|
872 |
+
@overload
|
873 |
+
def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...
|
874 |
+
|
875 |
+
@overload
|
876 |
+
def get(self, key: str, default: Any = ...) -> Any: ...
|
877 |
+
|
878 |
+
def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
|
879 |
+
try:
|
880 |
+
return self[key]
|
881 |
+
except KeyError:
|
882 |
+
return default
|
883 |
+
|
884 |
+
def __len__(self) -> int:
|
885 |
+
# reuses stored hash values if possible
|
886 |
+
return len(set().union(*self._maps))
|
887 |
+
|
888 |
+
def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
|
889 |
+
d: Dict[Union[str, AppKey[Any]], Any] = {}
|
890 |
+
for mapping in reversed(self._maps):
|
891 |
+
# reuses stored hash values if possible
|
892 |
+
d.update(mapping)
|
893 |
+
return iter(d)
|
894 |
+
|
895 |
+
def __contains__(self, key: object) -> bool:
|
896 |
+
return any(key in m for m in self._maps)
|
897 |
+
|
898 |
+
def __bool__(self) -> bool:
|
899 |
+
return any(self._maps)
|
900 |
+
|
901 |
+
def __repr__(self) -> str:
|
902 |
+
content = ", ".join(map(repr, self._maps))
|
903 |
+
return f"ChainMapProxy({content})"
|
904 |
+
|
905 |
+
|
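A sketch of how AppKey and ChainMapProxy combine (names and values are illustrative):

config_key = AppKey("config_key", dict)        # key is namespaced by its defining module
overrides = {config_key: {"debug": True}, "legacy": 1}
defaults = {config_key: {"debug": False}}
proxy = ChainMapProxy([overrides, defaults])   # lookups try each mapping in order
proxy[config_key]                              # {'debug': True}  (first mapping wins)
proxy.get("missing", "fallback")               # 'fallback'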
+# https://tools.ietf.org/html/rfc7232#section-2.3
+_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
+_ETAGC_RE = re.compile(_ETAGC)
+_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
+QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
+LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
+
+ETAG_ANY = "*"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ETag:
+    value: str
+    is_weak: bool = False
+
+
+def validate_etag_value(value: str) -> None:
+    if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
+        raise ValueError(
+            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
+        )
+
+
+def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
+    """Process a date string, return a datetime object"""
+    if date_str is not None:
+        timetuple = parsedate(date_str)
+        if timetuple is not None:
+            with suppress(ValueError):
+                return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
+    return None
+
+
+@functools.lru_cache
+def must_be_empty_body(method: str, code: int) -> bool:
+    """Check if a request must return an empty body."""
+    return (
+        code in EMPTY_BODY_STATUS_CODES
+        or method in EMPTY_BODY_METHODS
+        or (200 <= code < 300 and method in hdrs.METH_CONNECT_ALL)
+    )
+
+
+def should_remove_content_length(method: str, code: int) -> bool:
+    """Check if a Content-Length header should be removed.
+
+    This should always be a subset of must_be_empty_body
+    """
+    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
+    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
+    return code in EMPTY_BODY_STATUS_CODES or (
+        200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
+    )
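A few sample evaluations of the two predicates above:

must_be_empty_body("GET", 204)               # True: 204/304/1xx never carry a body
must_be_empty_body("head", 200)              # True: any case spelling of HEAD matches
must_be_empty_body("GET", 200)               # False
should_remove_content_length("HEAD", 200)    # False: the header stays even though the body is omitted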
venv/Lib/site-packages/aiohttp/http.py
ADDED
@@ -0,0 +1,72 @@
+import sys
+from http import HTTPStatus
+from typing import Mapping, Tuple
+
+from . import __version__
+from .http_exceptions import HttpProcessingError as HttpProcessingError
+from .http_parser import (
+    HeadersParser as HeadersParser,
+    HttpParser as HttpParser,
+    HttpRequestParser as HttpRequestParser,
+    HttpResponseParser as HttpResponseParser,
+    RawRequestMessage as RawRequestMessage,
+    RawResponseMessage as RawResponseMessage,
+)
+from .http_websocket import (
+    WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
+    WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
+    WS_KEY as WS_KEY,
+    WebSocketError as WebSocketError,
+    WebSocketReader as WebSocketReader,
+    WebSocketWriter as WebSocketWriter,
+    WSCloseCode as WSCloseCode,
+    WSMessage as WSMessage,
+    WSMsgType as WSMsgType,
+    ws_ext_gen as ws_ext_gen,
+    ws_ext_parse as ws_ext_parse,
+)
+from .http_writer import (
+    HttpVersion as HttpVersion,
+    HttpVersion10 as HttpVersion10,
+    HttpVersion11 as HttpVersion11,
+    StreamWriter as StreamWriter,
+)
+
+__all__ = (
+    "HttpProcessingError",
+    "RESPONSES",
+    "SERVER_SOFTWARE",
+    # .http_writer
+    "StreamWriter",
+    "HttpVersion",
+    "HttpVersion10",
+    "HttpVersion11",
+    # .http_parser
+    "HeadersParser",
+    "HttpParser",
+    "HttpRequestParser",
+    "HttpResponseParser",
+    "RawRequestMessage",
+    "RawResponseMessage",
+    # .http_websocket
+    "WS_CLOSED_MESSAGE",
+    "WS_CLOSING_MESSAGE",
+    "WS_KEY",
+    "WebSocketReader",
+    "WebSocketWriter",
+    "ws_ext_gen",
+    "ws_ext_parse",
+    "WSMessage",
+    "WebSocketError",
+    "WSMsgType",
+    "WSCloseCode",
+)
+
+
+SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
+    sys.version_info, __version__
+)
+
+RESPONSES: Mapping[int, Tuple[str, str]] = {
+    v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
+}
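For reference, the two module constants evaluate roughly as follows (the exact SERVER_SOFTWARE string depends on the running interpreter):

SERVER_SOFTWARE    # e.g. 'Python/3.12 aiohttp/3.11.18'
RESPONSES[404]     # ('Not Found', 'Nothing matches the given URI')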
venv/Lib/site-packages/aiohttp/http_exceptions.py
ADDED
@@ -0,0 +1,112 @@
+"""Low-level http related exceptions."""
+
+from textwrap import indent
+from typing import Optional, Union
+
+from .typedefs import _CIMultiDict
+
+__all__ = ("HttpProcessingError",)
+
+
+class HttpProcessingError(Exception):
+    """HTTP error.
+
+    Shortcut for raising HTTP errors with custom code, message and headers.
+
+    code: HTTP Error code.
+    message: (optional) Error message.
+    headers: (optional) Headers to be sent in response, a list of pairs
+    """
+
+    code = 0
+    message = ""
+    headers = None
+
+    def __init__(
+        self,
+        *,
+        code: Optional[int] = None,
+        message: str = "",
+        headers: Optional[_CIMultiDict] = None,
+    ) -> None:
+        if code is not None:
+            self.code = code
+        self.headers = headers
+        self.message = message
+
+    def __str__(self) -> str:
+        msg = indent(self.message, "  ")
+        return f"{self.code}, message:\n{msg}"
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
+
+
+class BadHttpMessage(HttpProcessingError):
+
+    code = 400
+    message = "Bad Request"
+
+    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
+        super().__init__(message=message, headers=headers)
+        self.args = (message,)
+
+
+class HttpBadRequest(BadHttpMessage):
+
+    code = 400
+    message = "Bad Request"
+
+
+class PayloadEncodingError(BadHttpMessage):
+    """Base class for payload errors"""
+
+
+class ContentEncodingError(PayloadEncodingError):
+    """Content encoding error."""
+
+
+class TransferEncodingError(PayloadEncodingError):
+    """Transfer encoding error."""
+
+
+class ContentLengthError(PayloadEncodingError):
+    """Not enough data to satisfy the Content-Length header."""
+
+
+class LineTooLong(BadHttpMessage):
+    def __init__(
+        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
+    ) -> None:
+        super().__init__(
+            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
+        )
+        self.args = (line, limit, actual_size)
+
+
+class InvalidHeader(BadHttpMessage):
+    def __init__(self, hdr: Union[bytes, str]) -> None:
+        hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
+        super().__init__(f"Invalid HTTP header: {hdr!r}")
+        self.hdr = hdr_s
+        self.args = (hdr,)
+
+
+class BadStatusLine(BadHttpMessage):
+    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
+        if not isinstance(line, str):
+            line = repr(line)
+        super().__init__(error or f"Bad status line {line!r}")
+        self.args = (line,)
+        self.line = line
+
+
+class BadHttpMethod(BadStatusLine):
+    """Invalid HTTP method in status line."""
+
+    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
+        super().__init__(line, error or f"Bad HTTP method in status line {line!r}")
+
+
+class InvalidURLError(BadHttpMessage):
+    pass
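A small illustration of the string forms defined above:

err = HttpProcessingError(code=400, message="bad header line")
str(err)     # '400, message:\n  bad header line'  (message indented under the code)
repr(err)    # "<HttpProcessingError: 400, message='bad header line'>"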
venv/Lib/site-packages/aiohttp/http_parser.py
ADDED
@@ -0,0 +1,1046 @@
import abc
import asyncio
import re
import string
from contextlib import suppress
from enum import IntEnum
from typing import (
    Any,
    ClassVar,
    Final,
    Generic,
    List,
    Literal,
    NamedTuple,
    Optional,
    Pattern,
    Set,
    Tuple,
    Type,
    TypeVar,
    Union,
)

from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL

from . import hdrs
from .base_protocol import BaseProtocol
from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
from .helpers import (
    _EXC_SENTINEL,
    DEBUG,
    EMPTY_BODY_METHODS,
    EMPTY_BODY_STATUS_CODES,
    NO_EXTENSIONS,
    BaseTimerContext,
    set_exception,
)
from .http_exceptions import (
    BadHttpMessage,
    BadHttpMethod,
    BadStatusLine,
    ContentEncodingError,
    ContentLengthError,
    InvalidHeader,
    InvalidURLError,
    LineTooLong,
    TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import RawHeaders

__all__ = (
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
)

_SEP = Literal[b"\r\n", b"\n"]

ASCIISET: Final[Set[str]] = set(string.printable)

# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#         "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")


class RawRequestMessage(NamedTuple):
    method: str
    path: str
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool
    url: URL


class RawResponseMessage(NamedTuple):
    version: HttpVersion
    code: int
    reason: str
    headers: CIMultiDictProxy[str]
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool


_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)


class ParseState(IntEnum):

    PARSE_NONE = 0
    PARSE_LENGTH = 1
    PARSE_CHUNKED = 2
    PARSE_UNTIL_EOF = 3


class ChunkState(IntEnum):
    PARSE_CHUNKED_SIZE = 0
    PARSE_CHUNKED_CHUNK = 1
    PARSE_CHUNKED_CHUNK_EOF = 2
    PARSE_MAYBE_TRAILERS = 3
    PARSE_TRAILERS = 4


class HeadersParser:
    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lax: bool = False,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self._lax = lax

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        headers: CIMultiDict[str] = CIMultiDict()
        # note: "raw" does not mean inclusion of OWS before/after the field value
        raw_headers = []

        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            if len(bname) == 0:
                raise InvalidHeader(bname)

            # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
            if {bname[0], bname[-1]} & {32, 9}:  # {" ", "\t"}
                raise InvalidHeader(line)

            bvalue = bvalue.lstrip(b" \t")
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "backslashreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )
            name = bname.decode("utf-8", "surrogateescape")
            if not TOKENRE.fullmatch(name):
                raise InvalidHeader(bname)

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines
            continuation = self._lax and line and line[0] in (32, 9)  # (' ', '\t')

            # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "backslashreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "backslashreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip(b" \t")
            value = bvalue.decode("utf-8", "surrogateescape")

            # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
            if "\n" in value or "\r" in value or "\x00" in value:
                raise InvalidHeader(bvalue)

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))


def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
    """Check if the upgrade header is supported."""
    return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"}


class HttpParser(abc.ABC, Generic[_MsgT]):
    lax: ClassVar[bool] = False

    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2**16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        self._lines: List[bytes] = []
        self._tail = b""
        self._upgraded = False
        self._payload = None
        self._payload_parser: Optional[HttpPayloadParser] = None
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(
            max_line_size, max_headers, max_field_size, self.lax
        )

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT: ...

    @abc.abstractmethod
    def _is_chunked_te(self, te: str) -> bool: ...

    def feed_eof(self) -> Optional[_MsgT]:
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                if self._lines[-1] != "\r\n":
                    self._lines.append(b"")
                with suppress(Exception):
                    return self.parse_message(self._lines)
        return None

    def feed_data(
        self,
        data: bytes,
        SEP: _SEP = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:

        messages = []

        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        should_close = False
        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + len(SEP)
                    continue

                if pos >= start_pos:
                    if should_close:
                        raise BadHttpMessage("Data after `Connection: close`")

                    # line found
                    line = data[start_pos:pos]
                    if SEP == b"\n":  # For lax response parsing
                        line = line.rstrip(b"\r")
                    self._lines.append(line)
                    start_pos = pos + len(SEP)

                    # \r\n\r\n found
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            # Shouldn't allow +/- or other number formats.
                            # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
                            # msg.headers is already stripped of leading/trailing wsp
                            if not DIGITS.fullmatch(length_hdr):
                                raise InvalidHeader(CONTENT_LENGTH)

                            return int(length_hdr)

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade and _is_supported_upgrade(
                            msg.headers
                        )

                        method = getattr(msg, "method", self.method)
                        # code is only present on responses
                        code = getattr(msg, "code", 0)

                        assert self.protocol is not None
                        # calculate payload
                        empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
                            method and method in EMPTY_BODY_METHODS
                        )
                        if not empty_body and (
                            ((length is not None and length > 0) or msg.chunked)
                            and not self._upgraded
                        ):
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                        elif not empty_body and length is None and self.read_until_eof:
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        else:
                            payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                        should_close = msg.should_close
                else:
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
                except BaseException as underlying_exc:
                    reraised_exc = underlying_exc
                    if self.payload_exception is not None:
                        reraised_exc = self.payload_exception(str(underlying_exc))

                    set_exception(
                        self._payload_parser.payload,
                        reraised_exc,
                        underlying_exc,
                    )

                    eof = True
                    data = b""

                if eof:
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
            else:
                break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns the parsed headers
        and raw header pairs; header lookup is case-insensitive.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None
        encoding = None
        upgrade = False
        chunked = False

        # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
        # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
        singletons = (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_LOCATION,
            hdrs.CONTENT_RANGE,
            hdrs.CONTENT_TYPE,
            hdrs.ETAG,
            hdrs.HOST,
            hdrs.MAX_FORWARDS,
            hdrs.SERVER,
            hdrs.TRANSFER_ENCODING,
            hdrs.USER_AGENT,
        )
        bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
        if bad_hdr is not None:
            raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
            elif v == "upgrade" and headers.get(hdrs.UPGRADE):
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if self._is_chunked_te(te):
                chunked = True

            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Transfer-Encoding can't be present with Content-Length",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val


class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        # request line
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(" ", maxsplit=2)
        except ValueError:
            raise BadHttpMethod(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method
        if not TOKENRE.fullmatch(method):
            raise BadHttpMethod(method)

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        elif path == "*" and method == "OPTIONS":
            # asterisk-form,
            url = URL(path, encoded=True)
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)
            if url.scheme == "":
                # not absolute-form
                raise InvalidURLError(
                    path.encode(errors="surrogateescape").decode("latin1")
                )

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP/1.0 closes by default
                close = True
            else:  # HTTP/1.1 keeps the connection open by default
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )

    def _is_chunked_te(self, te: str) -> bool:
        if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
            return True
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
        raise BadHttpMessage("Request has invalid `Transfer-Encoding`")


class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    # Lax mode should only be enabled on response parser.
    lax = not DEBUG

    def feed_data(
        self,
        data: bytes,
        SEP: Optional[_SEP] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
        if SEP is None:
            SEP = b"\r\n" if DEBUG else b"\n"
        return super().feed_data(data, SEP, *args, **kwargs)

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(maxsplit=1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(maxsplit=1)
        except ValueError:
            status = status.strip()
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit ASCII number, no padding
        if len(status) != 3 or not DIGITS.fullmatch(status):
            raise BadStatusLine(line)
        status_i = int(status)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            if version_o <= HttpVersion10:
                close = True
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
            elif 100 <= status_i < 200 or status_i in {204, 304}:
                close = False
            elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
                close = False
            else:
                # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
                close = True

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )

    def _is_chunked_te(self, te: str) -> bool:
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
        return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"


class HttpPayloadParser:
    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        response_with_body: bool = True,
        auto_decompress: bool = True,
        lax: bool = False,
    ) -> None:
        self._length = 0
        self._type = ParseState.PARSE_UNTIL_EOF
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        self._lax = lax
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                payload, compression
            )
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True
        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data to satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data to satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                            # Verify no LF in the chunk-extension
                            if b"\n" in (ext := chunk[i:pos]):
                                exc = BadHttpMessage(
                                    f"Unexpected LF in chunk-extension: {ext!r}"
                                )
                                set_exception(self.payload, exc)
                                raise exc
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        if not re.fullmatch(HEXDIGITS, size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if self._lax and chunk.startswith(b"\r"):
                        chunk = chunk[1:]
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers need to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""


class DeflateBuffer:
    """Decompresses a stream and feeds the data into the specified stream."""

    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        self.out = out
        self.size = 0
        self.encoding = encoding
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )
            self.decompressor = BrotliDecompressor()
        else:
            self.decompressor = ZLibDecompressor(encoding=encoding)

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = window size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            chunk = self.decompressor.decompress_sync(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()


HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    pass
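
A quick standalone sketch (not part of the diff) of how the HeadersParser above consumes pre-split lines; it assumes the vendored aiohttp is importable. parse_headers() skips lines[0] (the request line) and stops at the first empty element, so the list must end with b"":

    from aiohttp.http_parser import HeadersParser

    lines = [
        b"GET / HTTP/1.1",           # ignored by parse_headers
        b"Host: example.com",
        b"Accept-Encoding: gzip",
        b"",                         # terminator
    ]
    headers, raw_headers = HeadersParser().parse_headers(lines)
    print(headers["host"])   # case-insensitive lookup -> 'example.com'
    print(raw_headers)       # ((b'Host', b'example.com'), (b'Accept-Encoding', b'gzip'))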
venv/Lib/site-packages/aiohttp/http_websocket.py
ADDED
@@ -0,0 +1,36 @@
"""WebSocket protocol versions 13 and 8."""

from ._websocket.helpers import WS_KEY, ws_ext_gen, ws_ext_parse
from ._websocket.models import (
    WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE,
    WebSocketError,
    WSCloseCode,
    WSHandshakeError,
    WSMessage,
    WSMsgType,
)
from ._websocket.reader import WebSocketReader
from ._websocket.writer import WebSocketWriter

# Messages that the WebSocketResponse.receive needs to handle internally
_INTERNAL_RECEIVE_TYPES = frozenset(
    (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.PING, WSMsgType.PONG)
)


__all__ = (
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSHandshakeError",
    "WSMessage",
)
venv/Lib/site-packages/aiohttp/http_writer.py
ADDED
@@ -0,0 +1,249 @@
"""Http related parsers and protocol."""

import asyncio
import sys
import zlib
from typing import (  # noqa
    Any,
    Awaitable,
    Callable,
    Iterable,
    List,
    NamedTuple,
    Optional,
    Union,
)

from multidict import CIMultiDict

from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .client_exceptions import ClientConnectionResetError
from .compression_utils import ZLibCompressor
from .helpers import NO_EXTENSIONS

__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")


MIN_PAYLOAD_FOR_WRITELINES = 2048
IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2)
IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9)
SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9
# writelines is not safe for use
# on Python 3.12+ until 3.12.9
# on Python 3.13+ until 3.13.2
# and on older versions it is not any faster than write
# CVE-2024-12254: https://github.com/python/cpython/pull/127656


class HttpVersion(NamedTuple):
    major: int
    minor: int


HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]


class StreamWriter(AbstractStreamWriter):

    length: Optional[int] = None
    chunked: bool = False
    _eof: bool = False
    _compress: Optional[ZLibCompressor] = None

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol
        self.loop = loop
        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)

    def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    def _writelines(self, chunks: Iterable[bytes]) -> None:
        size = 0
        for chunk in chunks:
            size += len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES:
            transport.write(b"".join(chunks))
        else:
            transport.writelines(chunks)

    async def write(
        self,
        chunk: Union[bytes, bytearray, memoryview],
        *,
        drain: bool = True,
        LIMIT: int = 0x10000,
    ) -> None:
        """Write a chunk of data to the stream.

        write_eof() indicates the end of the stream; the writer
        can't be used after write_eof() has been called.
        write() returns a drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = await self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                self._writelines(
                    (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n")
                )
            else:
                self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    def set_eof(self) -> None:
        """Indicate that the message is complete."""
        self._eof = True

    async def write_eof(self, chunk: bytes = b"") -> None:
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            chunks: List[bytes] = []
            chunks_len = 0
            if chunk and (compressed_chunk := await self._compress.compress(chunk)):
                chunks_len = len(compressed_chunk)
                chunks.append(compressed_chunk)

            flush_chunk = self._compress.flush()
            chunks_len += len(flush_chunk)
            chunks.append(flush_chunk)
            assert chunks_len

            if self.chunked:
                chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n"))
            elif len(chunks) > 1:
                self._writelines(chunks)
            else:
                self._write(chunks[0])
        elif self.chunked:
            if chunk:
                chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, chunk, b"\r\n0\r\n\r\n"))
            else:
                self._write(b"0\r\n\r\n")
        elif chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

            await w.write(data)
            await w.drain()
        """
        protocol = self._protocol
        if protocol.transport is not None and protocol._paused:
            await protocol._drain_helper()


def _safe_header(string: str) -> str:
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return detected in headers. "
            "Potential header injection attack."
        )
    return string


def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
    line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
    return line.encode("utf-8")


_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import-not-found]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    pass
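
A sketch (not part of the diff) of what the serializer above produces for a simple response head; _py_serialize_headers is the pure-Python fallback defined in this file:

    from multidict import CIMultiDict
    from aiohttp.http_writer import _py_serialize_headers

    headers = CIMultiDict({"Content-Type": "text/plain", "Content-Length": "5"})
    buf = _py_serialize_headers("HTTP/1.1 200 OK", headers)
    print(buf)
    # b'HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 5\r\n\r\n'

For chunked bodies, write() frames each chunk as a hex length line, the payload, and CRLF (b"5\r\nhello\r\n" for b"hello"), and write_eof() terminates the stream with b"0\r\n\r\n".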
venv/Lib/site-packages/aiohttp/log.py
ADDED
@@ -0,0 +1,8 @@
import logging

access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
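
These are plain stdlib loggers, so standard logging configuration applies; a minimal sketch (not part of the diff):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger("aiohttp.client").debug("visible once a handler is configured")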
venv/Lib/site-packages/aiohttp/multipart.py
ADDED
@@ -0,0 +1,1071 @@
1 |
+
import base64
|
2 |
+
import binascii
|
3 |
+
import json
|
4 |
+
import re
|
5 |
+
import sys
|
6 |
+
import uuid
|
7 |
+
import warnings
|
8 |
+
import zlib
|
9 |
+
from collections import deque
|
10 |
+
from types import TracebackType
|
11 |
+
from typing import (
|
12 |
+
TYPE_CHECKING,
|
13 |
+
Any,
|
14 |
+
Deque,
|
15 |
+
Dict,
|
16 |
+
Iterator,
|
17 |
+
List,
|
18 |
+
Mapping,
|
19 |
+
Optional,
|
20 |
+
Sequence,
|
21 |
+
Tuple,
|
22 |
+
Type,
|
23 |
+
Union,
|
24 |
+
cast,
|
25 |
+
)
|
26 |
+
from urllib.parse import parse_qsl, unquote, urlencode
|
27 |
+
|
28 |
+
from multidict import CIMultiDict, CIMultiDictProxy
|
29 |
+
|
30 |
+
from .compression_utils import ZLibCompressor, ZLibDecompressor
|
31 |
+
from .hdrs import (
|
32 |
+
CONTENT_DISPOSITION,
|
33 |
+
CONTENT_ENCODING,
|
34 |
+
CONTENT_LENGTH,
|
35 |
+
CONTENT_TRANSFER_ENCODING,
|
36 |
+
CONTENT_TYPE,
|
37 |
+
)
|
38 |
+
from .helpers import CHAR, TOKEN, parse_mimetype, reify
|
39 |
+
from .http import HeadersParser
|
40 |
+
from .payload import (
|
41 |
+
JsonPayload,
|
42 |
+
LookupError,
|
43 |
+
Order,
|
44 |
+
Payload,
|
45 |
+
StringPayload,
|
46 |
+
get_payload,
|
47 |
+
payload_type,
|
48 |
+
)
|
49 |
+
from .streams import StreamReader
|
50 |
+
|
51 |
+
if sys.version_info >= (3, 11):
|
52 |
+
from typing import Self
|
53 |
+
else:
|
54 |
+
from typing import TypeVar
|
55 |
+
|
56 |
+
Self = TypeVar("Self", bound="BodyPartReader")
|
57 |
+
|
58 |
+
__all__ = (
|
59 |
+
"MultipartReader",
|
60 |
+
"MultipartWriter",
|
61 |
+
"BodyPartReader",
|
62 |
+
"BadContentDispositionHeader",
|
63 |
+
"BadContentDispositionParam",
|
64 |
+
"parse_content_disposition",
|
65 |
+
"content_disposition_filename",
|
66 |
+
)
|
67 |
+
|
68 |
+
|
69 |
+
if TYPE_CHECKING:
|
70 |
+
from .client_reqrep import ClientResponse
|
71 |
+
|
72 |
+
|
73 |
+
class BadContentDispositionHeader(RuntimeWarning):
|
74 |
+
pass
|
75 |
+
|
76 |
+
|
77 |
+
class BadContentDispositionParam(RuntimeWarning):
|
78 |
+
pass
|
79 |
+
|
80 |
+
|
81 |
+
def parse_content_disposition(
|
82 |
+
header: Optional[str],
|
83 |
+
) -> Tuple[Optional[str], Dict[str, str]]:
|
84 |
+
def is_token(string: str) -> bool:
|
85 |
+
return bool(string) and TOKEN >= set(string)
|
86 |
+
|
87 |
+
def is_quoted(string: str) -> bool:
|
88 |
+
return string[0] == string[-1] == '"'
|
89 |
+
|
90 |
+
def is_rfc5987(string: str) -> bool:
|
91 |
+
return is_token(string) and string.count("'") == 2
|
92 |
+
|
93 |
+
def is_extended_param(string: str) -> bool:
|
94 |
+
return string.endswith("*")
|
95 |
+
|
96 |
+
def is_continuous_param(string: str) -> bool:
|
97 |
+
pos = string.find("*") + 1
|
98 |
+
if not pos:
|
99 |
+
return False
|
100 |
+
substring = string[pos:-1] if string.endswith("*") else string[pos:]
|
101 |
+
return substring.isdigit()
|
102 |
+
|
103 |
+
def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
|
104 |
+
return re.sub(f"\\\\([{chars}])", "\\1", text)
|
105 |
+
|
106 |
+
if not header:
|
107 |
+
return None, {}
|
108 |
+
|
109 |
+
disptype, *parts = header.split(";")
|
110 |
+
if not is_token(disptype):
|
111 |
+
warnings.warn(BadContentDispositionHeader(header))
|
112 |
+
return None, {}
|
113 |
+
|
114 |
+
params: Dict[str, str] = {}
|
115 |
+
while parts:
|
116 |
+
item = parts.pop(0)
|
117 |
+
|
118 |
+
if "=" not in item:
|
119 |
+
warnings.warn(BadContentDispositionHeader(header))
|
120 |
+
return None, {}
|
121 |
+
|
122 |
+
key, value = item.split("=", 1)
|
123 |
+
key = key.lower().strip()
|
124 |
+
value = value.lstrip()
|
125 |
+
|
126 |
+
if key in params:
|
127 |
+
warnings.warn(BadContentDispositionHeader(header))
|
128 |
+
return None, {}
|
129 |
+
|
130 |
+
if not is_token(key):
|
131 |
+
warnings.warn(BadContentDispositionParam(item))
|
132 |
+
continue
|
133 |
+
|
134 |
+
elif is_continuous_param(key):
|
135 |
+
if is_quoted(value):
|
136 |
+
value = unescape(value[1:-1])
|
137 |
+
elif not is_token(value):
|
138 |
+
warnings.warn(BadContentDispositionParam(item))
|
139 |
+
continue
|
140 |
+
|
141 |
+
elif is_extended_param(key):
|
142 |
+
if is_rfc5987(value):
|
143 |
+
encoding, _, value = value.split("'", 2)
|
144 |
+
encoding = encoding or "utf-8"
|
145 |
+
else:
|
146 |
+
warnings.warn(BadContentDispositionParam(item))
|
147 |
+
continue
|
148 |
+
|
149 |
+
try:
|
150 |
+
value = unquote(value, encoding, "strict")
|
151 |
+
except UnicodeDecodeError: # pragma: nocover
|
152 |
+
warnings.warn(BadContentDispositionParam(item))
|
153 |
+
continue
|
154 |
+
|
155 |
+
else:
|
156 |
+
failed = True
|
157 |
+
if is_quoted(value):
|
158 |
+
failed = False
|
159 |
+
value = unescape(value[1:-1].lstrip("\\/"))
|
160 |
+
elif is_token(value):
|
161 |
+
failed = False
|
162 |
+
elif parts:
|
163 |
+
# maybe just a ';' in the filename; in any case this is only a
|
164 |
+
# one-off fix, a proper fix would require redesigning the parser
|
165 |
+
_value = f"{value};{parts[0]}"
|
166 |
+
if is_quoted(_value):
|
167 |
+
parts.pop(0)
|
168 |
+
value = unescape(_value[1:-1].lstrip("\\/"))
|
169 |
+
failed = False
|
170 |
+
|
171 |
+
if failed:
|
172 |
+
warnings.warn(BadContentDispositionHeader(header))
|
173 |
+
return None, {}
|
174 |
+
|
175 |
+
params[key] = value
|
176 |
+
|
177 |
+
return disptype.lower(), params
|
178 |
+
|
179 |
+
|
180 |
+
def content_disposition_filename(
|
181 |
+
params: Mapping[str, str], name: str = "filename"
|
182 |
+
) -> Optional[str]:
|
183 |
+
name_suf = "%s*" % name
|
184 |
+
if not params:
|
185 |
+
return None
|
186 |
+
elif name_suf in params:
|
187 |
+
return params[name_suf]
|
188 |
+
elif name in params:
|
189 |
+
return params[name]
|
190 |
+
else:
|
191 |
+
parts = []
|
192 |
+
fnparams = sorted(
|
193 |
+
(key, value) for key, value in params.items() if key.startswith(name_suf)
|
194 |
+
)
|
195 |
+
for num, (key, value) in enumerate(fnparams):
|
196 |
+
_, tail = key.split("*", 1)
|
197 |
+
if tail.endswith("*"):
|
198 |
+
tail = tail[:-1]
|
199 |
+
if tail == str(num):
|
200 |
+
parts.append(value)
|
201 |
+
else:
|
202 |
+
break
|
203 |
+
if not parts:
|
204 |
+
return None
|
205 |
+
value = "".join(parts)
|
206 |
+
if "'" in value:
|
207 |
+
encoding, _, value = value.split("'", 2)
|
208 |
+
encoding = encoding or "utf-8"
|
209 |
+
return unquote(value, encoding, "strict")
|
210 |
+
return value
|
211 |
+
|
212 |
+
|
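(Editorial note, not part of the vendored file: the two helpers above combine roughly as follows; the header value is an invented example.)

    from aiohttp.multipart import (
        content_disposition_filename,
        parse_content_disposition,
    )

    disptype, params = parse_content_disposition(
        'form-data; name="file"; filename="report.pdf"'
    )
    assert disptype == "form-data"
    assert content_disposition_filename(params) == "report.pdf"
    assert content_disposition_filename(params, "name") == "file"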
213 |
+
class MultipartResponseWrapper:
|
214 |
+
"""Wrapper around the MultipartReader.
|
215 |
+
|
216 |
+
It takes care of the
|
217 |
+
underlying connection and closes it when needed.
|
218 |
+
"""
|
219 |
+
|
220 |
+
def __init__(
|
221 |
+
self,
|
222 |
+
resp: "ClientResponse",
|
223 |
+
stream: "MultipartReader",
|
224 |
+
) -> None:
|
225 |
+
self.resp = resp
|
226 |
+
self.stream = stream
|
227 |
+
|
228 |
+
def __aiter__(self) -> "MultipartResponseWrapper":
|
229 |
+
return self
|
230 |
+
|
231 |
+
async def __anext__(
|
232 |
+
self,
|
233 |
+
) -> Union["MultipartReader", "BodyPartReader"]:
|
234 |
+
part = await self.next()
|
235 |
+
if part is None:
|
236 |
+
raise StopAsyncIteration
|
237 |
+
return part
|
238 |
+
|
239 |
+
def at_eof(self) -> bool:
|
240 |
+
"""Returns True when all response data had been read."""
|
241 |
+
return self.resp.content.at_eof()
|
242 |
+
|
243 |
+
async def next(
|
244 |
+
self,
|
245 |
+
) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
|
246 |
+
"""Emits next multipart reader object."""
|
247 |
+
item = await self.stream.next()
|
248 |
+
if self.stream.at_eof():
|
249 |
+
await self.release()
|
250 |
+
return item
|
251 |
+
|
252 |
+
async def release(self) -> None:
|
253 |
+
"""Release the connection gracefully.
|
254 |
+
|
255 |
+
All remaining content is read to the void.
|
256 |
+
"""
|
257 |
+
await self.resp.release()
|
258 |
+
|
259 |
+
|
260 |
+
class BodyPartReader:
|
261 |
+
"""Multipart reader for single body part."""
|
262 |
+
|
263 |
+
chunk_size = 8192
|
264 |
+
|
265 |
+
def __init__(
|
266 |
+
self,
|
267 |
+
boundary: bytes,
|
268 |
+
headers: "CIMultiDictProxy[str]",
|
269 |
+
content: StreamReader,
|
270 |
+
*,
|
271 |
+
subtype: str = "mixed",
|
272 |
+
default_charset: Optional[str] = None,
|
273 |
+
) -> None:
|
274 |
+
self.headers = headers
|
275 |
+
self._boundary = boundary
|
276 |
+
self._boundary_len = len(boundary) + 2 # Boundary + \r\n
|
277 |
+
self._content = content
|
278 |
+
self._default_charset = default_charset
|
279 |
+
self._at_eof = False
|
280 |
+
self._is_form_data = subtype == "form-data"
|
281 |
+
# https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
|
282 |
+
length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None)
|
283 |
+
self._length = int(length) if length is not None else None
|
284 |
+
self._read_bytes = 0
|
285 |
+
self._unread: Deque[bytes] = deque()
|
286 |
+
self._prev_chunk: Optional[bytes] = None
|
287 |
+
self._content_eof = 0
|
288 |
+
self._cache: Dict[str, Any] = {}
|
289 |
+
|
290 |
+
def __aiter__(self: Self) -> Self:
|
291 |
+
return self
|
292 |
+
|
293 |
+
async def __anext__(self) -> bytes:
|
294 |
+
part = await self.next()
|
295 |
+
if part is None:
|
296 |
+
raise StopAsyncIteration
|
297 |
+
return part
|
298 |
+
|
299 |
+
async def next(self) -> Optional[bytes]:
|
300 |
+
item = await self.read()
|
301 |
+
if not item:
|
302 |
+
return None
|
303 |
+
return item
|
304 |
+
|
305 |
+
async def read(self, *, decode: bool = False) -> bytes:
|
306 |
+
"""Reads body part data.
|
307 |
+
|
308 |
+
decode: Decodes data according to the encoding
|
309 |
+
method from the Content-Encoding header. If the header is
|
310 |
+
missing, the data remains untouched
|
311 |
+
"""
|
312 |
+
if self._at_eof:
|
313 |
+
return b""
|
314 |
+
data = bytearray()
|
315 |
+
while not self._at_eof:
|
316 |
+
data.extend(await self.read_chunk(self.chunk_size))
|
317 |
+
if decode:
|
318 |
+
return self.decode(data)
|
319 |
+
return data
|
320 |
+
|
321 |
+
async def read_chunk(self, size: int = chunk_size) -> bytes:
|
322 |
+
"""Reads body part content chunk of the specified size.
|
323 |
+
|
324 |
+
size: chunk size
|
325 |
+
"""
|
326 |
+
if self._at_eof:
|
327 |
+
return b""
|
328 |
+
if self._length:
|
329 |
+
chunk = await self._read_chunk_from_length(size)
|
330 |
+
else:
|
331 |
+
chunk = await self._read_chunk_from_stream(size)
|
332 |
+
|
333 |
+
# For the case of base64 data, we must read a fragment of size with a
|
334 |
+
# remainder of 0 by dividing by 4 for string without symbols \n or \r
|
335 |
+
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING)
|
336 |
+
if encoding and encoding.lower() == "base64":
|
337 |
+
stripped_chunk = b"".join(chunk.split())
|
338 |
+
remainder = len(stripped_chunk) % 4
|
339 |
+
|
340 |
+
while remainder != 0 and not self.at_eof():
|
341 |
+
over_chunk_size = 4 - remainder
|
342 |
+
over_chunk = b""
|
343 |
+
|
344 |
+
if self._prev_chunk:
|
345 |
+
over_chunk = self._prev_chunk[:over_chunk_size]
|
346 |
+
self._prev_chunk = self._prev_chunk[len(over_chunk) :]
|
347 |
+
|
348 |
+
if len(over_chunk) != over_chunk_size:
|
349 |
+
over_chunk += await self._content.read(4 - len(over_chunk))
|
350 |
+
|
351 |
+
if not over_chunk:
|
352 |
+
self._at_eof = True
|
353 |
+
|
354 |
+
stripped_chunk += b"".join(over_chunk.split())
|
355 |
+
chunk += over_chunk
|
356 |
+
remainder = len(stripped_chunk) % 4
|
357 |
+
|
358 |
+
self._read_bytes += len(chunk)
|
359 |
+
if self._read_bytes == self._length:
|
360 |
+
self._at_eof = True
|
361 |
+
if self._at_eof:
|
362 |
+
crlf = await self._content.readline()
|
363 |
+
assert (
|
364 |
+
b"\r\n" == crlf
|
365 |
+
), "reader did not read all the data or it is malformed"
|
366 |
+
return chunk
|
367 |
+
|
368 |
+
async def _read_chunk_from_length(self, size: int) -> bytes:
|
369 |
+
# Reads body part content chunk of the specified size.
|
370 |
+
# The body part must have a Content-Length header with a proper value.
|
371 |
+
assert self._length is not None, "Content-Length required for chunked read"
|
372 |
+
chunk_size = min(size, self._length - self._read_bytes)
|
373 |
+
chunk = await self._content.read(chunk_size)
|
374 |
+
if self._content.at_eof():
|
375 |
+
self._at_eof = True
|
376 |
+
return chunk
|
377 |
+
|
378 |
+
async def _read_chunk_from_stream(self, size: int) -> bytes:
|
379 |
+
# Reads content chunk of body part with unknown length.
|
380 |
+
# The Content-Length header for body part is not necessary.
|
381 |
+
assert (
|
382 |
+
size >= self._boundary_len
|
383 |
+
), "Chunk size must be greater or equal than boundary length + 2"
|
384 |
+
first_chunk = self._prev_chunk is None
|
385 |
+
if first_chunk:
|
386 |
+
self._prev_chunk = await self._content.read(size)
|
387 |
+
|
388 |
+
chunk = b""
|
389 |
+
# content.read() may return less than size, so we need to loop to ensure
|
390 |
+
# we have enough data to detect the boundary.
|
391 |
+
while len(chunk) < self._boundary_len:
|
392 |
+
chunk += await self._content.read(size)
|
393 |
+
self._content_eof += int(self._content.at_eof())
|
394 |
+
assert self._content_eof < 3, "Reading after EOF"
|
395 |
+
if self._content_eof:
|
396 |
+
break
|
397 |
+
if len(chunk) > size:
|
398 |
+
self._content.unread_data(chunk[size:])
|
399 |
+
chunk = chunk[:size]
|
400 |
+
|
401 |
+
assert self._prev_chunk is not None
|
402 |
+
window = self._prev_chunk + chunk
|
403 |
+
sub = b"\r\n" + self._boundary
|
404 |
+
if first_chunk:
|
405 |
+
idx = window.find(sub)
|
406 |
+
else:
|
407 |
+
idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
|
408 |
+
if idx >= 0:
|
409 |
+
# pushing boundary back to content
|
410 |
+
with warnings.catch_warnings():
|
411 |
+
warnings.filterwarnings("ignore", category=DeprecationWarning)
|
412 |
+
self._content.unread_data(window[idx:])
|
413 |
+
if size > idx:
|
414 |
+
self._prev_chunk = self._prev_chunk[:idx]
|
415 |
+
chunk = window[len(self._prev_chunk) : idx]
|
416 |
+
if not chunk:
|
417 |
+
self._at_eof = True
|
418 |
+
result = self._prev_chunk
|
419 |
+
self._prev_chunk = chunk
|
420 |
+
return result
|
421 |
+
|
422 |
+
async def readline(self) -> bytes:
|
423 |
+
"""Reads body part by line by line."""
|
424 |
+
if self._at_eof:
|
425 |
+
return b""
|
426 |
+
|
427 |
+
if self._unread:
|
428 |
+
line = self._unread.popleft()
|
429 |
+
else:
|
430 |
+
line = await self._content.readline()
|
431 |
+
|
432 |
+
if line.startswith(self._boundary):
|
433 |
+
# the very last boundary may not come with \r\n,
|
434 |
+
# so apply a single rule to all of them
|
435 |
+
sline = line.rstrip(b"\r\n")
|
436 |
+
boundary = self._boundary
|
437 |
+
last_boundary = self._boundary + b"--"
|
438 |
+
# ensure that we read exactly the boundary, not something similar
|
439 |
+
if sline == boundary or sline == last_boundary:
|
440 |
+
self._at_eof = True
|
441 |
+
self._unread.append(line)
|
442 |
+
return b""
|
443 |
+
else:
|
444 |
+
next_line = await self._content.readline()
|
445 |
+
if next_line.startswith(self._boundary):
|
446 |
+
line = line[:-2] # strip CRLF but only once
|
447 |
+
self._unread.append(next_line)
|
448 |
+
|
449 |
+
return line
|
450 |
+
|
451 |
+
async def release(self) -> None:
|
452 |
+
"""Like read(), but reads all the data to the void."""
|
453 |
+
if self._at_eof:
|
454 |
+
return
|
455 |
+
while not self._at_eof:
|
456 |
+
await self.read_chunk(self.chunk_size)
|
457 |
+
|
458 |
+
async def text(self, *, encoding: Optional[str] = None) -> str:
|
459 |
+
"""Like read(), but assumes that body part contains text data."""
|
460 |
+
data = await self.read(decode=True)
|
461 |
+
# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm
|
462 |
+
# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send
|
463 |
+
encoding = encoding or self.get_charset(default="utf-8")
|
464 |
+
return data.decode(encoding)
|
465 |
+
|
466 |
+
async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
467 |
+
"""Like read(), but assumes that body parts contains JSON data."""
|
468 |
+
data = await self.read(decode=True)
|
469 |
+
if not data:
|
470 |
+
return None
|
471 |
+
encoding = encoding or self.get_charset(default="utf-8")
|
472 |
+
return cast(Dict[str, Any], json.loads(data.decode(encoding)))
|
473 |
+
|
474 |
+
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
|
475 |
+
"""Like read(), but assumes that body parts contain form urlencoded data."""
|
476 |
+
data = await self.read(decode=True)
|
477 |
+
if not data:
|
478 |
+
return []
|
479 |
+
if encoding is not None:
|
480 |
+
real_encoding = encoding
|
481 |
+
else:
|
482 |
+
real_encoding = self.get_charset(default="utf-8")
|
483 |
+
try:
|
484 |
+
decoded_data = data.rstrip().decode(real_encoding)
|
485 |
+
except UnicodeDecodeError:
|
486 |
+
raise ValueError("data cannot be decoded with %s encoding" % real_encoding)
|
487 |
+
|
488 |
+
return parse_qsl(
|
489 |
+
decoded_data,
|
490 |
+
keep_blank_values=True,
|
491 |
+
encoding=real_encoding,
|
492 |
+
)
|
493 |
+
|
494 |
+
def at_eof(self) -> bool:
|
495 |
+
"""Returns True if the boundary was reached or False otherwise."""
|
496 |
+
return self._at_eof
|
497 |
+
|
498 |
+
def decode(self, data: bytes) -> bytes:
|
499 |
+
"""Decodes data.
|
500 |
+
|
501 |
+
Decoding is done according to the specified Content-Encoding
|
502 |
+
or Content-Transfer-Encoding header value.
|
503 |
+
"""
|
504 |
+
if CONTENT_TRANSFER_ENCODING in self.headers:
|
505 |
+
data = self._decode_content_transfer(data)
|
506 |
+
# https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
|
507 |
+
if not self._is_form_data and CONTENT_ENCODING in self.headers:
|
508 |
+
return self._decode_content(data)
|
509 |
+
return data
|
510 |
+
|
511 |
+
def _decode_content(self, data: bytes) -> bytes:
|
512 |
+
encoding = self.headers.get(CONTENT_ENCODING, "").lower()
|
513 |
+
if encoding == "identity":
|
514 |
+
return data
|
515 |
+
if encoding in {"deflate", "gzip"}:
|
516 |
+
return ZLibDecompressor(
|
517 |
+
encoding=encoding,
|
518 |
+
suppress_deflate_header=True,
|
519 |
+
).decompress_sync(data)
|
520 |
+
|
521 |
+
raise RuntimeError(f"unknown content encoding: {encoding}")
|
522 |
+
|
523 |
+
def _decode_content_transfer(self, data: bytes) -> bytes:
|
524 |
+
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
|
525 |
+
|
526 |
+
if encoding == "base64":
|
527 |
+
return base64.b64decode(data)
|
528 |
+
elif encoding == "quoted-printable":
|
529 |
+
return binascii.a2b_qp(data)
|
530 |
+
elif encoding in ("binary", "8bit", "7bit"):
|
531 |
+
return data
|
532 |
+
else:
|
533 |
+
raise RuntimeError(f"unknown content transfer encoding: {encoding}")
|
534 |
+
|
535 |
+
def get_charset(self, default: str) -> str:
|
536 |
+
"""Returns charset parameter from Content-Type header or default."""
|
537 |
+
ctype = self.headers.get(CONTENT_TYPE, "")
|
538 |
+
mimetype = parse_mimetype(ctype)
|
539 |
+
return mimetype.parameters.get("charset", self._default_charset or default)
|
540 |
+
|
541 |
+
@reify
|
542 |
+
def name(self) -> Optional[str]:
|
543 |
+
"""Returns name specified in Content-Disposition header.
|
544 |
+
|
545 |
+
If the header is missing or malformed, returns None.
|
546 |
+
"""
|
547 |
+
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
|
548 |
+
return content_disposition_filename(params, "name")
|
549 |
+
|
550 |
+
@reify
|
551 |
+
def filename(self) -> Optional[str]:
|
552 |
+
"""Returns filename specified in Content-Disposition header.
|
553 |
+
|
554 |
+
Returns None if the header is missing or malformed.
|
555 |
+
"""
|
556 |
+
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
|
557 |
+
return content_disposition_filename(params, "filename")
|
558 |
+
|
559 |
+
|
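(Editorial note: a minimal sketch of how these reader classes are typically driven on the client side, assuming a flat, non-nested multipart body; the URL is a placeholder.)

    import asyncio

    import aiohttp

    async def dump_parts() -> None:
        async with aiohttp.ClientSession() as session:
            async with session.get("http://example.com/stream") as resp:
                reader = aiohttp.MultipartReader.from_response(resp)
                async for part in reader:
                    # read(decode=True) honours Content-Encoding and
                    # Content-Transfer-Encoding, as implemented above
                    data = await part.read(decode=True)
                    print(part.headers.get("Content-Type"), len(data))

    asyncio.run(dump_parts())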
560 |
+
@payload_type(BodyPartReader, order=Order.try_first)
|
561 |
+
class BodyPartReaderPayload(Payload):
|
562 |
+
_value: BodyPartReader
|
563 |
+
|
564 |
+
def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
|
565 |
+
super().__init__(value, *args, **kwargs)
|
566 |
+
|
567 |
+
params: Dict[str, str] = {}
|
568 |
+
if value.name is not None:
|
569 |
+
params["name"] = value.name
|
570 |
+
if value.filename is not None:
|
571 |
+
params["filename"] = value.filename
|
572 |
+
|
573 |
+
if params:
|
574 |
+
self.set_content_disposition("attachment", True, **params)
|
575 |
+
|
576 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
577 |
+
raise TypeError("Unable to decode.")
|
578 |
+
|
579 |
+
async def write(self, writer: Any) -> None:
|
580 |
+
field = self._value
|
581 |
+
chunk = await field.read_chunk(size=2**16)
|
582 |
+
while chunk:
|
583 |
+
await writer.write(field.decode(chunk))
|
584 |
+
chunk = await field.read_chunk(size=2**16)
|
585 |
+
|
586 |
+
|
587 |
+
class MultipartReader:
|
588 |
+
"""Multipart body reader."""
|
589 |
+
|
590 |
+
#: Response wrapper, used when a multipart reader is constructed from a response.
|
591 |
+
response_wrapper_cls = MultipartResponseWrapper
|
592 |
+
#: Multipart reader class, used to handle multipart/* body parts.
|
593 |
+
#: None points to type(self)
|
594 |
+
multipart_reader_cls: Optional[Type["MultipartReader"]] = None
|
595 |
+
#: Body part reader class for non multipart/* content types.
|
596 |
+
part_reader_cls = BodyPartReader
|
597 |
+
|
598 |
+
def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
|
599 |
+
self._mimetype = parse_mimetype(headers[CONTENT_TYPE])
|
600 |
+
assert self._mimetype.type == "multipart", "multipart/* content type expected"
|
601 |
+
if "boundary" not in self._mimetype.parameters:
|
602 |
+
raise ValueError(
|
603 |
+
"boundary missed for Content-Type: %s" % headers[CONTENT_TYPE]
|
604 |
+
)
|
605 |
+
|
606 |
+
self.headers = headers
|
607 |
+
self._boundary = ("--" + self._get_boundary()).encode()
|
608 |
+
self._content = content
|
609 |
+
self._default_charset: Optional[str] = None
|
610 |
+
self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
|
611 |
+
self._at_eof = False
|
612 |
+
self._at_bof = True
|
613 |
+
self._unread: List[bytes] = []
|
614 |
+
|
615 |
+
def __aiter__(self: Self) -> Self:
|
616 |
+
return self
|
617 |
+
|
618 |
+
async def __anext__(
|
619 |
+
self,
|
620 |
+
) -> Optional[Union["MultipartReader", BodyPartReader]]:
|
621 |
+
part = await self.next()
|
622 |
+
if part is None:
|
623 |
+
raise StopAsyncIteration
|
624 |
+
return part
|
625 |
+
|
626 |
+
@classmethod
|
627 |
+
def from_response(
|
628 |
+
cls,
|
629 |
+
response: "ClientResponse",
|
630 |
+
) -> MultipartResponseWrapper:
|
631 |
+
"""Constructs reader instance from HTTP response.
|
632 |
+
|
633 |
+
:param response: :class:`~aiohttp.client.ClientResponse` instance
|
634 |
+
"""
|
635 |
+
obj = cls.response_wrapper_cls(
|
636 |
+
response, cls(response.headers, response.content)
|
637 |
+
)
|
638 |
+
return obj
|
639 |
+
|
640 |
+
def at_eof(self) -> bool:
|
641 |
+
"""Returns True if the final boundary was reached, false otherwise."""
|
642 |
+
return self._at_eof
|
643 |
+
|
644 |
+
async def next(
|
645 |
+
self,
|
646 |
+
) -> Optional[Union["MultipartReader", BodyPartReader]]:
|
647 |
+
"""Emits the next multipart body part."""
|
648 |
+
# So, if we're at BOF, we need to skip up to the first boundary.
|
649 |
+
if self._at_eof:
|
650 |
+
return None
|
651 |
+
await self._maybe_release_last_part()
|
652 |
+
if self._at_bof:
|
653 |
+
await self._read_until_first_boundary()
|
654 |
+
self._at_bof = False
|
655 |
+
else:
|
656 |
+
await self._read_boundary()
|
657 |
+
if self._at_eof: # we just read the last boundary, nothing to do there
|
658 |
+
return None
|
659 |
+
|
660 |
+
part = await self.fetch_next_part()
|
661 |
+
# https://datatracker.ietf.org/doc/html/rfc7578#section-4.6
|
662 |
+
if (
|
663 |
+
self._last_part is None
|
664 |
+
and self._mimetype.subtype == "form-data"
|
665 |
+
and isinstance(part, BodyPartReader)
|
666 |
+
):
|
667 |
+
_, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION))
|
668 |
+
if params.get("name") == "_charset_":
|
669 |
+
# Longest encoding in https://encoding.spec.whatwg.org/encodings.json
|
670 |
+
# is 19 characters, so 32 should be more than enough for any valid encoding.
|
671 |
+
charset = await part.read_chunk(32)
|
672 |
+
if len(charset) > 31:
|
673 |
+
raise RuntimeError("Invalid default charset")
|
674 |
+
self._default_charset = charset.strip().decode()
|
675 |
+
part = await self.fetch_next_part()
|
676 |
+
self._last_part = part
|
677 |
+
return self._last_part
|
678 |
+
|
679 |
+
async def release(self) -> None:
|
680 |
+
"""Reads all the body parts to the void till the final boundary."""
|
681 |
+
while not self._at_eof:
|
682 |
+
item = await self.next()
|
683 |
+
if item is None:
|
684 |
+
break
|
685 |
+
await item.release()
|
686 |
+
|
687 |
+
async def fetch_next_part(
|
688 |
+
self,
|
689 |
+
) -> Union["MultipartReader", BodyPartReader]:
|
690 |
+
"""Returns the next body part reader."""
|
691 |
+
headers = await self._read_headers()
|
692 |
+
return self._get_part_reader(headers)
|
693 |
+
|
694 |
+
def _get_part_reader(
|
695 |
+
self,
|
696 |
+
headers: "CIMultiDictProxy[str]",
|
697 |
+
) -> Union["MultipartReader", BodyPartReader]:
|
698 |
+
"""Dispatches the response by the `Content-Type` header.
|
699 |
+
|
700 |
+
Returns a suitable reader instance.
|
701 |
+
|
702 |
+
:param dict headers: Response headers
|
703 |
+
"""
|
704 |
+
ctype = headers.get(CONTENT_TYPE, "")
|
705 |
+
mimetype = parse_mimetype(ctype)
|
706 |
+
|
707 |
+
if mimetype.type == "multipart":
|
708 |
+
if self.multipart_reader_cls is None:
|
709 |
+
return type(self)(headers, self._content)
|
710 |
+
return self.multipart_reader_cls(headers, self._content)
|
711 |
+
else:
|
712 |
+
return self.part_reader_cls(
|
713 |
+
self._boundary,
|
714 |
+
headers,
|
715 |
+
self._content,
|
716 |
+
subtype=self._mimetype.subtype,
|
717 |
+
default_charset=self._default_charset,
|
718 |
+
)
|
719 |
+
|
720 |
+
def _get_boundary(self) -> str:
|
721 |
+
boundary = self._mimetype.parameters["boundary"]
|
722 |
+
if len(boundary) > 70:
|
723 |
+
raise ValueError("boundary %r is too long (70 chars max)" % boundary)
|
724 |
+
|
725 |
+
return boundary
|
726 |
+
|
727 |
+
async def _readline(self) -> bytes:
|
728 |
+
if self._unread:
|
729 |
+
return self._unread.pop()
|
730 |
+
return await self._content.readline()
|
731 |
+
|
732 |
+
async def _read_until_first_boundary(self) -> None:
|
733 |
+
while True:
|
734 |
+
chunk = await self._readline()
|
735 |
+
if chunk == b"":
|
736 |
+
raise ValueError(
|
737 |
+
"Could not find starting boundary %r" % (self._boundary)
|
738 |
+
)
|
739 |
+
chunk = chunk.rstrip()
|
740 |
+
if chunk == self._boundary:
|
741 |
+
return
|
742 |
+
elif chunk == self._boundary + b"--":
|
743 |
+
self._at_eof = True
|
744 |
+
return
|
745 |
+
|
746 |
+
async def _read_boundary(self) -> None:
|
747 |
+
chunk = (await self._readline()).rstrip()
|
748 |
+
if chunk == self._boundary:
|
749 |
+
pass
|
750 |
+
elif chunk == self._boundary + b"--":
|
751 |
+
self._at_eof = True
|
752 |
+
epilogue = await self._readline()
|
753 |
+
next_line = await self._readline()
|
754 |
+
|
755 |
+
# the epilogue is expected and then either the end of input or the
|
756 |
+
# parent multipart boundary; if the parent boundary is found,
|
757 |
+
# it should be marked as unread and handed to the parent for
|
758 |
+
# processing
|
759 |
+
if next_line[:2] == b"--":
|
760 |
+
self._unread.append(next_line)
|
761 |
+
# otherwise the request is likely missing an epilogue and both
|
762 |
+
# lines should be passed to the parent for processing
|
763 |
+
# (this handles the old behavior gracefully)
|
764 |
+
else:
|
765 |
+
self._unread.extend([next_line, epilogue])
|
766 |
+
else:
|
767 |
+
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
|
768 |
+
|
769 |
+
async def _read_headers(self) -> "CIMultiDictProxy[str]":
|
770 |
+
lines = [b""]
|
771 |
+
while True:
|
772 |
+
chunk = await self._content.readline()
|
773 |
+
chunk = chunk.strip()
|
774 |
+
lines.append(chunk)
|
775 |
+
if not chunk:
|
776 |
+
break
|
777 |
+
parser = HeadersParser()
|
778 |
+
headers, raw_headers = parser.parse_headers(lines)
|
779 |
+
return headers
|
780 |
+
|
781 |
+
async def _maybe_release_last_part(self) -> None:
|
782 |
+
"""Ensures that the last read body part is read completely."""
|
783 |
+
if self._last_part is not None:
|
784 |
+
if not self._last_part.at_eof():
|
785 |
+
await self._last_part.release()
|
786 |
+
self._unread.extend(self._last_part._unread)
|
787 |
+
self._last_part = None
|
788 |
+
|
789 |
+
|
790 |
+
_Part = Tuple[Payload, str, str]
|
791 |
+
|
792 |
+
|
793 |
+
class MultipartWriter(Payload):
|
794 |
+
"""Multipart body writer."""
|
795 |
+
|
796 |
+
_value: None
|
797 |
+
|
798 |
+
def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
|
799 |
+
boundary = boundary if boundary is not None else uuid.uuid4().hex
|
800 |
+
# The underlying Payload API demands a str (utf-8), not bytes,
|
801 |
+
# so we need to ensure we don't lose anything during conversion.
|
802 |
+
# As a result, require the boundary to be ASCII-only
|
803 |
+
# so the str/bytes conversion is lossless either way.
|
804 |
+
|
805 |
+
try:
|
806 |
+
self._boundary = boundary.encode("ascii")
|
807 |
+
except UnicodeEncodeError:
|
808 |
+
raise ValueError("boundary should contain ASCII only chars") from None
|
809 |
+
ctype = f"multipart/{subtype}; boundary={self._boundary_value}"
|
810 |
+
|
811 |
+
super().__init__(None, content_type=ctype)
|
812 |
+
|
813 |
+
self._parts: List[_Part] = []
|
814 |
+
self._is_form_data = subtype == "form-data"
|
815 |
+
|
816 |
+
def __enter__(self) -> "MultipartWriter":
|
817 |
+
return self
|
818 |
+
|
819 |
+
def __exit__(
|
820 |
+
self,
|
821 |
+
exc_type: Optional[Type[BaseException]],
|
822 |
+
exc_val: Optional[BaseException],
|
823 |
+
exc_tb: Optional[TracebackType],
|
824 |
+
) -> None:
|
825 |
+
pass
|
826 |
+
|
827 |
+
def __iter__(self) -> Iterator[_Part]:
|
828 |
+
return iter(self._parts)
|
829 |
+
|
830 |
+
def __len__(self) -> int:
|
831 |
+
return len(self._parts)
|
832 |
+
|
833 |
+
def __bool__(self) -> bool:
|
834 |
+
return True
|
835 |
+
|
836 |
+
_valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
|
837 |
+
_invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")
|
838 |
+
|
839 |
+
@property
|
840 |
+
def _boundary_value(self) -> str:
|
841 |
+
"""Wrap boundary parameter value in quotes, if necessary.
|
842 |
+
|
843 |
+
Reads self.boundary and returns a unicode string.
|
844 |
+
"""
|
845 |
+
# Refer to RFCs 7231, 7230, 5234.
|
846 |
+
#
|
847 |
+
# parameter = token "=" ( token / quoted-string )
|
848 |
+
# token = 1*tchar
|
849 |
+
# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
|
850 |
+
# qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
|
851 |
+
# obs-text = %x80-FF
|
852 |
+
# quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
|
853 |
+
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
|
854 |
+
# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
|
855 |
+
# / DIGIT / ALPHA
|
856 |
+
# ; any VCHAR, except delimiters
|
857 |
+
# VCHAR = %x21-7E
|
858 |
+
value = self._boundary
|
859 |
+
if re.match(self._valid_tchar_regex, value):
|
860 |
+
return value.decode("ascii") # cannot fail
|
861 |
+
|
862 |
+
if re.search(self._invalid_qdtext_char_regex, value):
|
863 |
+
raise ValueError("boundary value contains invalid characters")
|
864 |
+
|
865 |
+
# escape %x5C and %x22
|
866 |
+
quoted_value_content = value.replace(b"\\", b"\\\\")
|
867 |
+
quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
|
868 |
+
|
869 |
+
return '"' + quoted_value_content.decode("ascii") + '"'
|
870 |
+
|
871 |
+
@property
|
872 |
+
def boundary(self) -> str:
|
873 |
+
return self._boundary.decode("ascii")
|
874 |
+
|
875 |
+
def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload:
|
876 |
+
if headers is None:
|
877 |
+
headers = CIMultiDict()
|
878 |
+
|
879 |
+
if isinstance(obj, Payload):
|
880 |
+
obj.headers.update(headers)
|
881 |
+
return self.append_payload(obj)
|
882 |
+
else:
|
883 |
+
try:
|
884 |
+
payload = get_payload(obj, headers=headers)
|
885 |
+
except LookupError:
|
886 |
+
raise TypeError("Cannot create payload from %r" % obj)
|
887 |
+
else:
|
888 |
+
return self.append_payload(payload)
|
889 |
+
|
890 |
+
def append_payload(self, payload: Payload) -> Payload:
|
891 |
+
"""Adds a new body part to multipart writer."""
|
892 |
+
encoding: Optional[str] = None
|
893 |
+
te_encoding: Optional[str] = None
|
894 |
+
if self._is_form_data:
|
895 |
+
# https://datatracker.ietf.org/doc/html/rfc7578#section-4.7
|
896 |
+
# https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
|
897 |
+
assert (
|
898 |
+
not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING}
|
899 |
+
& payload.headers.keys()
|
900 |
+
)
|
901 |
+
# Set default Content-Disposition in case user doesn't create one
|
902 |
+
if CONTENT_DISPOSITION not in payload.headers:
|
903 |
+
name = f"section-{len(self._parts)}"
|
904 |
+
payload.set_content_disposition("form-data", name=name)
|
905 |
+
else:
|
906 |
+
# compression
|
907 |
+
encoding = payload.headers.get(CONTENT_ENCODING, "").lower()
|
908 |
+
if encoding and encoding not in ("deflate", "gzip", "identity"):
|
909 |
+
raise RuntimeError(f"unknown content encoding: {encoding}")
|
910 |
+
if encoding == "identity":
|
911 |
+
encoding = None
|
912 |
+
|
913 |
+
# te encoding
|
914 |
+
te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
|
915 |
+
if te_encoding not in ("", "base64", "quoted-printable", "binary"):
|
916 |
+
raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
|
917 |
+
if te_encoding == "binary":
|
918 |
+
te_encoding = None
|
919 |
+
|
920 |
+
# size
|
921 |
+
size = payload.size
|
922 |
+
if size is not None and not (encoding or te_encoding):
|
923 |
+
payload.headers[CONTENT_LENGTH] = str(size)
|
924 |
+
|
925 |
+
self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type]
|
926 |
+
return payload
|
927 |
+
|
928 |
+
def append_json(
|
929 |
+
self, obj: Any, headers: Optional[Mapping[str, str]] = None
|
930 |
+
) -> Payload:
|
931 |
+
"""Helper to append JSON part."""
|
932 |
+
if headers is None:
|
933 |
+
headers = CIMultiDict()
|
934 |
+
|
935 |
+
return self.append_payload(JsonPayload(obj, headers=headers))
|
936 |
+
|
937 |
+
def append_form(
|
938 |
+
self,
|
939 |
+
obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
|
940 |
+
headers: Optional[Mapping[str, str]] = None,
|
941 |
+
) -> Payload:
|
942 |
+
"""Helper to append form urlencoded part."""
|
943 |
+
assert isinstance(obj, (Sequence, Mapping))
|
944 |
+
|
945 |
+
if headers is None:
|
946 |
+
headers = CIMultiDict()
|
947 |
+
|
948 |
+
if isinstance(obj, Mapping):
|
949 |
+
obj = list(obj.items())
|
950 |
+
data = urlencode(obj, doseq=True)
|
951 |
+
|
952 |
+
return self.append_payload(
|
953 |
+
StringPayload(
|
954 |
+
data, headers=headers, content_type="application/x-www-form-urlencoded"
|
955 |
+
)
|
956 |
+
)
|
957 |
+
|
958 |
+
@property
|
959 |
+
def size(self) -> Optional[int]:
|
960 |
+
"""Size of the payload."""
|
961 |
+
total = 0
|
962 |
+
for part, encoding, te_encoding in self._parts:
|
963 |
+
if encoding or te_encoding or part.size is None:
|
964 |
+
return None
|
965 |
+
|
966 |
+
total += int(
|
967 |
+
2
|
968 |
+
+ len(self._boundary)
|
969 |
+
+ 2
|
970 |
+
+ part.size # b'--'+self._boundary+b'\r\n'
|
971 |
+
+ len(part._binary_headers)
|
972 |
+
+ 2 # b'\r\n'
|
973 |
+
)
|
974 |
+
|
975 |
+
total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n'
|
976 |
+
return total
|
977 |
+
|
978 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
979 |
+
return "".join(
|
980 |
+
"--"
|
981 |
+
+ self.boundary
|
982 |
+
+ "\r\n"
|
983 |
+
+ part._binary_headers.decode(encoding, errors)
|
984 |
+
+ part.decode()
|
985 |
+
for part, _e, _te in self._parts
|
986 |
+
)
|
987 |
+
|
988 |
+
async def write(self, writer: Any, close_boundary: bool = True) -> None:
|
989 |
+
"""Write body."""
|
990 |
+
for part, encoding, te_encoding in self._parts:
|
991 |
+
if self._is_form_data:
|
992 |
+
# https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
|
993 |
+
assert CONTENT_DISPOSITION in part.headers
|
994 |
+
assert "name=" in part.headers[CONTENT_DISPOSITION]
|
995 |
+
|
996 |
+
await writer.write(b"--" + self._boundary + b"\r\n")
|
997 |
+
await writer.write(part._binary_headers)
|
998 |
+
|
999 |
+
if encoding or te_encoding:
|
1000 |
+
w = MultipartPayloadWriter(writer)
|
1001 |
+
if encoding:
|
1002 |
+
w.enable_compression(encoding)
|
1003 |
+
if te_encoding:
|
1004 |
+
w.enable_encoding(te_encoding)
|
1005 |
+
await part.write(w) # type: ignore[arg-type]
|
1006 |
+
await w.write_eof()
|
1007 |
+
else:
|
1008 |
+
await part.write(writer)
|
1009 |
+
|
1010 |
+
await writer.write(b"\r\n")
|
1011 |
+
|
1012 |
+
if close_boundary:
|
1013 |
+
await writer.write(b"--" + self._boundary + b"--\r\n")
|
1014 |
+
|
1015 |
+
|
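(Editorial note: a minimal sketch of building a multipart/form-data body with this writer.)

    from aiohttp import MultipartWriter

    with MultipartWriter("form-data") as mpwriter:
        # a plain string part; in form-data mode append_payload assigns
        # a default Content-Disposition name ("section-0") if none is set
        mpwriter.append("hello")
        mpwriter.append_json({"ok": True})
        mpwriter.append_form({"key": "value"})
    # mpwriter can then be passed as data= to a ClientSession request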
1016 |
+
class MultipartPayloadWriter:
|
1017 |
+
def __init__(self, writer: Any) -> None:
|
1018 |
+
self._writer = writer
|
1019 |
+
self._encoding: Optional[str] = None
|
1020 |
+
self._compress: Optional[ZLibCompressor] = None
|
1021 |
+
self._encoding_buffer: Optional[bytearray] = None
|
1022 |
+
|
1023 |
+
def enable_encoding(self, encoding: str) -> None:
|
1024 |
+
if encoding == "base64":
|
1025 |
+
self._encoding = encoding
|
1026 |
+
self._encoding_buffer = bytearray()
|
1027 |
+
elif encoding == "quoted-printable":
|
1028 |
+
self._encoding = "quoted-printable"
|
1029 |
+
|
1030 |
+
def enable_compression(
|
1031 |
+
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
|
1032 |
+
) -> None:
|
1033 |
+
self._compress = ZLibCompressor(
|
1034 |
+
encoding=encoding,
|
1035 |
+
suppress_deflate_header=True,
|
1036 |
+
strategy=strategy,
|
1037 |
+
)
|
1038 |
+
|
1039 |
+
async def write_eof(self) -> None:
|
1040 |
+
if self._compress is not None:
|
1041 |
+
chunk = self._compress.flush()
|
1042 |
+
if chunk:
|
1043 |
+
self._compress = None
|
1044 |
+
await self.write(chunk)
|
1045 |
+
|
1046 |
+
if self._encoding == "base64":
|
1047 |
+
if self._encoding_buffer:
|
1048 |
+
await self._writer.write(base64.b64encode(self._encoding_buffer))
|
1049 |
+
|
1050 |
+
async def write(self, chunk: bytes) -> None:
|
1051 |
+
if self._compress is not None:
|
1052 |
+
if chunk:
|
1053 |
+
chunk = await self._compress.compress(chunk)
|
1054 |
+
if not chunk:
|
1055 |
+
return
|
1056 |
+
|
1057 |
+
if self._encoding == "base64":
|
1058 |
+
buf = self._encoding_buffer
|
1059 |
+
assert buf is not None
|
1060 |
+
buf.extend(chunk)
|
1061 |
+
|
1062 |
+
if buf:
|
1063 |
+
div, mod = divmod(len(buf), 3)
|
1064 |
+
enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
|
1065 |
+
if enc_chunk:
|
1066 |
+
b64chunk = base64.b64encode(enc_chunk)
|
1067 |
+
await self._writer.write(b64chunk)
|
1068 |
+
elif self._encoding == "quoted-printable":
|
1069 |
+
await self._writer.write(binascii.b2a_qp(chunk))
|
1070 |
+
else:
|
1071 |
+
await self._writer.write(chunk)
|
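(Editorial note: the three-byte buffering in MultipartPayloadWriter.write matters because base64 encodes input in 3-byte groups; flushing on any other boundary would inject '=' padding mid-stream. A self-contained check:)

    import base64

    assert (
        base64.b64encode(b"abc") + base64.b64encode(b"def")
        == base64.b64encode(b"abcdef")
    )
    # splitting on a non-multiple of 3 pads mid-stream and corrupts the output:
    assert (
        base64.b64encode(b"ab") + base64.b64encode(b"cdef")
        != base64.b64encode(b"abcdef")
    )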
venv/Lib/site-packages/aiohttp/payload.py
ADDED
@@ -0,0 +1,519 @@
1 |
+
import asyncio
|
2 |
+
import enum
|
3 |
+
import io
|
4 |
+
import json
|
5 |
+
import mimetypes
|
6 |
+
import os
|
7 |
+
import sys
|
8 |
+
import warnings
|
9 |
+
from abc import ABC, abstractmethod
|
10 |
+
from itertools import chain
|
11 |
+
from typing import (
|
12 |
+
IO,
|
13 |
+
TYPE_CHECKING,
|
14 |
+
Any,
|
15 |
+
Dict,
|
16 |
+
Final,
|
17 |
+
Iterable,
|
18 |
+
Optional,
|
19 |
+
TextIO,
|
20 |
+
Tuple,
|
21 |
+
Type,
|
22 |
+
Union,
|
23 |
+
)
|
24 |
+
|
25 |
+
from multidict import CIMultiDict
|
26 |
+
|
27 |
+
from . import hdrs
|
28 |
+
from .abc import AbstractStreamWriter
|
29 |
+
from .helpers import (
|
30 |
+
_SENTINEL,
|
31 |
+
content_disposition_header,
|
32 |
+
guess_filename,
|
33 |
+
parse_mimetype,
|
34 |
+
sentinel,
|
35 |
+
)
|
36 |
+
from .streams import StreamReader
|
37 |
+
from .typedefs import JSONEncoder, _CIMultiDict
|
38 |
+
|
39 |
+
__all__ = (
|
40 |
+
"PAYLOAD_REGISTRY",
|
41 |
+
"get_payload",
|
42 |
+
"payload_type",
|
43 |
+
"Payload",
|
44 |
+
"BytesPayload",
|
45 |
+
"StringPayload",
|
46 |
+
"IOBasePayload",
|
47 |
+
"BytesIOPayload",
|
48 |
+
"BufferedReaderPayload",
|
49 |
+
"TextIOPayload",
|
50 |
+
"StringIOPayload",
|
51 |
+
"JsonPayload",
|
52 |
+
"AsyncIterablePayload",
|
53 |
+
)
|
54 |
+
|
55 |
+
TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
|
56 |
+
|
57 |
+
if TYPE_CHECKING:
|
58 |
+
from typing import List
|
59 |
+
|
60 |
+
|
61 |
+
class LookupError(Exception):
|
62 |
+
pass
|
63 |
+
|
64 |
+
|
65 |
+
class Order(str, enum.Enum):
|
66 |
+
normal = "normal"
|
67 |
+
try_first = "try_first"
|
68 |
+
try_last = "try_last"
|
69 |
+
|
70 |
+
|
71 |
+
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
|
72 |
+
return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
|
73 |
+
|
74 |
+
|
75 |
+
def register_payload(
|
76 |
+
factory: Type["Payload"], type: Any, *, order: Order = Order.normal
|
77 |
+
) -> None:
|
78 |
+
PAYLOAD_REGISTRY.register(factory, type, order=order)
|
79 |
+
|
80 |
+
|
81 |
+
class payload_type:
|
82 |
+
def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
|
83 |
+
self.type = type
|
84 |
+
self.order = order
|
85 |
+
|
86 |
+
def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
|
87 |
+
register_payload(factory, self.type, order=self.order)
|
88 |
+
return factory
|
89 |
+
|
90 |
+
|
91 |
+
PayloadType = Type["Payload"]
|
92 |
+
_PayloadRegistryItem = Tuple[PayloadType, Any]
|
93 |
+
|
94 |
+
|
95 |
+
class PayloadRegistry:
|
96 |
+
"""Payload registry.
|
97 |
+
|
98 |
+
note: zope.interface would allow a more efficient adapter search
|
99 |
+
"""
|
100 |
+
|
101 |
+
__slots__ = ("_first", "_normal", "_last", "_normal_lookup")
|
102 |
+
|
103 |
+
def __init__(self) -> None:
|
104 |
+
self._first: List[_PayloadRegistryItem] = []
|
105 |
+
self._normal: List[_PayloadRegistryItem] = []
|
106 |
+
self._last: List[_PayloadRegistryItem] = []
|
107 |
+
self._normal_lookup: Dict[Any, PayloadType] = {}
|
108 |
+
|
109 |
+
def get(
|
110 |
+
self,
|
111 |
+
data: Any,
|
112 |
+
*args: Any,
|
113 |
+
_CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
|
114 |
+
**kwargs: Any,
|
115 |
+
) -> "Payload":
|
116 |
+
if self._first:
|
117 |
+
for factory, type_ in self._first:
|
118 |
+
if isinstance(data, type_):
|
119 |
+
return factory(data, *args, **kwargs)
|
120 |
+
# Try the fast lookup first
|
121 |
+
if lookup_factory := self._normal_lookup.get(type(data)):
|
122 |
+
return lookup_factory(data, *args, **kwargs)
|
123 |
+
# Bail early if it's already a Payload
|
124 |
+
if isinstance(data, Payload):
|
125 |
+
return data
|
126 |
+
# Fall back to the slower linear search
|
127 |
+
for factory, type_ in _CHAIN(self._normal, self._last):
|
128 |
+
if isinstance(data, type_):
|
129 |
+
return factory(data, *args, **kwargs)
|
130 |
+
raise LookupError()
|
131 |
+
|
132 |
+
def register(
|
133 |
+
self, factory: PayloadType, type: Any, *, order: Order = Order.normal
|
134 |
+
) -> None:
|
135 |
+
if order is Order.try_first:
|
136 |
+
self._first.append((factory, type))
|
137 |
+
elif order is Order.normal:
|
138 |
+
self._normal.append((factory, type))
|
139 |
+
if isinstance(type, Iterable):
|
140 |
+
for t in type:
|
141 |
+
self._normal_lookup[t] = factory
|
142 |
+
else:
|
143 |
+
self._normal_lookup[type] = factory
|
144 |
+
elif order is Order.try_last:
|
145 |
+
self._last.append((factory, type))
|
146 |
+
else:
|
147 |
+
raise ValueError(f"Unsupported order {order!r}")
|
148 |
+
|
149 |
+
|
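(Editorial note: a hedged sketch of how the registry machinery above is used to register a custom payload type; Point and PointPayload are invented for illustration.)

    from aiohttp.abc import AbstractStreamWriter
    from aiohttp.payload import Payload, get_payload, payload_type

    class Point:  # hypothetical user type
        def __init__(self, x: int, y: int) -> None:
            self.x, self.y = x, y

    @payload_type(Point)
    class PointPayload(Payload):
        def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
            return f"{self._value.x},{self._value.y}"

        async def write(self, writer: AbstractStreamWriter) -> None:
            await writer.write(self.decode().encode("utf-8"))

    payload = get_payload(Point(1, 2))  # dispatches to PointPayload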
150 |
+
class Payload(ABC):
|
151 |
+
|
152 |
+
_default_content_type: str = "application/octet-stream"
|
153 |
+
_size: Optional[int] = None
|
154 |
+
|
155 |
+
def __init__(
|
156 |
+
self,
|
157 |
+
value: Any,
|
158 |
+
headers: Optional[
|
159 |
+
Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
|
160 |
+
] = None,
|
161 |
+
content_type: Union[str, None, _SENTINEL] = sentinel,
|
162 |
+
filename: Optional[str] = None,
|
163 |
+
encoding: Optional[str] = None,
|
164 |
+
**kwargs: Any,
|
165 |
+
) -> None:
|
166 |
+
self._encoding = encoding
|
167 |
+
self._filename = filename
|
168 |
+
self._headers: _CIMultiDict = CIMultiDict()
|
169 |
+
self._value = value
|
170 |
+
if content_type is not sentinel and content_type is not None:
|
171 |
+
self._headers[hdrs.CONTENT_TYPE] = content_type
|
172 |
+
elif self._filename is not None:
|
173 |
+
if sys.version_info >= (3, 13):
|
174 |
+
guesser = mimetypes.guess_file_type
|
175 |
+
else:
|
176 |
+
guesser = mimetypes.guess_type
|
177 |
+
content_type = guesser(self._filename)[0]
|
178 |
+
if content_type is None:
|
179 |
+
content_type = self._default_content_type
|
180 |
+
self._headers[hdrs.CONTENT_TYPE] = content_type
|
181 |
+
else:
|
182 |
+
self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
|
183 |
+
if headers:
|
184 |
+
self._headers.update(headers)
|
185 |
+
|
186 |
+
@property
|
187 |
+
def size(self) -> Optional[int]:
|
188 |
+
"""Size of the payload."""
|
189 |
+
return self._size
|
190 |
+
|
191 |
+
@property
|
192 |
+
def filename(self) -> Optional[str]:
|
193 |
+
"""Filename of the payload."""
|
194 |
+
return self._filename
|
195 |
+
|
196 |
+
@property
|
197 |
+
def headers(self) -> _CIMultiDict:
|
198 |
+
"""Custom item headers"""
|
199 |
+
return self._headers
|
200 |
+
|
201 |
+
@property
|
202 |
+
def _binary_headers(self) -> bytes:
|
203 |
+
return (
|
204 |
+
"".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
|
205 |
+
"utf-8"
|
206 |
+
)
|
207 |
+
+ b"\r\n"
|
208 |
+
)
|
209 |
+
|
210 |
+
@property
|
211 |
+
def encoding(self) -> Optional[str]:
|
212 |
+
"""Payload encoding"""
|
213 |
+
return self._encoding
|
214 |
+
|
215 |
+
@property
|
216 |
+
def content_type(self) -> str:
|
217 |
+
"""Content type"""
|
218 |
+
return self._headers[hdrs.CONTENT_TYPE]
|
219 |
+
|
220 |
+
def set_content_disposition(
|
221 |
+
self,
|
222 |
+
disptype: str,
|
223 |
+
quote_fields: bool = True,
|
224 |
+
_charset: str = "utf-8",
|
225 |
+
**params: Any,
|
226 |
+
) -> None:
|
227 |
+
"""Sets ``Content-Disposition`` header."""
|
228 |
+
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
|
229 |
+
disptype, quote_fields=quote_fields, _charset=_charset, **params
|
230 |
+
)
|
231 |
+
|
232 |
+
@abstractmethod
|
233 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
234 |
+
"""Return string representation of the value.
|
235 |
+
|
236 |
+
This is named decode() to allow compatibility with bytes objects.
|
237 |
+
"""
|
238 |
+
|
239 |
+
@abstractmethod
|
240 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
241 |
+
"""Write payload.
|
242 |
+
|
243 |
+
writer is an AbstractStreamWriter instance.
|
244 |
+
"""
|
245 |
+
|
246 |
+
|
247 |
+
class BytesPayload(Payload):
|
248 |
+
_value: bytes
|
249 |
+
|
250 |
+
def __init__(
|
251 |
+
self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any
|
252 |
+
) -> None:
|
253 |
+
if "content_type" not in kwargs:
|
254 |
+
kwargs["content_type"] = "application/octet-stream"
|
255 |
+
|
256 |
+
super().__init__(value, *args, **kwargs)
|
257 |
+
|
258 |
+
if isinstance(value, memoryview):
|
259 |
+
self._size = value.nbytes
|
260 |
+
elif isinstance(value, (bytes, bytearray)):
|
261 |
+
self._size = len(value)
|
262 |
+
else:
|
263 |
+
raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
|
264 |
+
|
265 |
+
if self._size > TOO_LARGE_BYTES_BODY:
|
266 |
+
kwargs = {"source": self}
|
267 |
+
warnings.warn(
|
268 |
+
"Sending a large body directly with raw bytes might"
|
269 |
+
" lock the event loop. You should probably pass an "
|
270 |
+
"io.BytesIO object instead",
|
271 |
+
ResourceWarning,
|
272 |
+
**kwargs,
|
273 |
+
)
|
274 |
+
|
275 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
276 |
+
return self._value.decode(encoding, errors)
|
277 |
+
|
278 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
279 |
+
await writer.write(self._value)
|
280 |
+
|
281 |
+
|
282 |
+
class StringPayload(BytesPayload):
|
283 |
+
def __init__(
|
284 |
+
self,
|
285 |
+
value: str,
|
286 |
+
*args: Any,
|
287 |
+
encoding: Optional[str] = None,
|
288 |
+
content_type: Optional[str] = None,
|
289 |
+
**kwargs: Any,
|
290 |
+
) -> None:
|
291 |
+
|
292 |
+
if encoding is None:
|
293 |
+
if content_type is None:
|
294 |
+
real_encoding = "utf-8"
|
295 |
+
content_type = "text/plain; charset=utf-8"
|
296 |
+
else:
|
297 |
+
mimetype = parse_mimetype(content_type)
|
298 |
+
real_encoding = mimetype.parameters.get("charset", "utf-8")
|
299 |
+
else:
|
300 |
+
if content_type is None:
|
301 |
+
content_type = "text/plain; charset=%s" % encoding
|
302 |
+
real_encoding = encoding
|
303 |
+
|
304 |
+
super().__init__(
|
305 |
+
value.encode(real_encoding),
|
306 |
+
encoding=real_encoding,
|
307 |
+
content_type=content_type,
|
308 |
+
*args,
|
309 |
+
**kwargs,
|
310 |
+
)
|
311 |
+
|
312 |
+
|
313 |
+
class StringIOPayload(StringPayload):
|
314 |
+
def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
|
315 |
+
super().__init__(value.read(), *args, **kwargs)
|
316 |
+
|
317 |
+
|
318 |
+
class IOBasePayload(Payload):
|
319 |
+
_value: io.IOBase
|
320 |
+
|
321 |
+
def __init__(
|
322 |
+
self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
|
323 |
+
) -> None:
|
324 |
+
if "filename" not in kwargs:
|
325 |
+
kwargs["filename"] = guess_filename(value)
|
326 |
+
|
327 |
+
super().__init__(value, *args, **kwargs)
|
328 |
+
|
329 |
+
if self._filename is not None and disposition is not None:
|
330 |
+
if hdrs.CONTENT_DISPOSITION not in self.headers:
|
331 |
+
self.set_content_disposition(disposition, filename=self._filename)
|
332 |
+
|
333 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
334 |
+
loop = asyncio.get_event_loop()
|
335 |
+
try:
|
336 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
|
337 |
+
while chunk:
|
338 |
+
await writer.write(chunk)
|
339 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
|
340 |
+
finally:
|
341 |
+
await loop.run_in_executor(None, self._value.close)
|
342 |
+
|
343 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
344 |
+
return "".join(r.decode(encoding, errors) for r in self._value.readlines())
|
345 |
+
|
346 |
+
|
347 |
+
class TextIOPayload(IOBasePayload):
|
348 |
+
_value: io.TextIOBase
|
349 |
+
|
350 |
+
def __init__(
|
351 |
+
self,
|
352 |
+
value: TextIO,
|
353 |
+
*args: Any,
|
354 |
+
encoding: Optional[str] = None,
|
355 |
+
content_type: Optional[str] = None,
|
356 |
+
**kwargs: Any,
|
357 |
+
) -> None:
|
358 |
+
|
359 |
+
if encoding is None:
|
360 |
+
if content_type is None:
|
361 |
+
encoding = "utf-8"
|
362 |
+
content_type = "text/plain; charset=utf-8"
|
363 |
+
else:
|
364 |
+
mimetype = parse_mimetype(content_type)
|
365 |
+
encoding = mimetype.parameters.get("charset", "utf-8")
|
366 |
+
else:
|
367 |
+
if content_type is None:
|
368 |
+
content_type = "text/plain; charset=%s" % encoding
|
369 |
+
|
370 |
+
super().__init__(
|
371 |
+
value,
|
372 |
+
content_type=content_type,
|
373 |
+
encoding=encoding,
|
374 |
+
*args,
|
375 |
+
**kwargs,
|
376 |
+
)
|
377 |
+
|
378 |
+
@property
|
379 |
+
def size(self) -> Optional[int]:
|
380 |
+
try:
|
381 |
+
return os.fstat(self._value.fileno()).st_size - self._value.tell()
|
382 |
+
except OSError:
|
383 |
+
return None
|
384 |
+
|
385 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
386 |
+
return self._value.read()
|
387 |
+
|
388 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
389 |
+
loop = asyncio.get_event_loop()
|
390 |
+
try:
|
391 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
|
392 |
+
while chunk:
|
393 |
+
data = (
|
394 |
+
chunk.encode(encoding=self._encoding)
|
395 |
+
if self._encoding
|
396 |
+
else chunk.encode()
|
397 |
+
)
|
398 |
+
await writer.write(data)
|
399 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
|
400 |
+
finally:
|
401 |
+
await loop.run_in_executor(None, self._value.close)
|
402 |
+
|
403 |
+
|
404 |
+
class BytesIOPayload(IOBasePayload):
|
405 |
+
_value: io.BytesIO
|
406 |
+
|
407 |
+
@property
|
408 |
+
def size(self) -> int:
|
409 |
+
position = self._value.tell()
|
410 |
+
end = self._value.seek(0, os.SEEK_END)
|
411 |
+
self._value.seek(position)
|
412 |
+
return end - position
|
413 |
+
|
414 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
415 |
+
return self._value.read().decode(encoding, errors)
|
416 |
+
|
417 |
+
|
418 |
+
class BufferedReaderPayload(IOBasePayload):
|
419 |
+
_value: io.BufferedIOBase
|
420 |
+
|
421 |
+
@property
|
422 |
+
def size(self) -> Optional[int]:
|
423 |
+
try:
|
424 |
+
return os.fstat(self._value.fileno()).st_size - self._value.tell()
|
425 |
+
except (OSError, AttributeError):
|
426 |
+
# data.fileno() is not supported, e.g.
|
427 |
+
# io.BufferedReader(io.BytesIO(b'data'))
|
428 |
+
# For some file-like objects (e.g. tarfile), the fileno() attribute may
|
429 |
+
# not exist at all, and will instead raise an AttributeError.
|
430 |
+
return None
|
431 |
+
|
432 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
433 |
+
return self._value.read().decode(encoding, errors)
|
434 |
+
|
435 |
+
|
436 |
+
class JsonPayload(BytesPayload):
|
437 |
+
def __init__(
|
438 |
+
self,
|
439 |
+
value: Any,
|
440 |
+
encoding: str = "utf-8",
|
441 |
+
content_type: str = "application/json",
|
442 |
+
dumps: JSONEncoder = json.dumps,
|
443 |
+
*args: Any,
|
444 |
+
**kwargs: Any,
|
445 |
+
) -> None:
|
446 |
+
|
447 |
+
super().__init__(
|
448 |
+
dumps(value).encode(encoding),
|
449 |
+
content_type=content_type,
|
450 |
+
encoding=encoding,
|
451 |
+
*args,
|
452 |
+
**kwargs,
|
453 |
+
)
|
454 |
+
|
455 |
+
|
456 |
+
if TYPE_CHECKING:
|
457 |
+
from typing import AsyncIterable, AsyncIterator
|
458 |
+
|
459 |
+
_AsyncIterator = AsyncIterator[bytes]
|
460 |
+
_AsyncIterable = AsyncIterable[bytes]
|
461 |
+
else:
|
462 |
+
from collections.abc import AsyncIterable, AsyncIterator
|
463 |
+
|
464 |
+
_AsyncIterator = AsyncIterator
|
465 |
+
_AsyncIterable = AsyncIterable
|
466 |
+
|
467 |
+
|
468 |
+
class AsyncIterablePayload(Payload):
|
469 |
+
|
470 |
+
_iter: Optional[_AsyncIterator] = None
|
471 |
+
_value: _AsyncIterable
|
472 |
+
|
473 |
+
def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
|
474 |
+
if not isinstance(value, AsyncIterable):
|
475 |
+
raise TypeError(
|
476 |
+
"value argument must support "
|
477 |
+
"collections.abc.AsyncIterable interface, "
|
478 |
+
"got {!r}".format(type(value))
|
479 |
+
)
|
480 |
+
|
481 |
+
if "content_type" not in kwargs:
|
482 |
+
kwargs["content_type"] = "application/octet-stream"
|
483 |
+
|
484 |
+
super().__init__(value, *args, **kwargs)
|
485 |
+
|
486 |
+
self._iter = value.__aiter__()
|
487 |
+
|
488 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
489 |
+
if self._iter:
|
490 |
+
try:
|
491 |
+
# the `iter is not None` check prevents rare cases
|
492 |
+
# when the same iterable is used twice
|
493 |
+
while True:
|
494 |
+
chunk = await self._iter.__anext__()
|
495 |
+
await writer.write(chunk)
|
496 |
+
except StopAsyncIteration:
|
497 |
+
self._iter = None
|
498 |
+
|
499 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
500 |
+
raise TypeError("Unable to decode.")
|
501 |
+
|
502 |
+
|
503 |
+
class StreamReaderPayload(AsyncIterablePayload):
|
504 |
+
def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
|
505 |
+
super().__init__(value.iter_any(), *args, **kwargs)
|
506 |
+
|
507 |
+
|
508 |
+
PAYLOAD_REGISTRY = PayloadRegistry()
|
509 |
+
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
|
510 |
+
PAYLOAD_REGISTRY.register(StringPayload, str)
|
511 |
+
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
|
512 |
+
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
|
513 |
+
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
|
514 |
+
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
|
515 |
+
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
|
516 |
+
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
|
517 |
+
# try_last gives more specialized async iterables, like
|
518 |
+
# BodyPartReaderPayload from aiohttp.multipart, a chance to override the default
|
519 |
+
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
|
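(Editorial note: after the registrations above, get_payload dispatches on the value's type.)

    import io

    from aiohttp.payload import get_payload

    print(type(get_payload(b"raw")).__name__)            # BytesPayload
    print(type(get_payload("text")).__name__)            # StringPayload
    print(type(get_payload(io.BytesIO(b"x"))).__name__)  # BytesIOPayload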
venv/Lib/site-packages/aiohttp/payload_streamer.py
ADDED
@@ -0,0 +1,78 @@
1 |
+
"""
|
2 |
+
Payload implementation for coroutines as data provider.
|
3 |
+
|
4 |
+
As a simple case, you can upload data from a file::
|
5 |
+
|
6 |
+
@aiohttp.streamer
|
7 |
+
async def file_sender(writer, file_name=None):
|
8 |
+
with open(file_name, 'rb') as f:
|
9 |
+
chunk = f.read(2**16)
|
10 |
+
while chunk:
|
11 |
+
await writer.write(chunk)
|
12 |
+
|
13 |
+
chunk = f.read(2**16)
|
14 |
+
|
15 |
+
Then you can use `file_sender` like this::
|
16 |
+
|
17 |
+
async with session.post('http://httpbin.org/post',
|
18 |
+
data=file_sender(file_name='huge_file')) as resp:
|
19 |
+
print(await resp.text())
|
20 |
+
|
21 |
+
.. note:: The coroutine must accept `writer` as its first argument
|
22 |
+
|
23 |
+
"""
|
24 |
+
|
25 |
+
import types
|
26 |
+
import warnings
|
27 |
+
from typing import Any, Awaitable, Callable, Dict, Tuple
|
28 |
+
|
29 |
+
from .abc import AbstractStreamWriter
|
30 |
+
from .payload import Payload, payload_type
|
31 |
+
|
32 |
+
__all__ = ("streamer",)
|
33 |
+
|
34 |
+
|
35 |
+
class _stream_wrapper:
|
36 |
+
def __init__(
|
37 |
+
self,
|
38 |
+
coro: Callable[..., Awaitable[None]],
|
39 |
+
args: Tuple[Any, ...],
|
40 |
+
kwargs: Dict[str, Any],
|
41 |
+
) -> None:
|
42 |
+
self.coro = types.coroutine(coro)
|
43 |
+
self.args = args
|
44 |
+
self.kwargs = kwargs
|
45 |
+
|
46 |
+
async def __call__(self, writer: AbstractStreamWriter) -> None:
|
47 |
+
await self.coro(writer, *self.args, **self.kwargs)
|
48 |
+
|
49 |
+
|
50 |
+
class streamer:
|
51 |
+
def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
|
52 |
+
warnings.warn(
|
53 |
+
"@streamer is deprecated, use async generators instead",
|
54 |
+
DeprecationWarning,
|
55 |
+
stacklevel=2,
|
56 |
+
)
|
57 |
+
self.coro = coro
|
58 |
+
|
59 |
+
def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
|
60 |
+
return _stream_wrapper(self.coro, args, kwargs)
|
61 |
+
|
62 |
+
|
63 |
+
@payload_type(_stream_wrapper)
|
64 |
+
class StreamWrapperPayload(Payload):
|
65 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
66 |
+
await self._value(writer)
|
67 |
+
|
68 |
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
|
69 |
+
raise TypeError("Unable to decode.")
|
70 |
+
|
71 |
+
|
72 |
+
@payload_type(streamer)
|
73 |
+
class StreamPayload(StreamWrapperPayload):
|
74 |
+
def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
|
75 |
+
super().__init__(value(), *args, **kwargs)
|
76 |
+
|
77 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
78 |
+
await self._value(writer)
|
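The DeprecationWarning above points at async generators; a minimal sketch of the equivalent modern pattern (not part of this diff; `session` and `url` are assumed to exist):

async def file_sender(file_name):
    # Plain async generator; no @aiohttp.streamer decorator needed.
    # The yielded chunks are consumed via AsyncIterablePayload.
    with open(file_name, "rb") as f:
        while chunk := f.read(2**16):
            yield chunk

# async with session.post(url, data=file_sender("huge_file")) as resp:
#     print(await resp.text())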
venv/Lib/site-packages/aiohttp/py.typed
ADDED
@@ -0,0 +1 @@
Marker
venv/Lib/site-packages/aiohttp/pytest_plugin.py
ADDED
@@ -0,0 +1,436 @@
import asyncio
import contextlib
import inspect
import warnings
from typing import (
    Any,
    Awaitable,
    Callable,
    Dict,
    Iterator,
    Optional,
    Protocol,
    Type,
    Union,
    overload,
)

import pytest

from .test_utils import (
    BaseTestServer,
    RawTestServer,
    TestClient,
    TestServer,
    loop_context,
    setup_test_loop,
    teardown_test_loop,
    unused_port as _unused_port,
)
from .web import Application, BaseRequest, Request
from .web_protocol import _RequestHandler

try:
    import uvloop
except ImportError:  # pragma: no cover
    uvloop = None  # type: ignore[assignment]


class AiohttpClient(Protocol):
    @overload
    async def __call__(
        self,
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...
    @overload
    async def __call__(
        self,
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...


class AiohttpServer(Protocol):
    def __call__(
        self, app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[TestServer]: ...


class AiohttpRawServer(Protocol):
    def __call__(
        self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[RawTestServer]: ...


def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    parser.addoption(
        "--aiohttp-fast",
        action="store_true",
        default=False,
        help="run tests faster by disabling extra checks",
    )
    parser.addoption(
        "--aiohttp-loop",
        action="store",
        default="pyloop",
        help="run tests with specific loop: pyloop, uvloop or all",
    )
    parser.addoption(
        "--aiohttp-enable-loop-debug",
        action="store_true",
        default=False,
        help="enable event loop debug mode",
    )


def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if inspect.isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif inspect.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending on it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    fixturedef.func = wrapper


@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """--fast config option"""
    return request.config.getoption("--aiohttp-fast")


@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """--enable-loop-debug config option"""
    return request.config.getoption("--aiohttp-enable-loop-debug")


@contextlib.contextmanager
def _runtime_warning_context():  # type: ignore[no-untyped-def]
    """Context manager which checks for RuntimeWarnings.

    This exists specifically to
    avoid "coroutine 'X' was never awaited" warnings being missed.

    If RuntimeWarnings occur in the context a RuntimeError is raised.
    """
    with warnings.catch_warnings(record=True) as _warnings:
        yield
        rw = [
            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
            for w in _warnings
            if w.category == RuntimeWarning
        ]
        if rw:
            raise RuntimeError(
                "{} Runtime Warning{},\n{}".format(
                    len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
                )
            )


@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
    """Passthrough loop context.

    Sets up and tears down a loop unless one is passed in via the loop
    argument, in which case it is passed straight through.
    """
    if loop:
        # loop already exists, pass it straight through
        yield loop
    else:
        # this shadows loop_context's standard behavior
        loop = setup_test_loop()
        yield loop
        teardown_test_loop(loop, fast=fast)


def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    if collector.funcnamefilter(name) and inspect.iscoroutinefunction(obj):
        return list(collector._genfunctions(name, obj))


def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call."""
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if inspect.iscoroutinefunction(pyfuncitem.function):
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        return True


def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(avail_factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )


@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop


@pytest.fixture
def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port


@pytest.fixture
def aiohttp_unused_port() -> Callable[[], int]:
    """Return a port that is unused on the current host."""
    return _unused_port


@pytest.fixture
def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    servers = []

    async def go(
        app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> TestServer:
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def]  # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server


@pytest.fixture
def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    servers = []

    async def go(
        handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> RawTestServer:
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
    aiohttp_raw_server,
):
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server


@pytest.fixture
def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    @overload
    async def go(
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...

    @overload
    async def go(
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Any, Any]:
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())


@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def]  # pragma: no cover
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
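For context, a minimal sketch of how these fixtures fit together in a test module, assuming pytest runs with this plugin enabled:

from aiohttp import web


async def hello(request):
    return web.Response(text="Hello, world")


async def test_hello(aiohttp_client):
    # aiohttp_client wraps the app in a TestServer + TestClient pair
    # and tears both down after the test.
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)

    resp = await client.get("/")
    assert resp.status == 200
    assert "Hello" in await resp.text()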
venv/Lib/site-packages/aiohttp/resolver.py
ADDED
@@ -0,0 +1,190 @@
import asyncio
import socket
from typing import Any, Dict, List, Optional, Tuple, Type, Union

from .abc import AbstractResolver, ResolveResult

__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")


try:
    import aiodns

    aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo")
except ImportError:  # pragma: no cover
    aiodns = None  # type: ignore[assignment]
    aiodns_default = False


_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV
_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
_AI_ADDRCONFIG = socket.AI_ADDRCONFIG
if hasattr(socket, "AI_MASK"):
    _AI_ADDRCONFIG &= socket.AI_MASK


class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        self._loop = loop or asyncio.get_running_loop()

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        infos = await self._loop.getaddrinfo(
            host,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=_AI_ADDRCONFIG,
        )

        hosts: List[ResolveResult] = []
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by this Python build,
                    # or IPv6 is not enabled on the host
                    continue
                if address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should
                    # use getnameinfo() unconditionally, but for performance we
                    # only do so when needed.
                    resolved_host, _port = await self._loop.getnameinfo(
                        address, _NAME_SOCKET_FLAGS
                    )
                    port = int(_port)
                else:
                    resolved_host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host, port = address  # type: ignore[misc]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=proto,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        return hosts

    async def close(self) -> None:
        pass


class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._resolver = aiodns.DNSResolver(*args, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fall back to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        try:
            resp = await self._resolver.getaddrinfo(
                host,
                port=port,
                type=socket.SOCK_STREAM,
                family=family,
                flags=_AI_ADDRCONFIG,
            )
        except aiodns.error.DNSError as exc:
            # DNSError carries (errno, message); only use the message
            # when it is actually present.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc
        hosts: List[ResolveResult] = []
        for node in resp.nodes:
            address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr
            family = node.family
            if family == socket.AF_INET6:
                if len(address) > 3 and address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should
                    # use getnameinfo() unconditionally, but for performance we
                    # only do so when needed.
                    result = await self._resolver.getnameinfo(
                        (address[0].decode("ascii"), *address[1:]),
                        _NAME_SOCKET_FLAGS,
                    )
                    resolved_host = result.node
                else:
                    resolved_host = address[0].decode("ascii")
                    port = address[1]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host = address[0].decode("ascii")
                port = address[1]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=0,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def close(self) -> None:
        self._resolver.cancel()


_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
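For context, a resolver is typically injected through a connector; a minimal sketch (not part of this diff) assuming aiodns is installed and the nameserver addresses are placeholders:

import aiohttp
from aiohttp.resolver import AsyncResolver


async def fetch_status(url: str) -> int:
    # nameservers values are placeholders, not a recommendation
    resolver = AsyncResolver(nameservers=["8.8.8.8", "8.8.4.4"])
    connector = aiohttp.TCPConnector(resolver=resolver)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return resp.status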
venv/Lib/site-packages/aiohttp/streams.py
ADDED
@@ -0,0 +1,727 @@
import asyncio
import collections
import warnings
from typing import (
    Awaitable,
    Callable,
    Deque,
    Final,
    Generic,
    List,
    Optional,
    Tuple,
    TypeVar,
)

from .base_protocol import BaseProtocol
from .helpers import (
    _EXC_SENTINEL,
    BaseTimerContext,
    TimerNoop,
    set_exception,
    set_result,
)
from .log import internal_logger

__all__ = (
    "EMPTY_PAYLOAD",
    "EofStream",
    "StreamReader",
    "DataQueue",
)

_T = TypeVar("_T")


class EofStream(Exception):
    """eof stream indication."""


class AsyncStreamIterator(Generic[_T]):

    __slots__ = ("read_func",)

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            rv = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if rv == b"":
            raise StopAsyncIteration
        return rv


class ChunkTupleAsyncStreamIterator:

    __slots__ = ("_stream",)

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        rv = await self._stream.readchunk()
        if rv == (b"", False):
            raise StopAsyncIteration
        return rv


class AsyncStreamReaderMixin:

    __slots__ = ()

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n."""
        return AsyncStreamIterator(lambda: self.read(n))  # type: ignore[attr-defined]

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]


class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    __slots__ = (
        "_protocol",
        "_low_water",
        "_high_water",
        "_loop",
        "_size",
        "_cursor",
        "_http_chunk_splits",
        "_buffer",
        "_buffer_offset",
        "_eof",
        "_waiter",
        "_eof_waiter",
        "_exception",
        "_timer",
        "_eof_callbacks",
        "_eof_counter",
        "total_bytes",
    )

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits: Optional[List[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        self._buffer_offset = 0
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = TimerNoop() if timer is None else timer
        self._eof_callbacks: List[Callable[[], None]] = []
        self._eof_counter = 0
        self.total_bytes = 0

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        self._exception = exc
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc, exc_cause)

    def on_eof(self, callback: Callable[[], None]) -> None:
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        if self._protocol._reading_paused:
            self._protocol.resume_reading()

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        data_len = len(data)
        self._size += data_len
        self._buffer.append(data)
        self.total_bytes += data_len

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            if self.total_bytes:
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_chunk_receiving without calling "
                "begin_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        if not self._protocol.connected:
            raise RuntimeError("Connection closed.")

        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not be possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            with self._timer:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(
                    ichar - offset + seplen - 1 if ichar else -1
                )
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        if self._exception is not None:
            raise self._exception

        # Migration aid: with DataQueue you have to catch the EofStream
        # exception, so a common pattern is to run payload.read() inside an
        # infinite loop, which can become a real infinite loop with
        # StreamReader. Let's keep this check for one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of an HTTP chunk; otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
            # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        # default was changed to be consistent with .read(-1)
        #
        # I believe most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        data_len = len(data)
        self._size -= data_len
        self._cursor += data_len

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        self._timer.assert_timeout()

        chunks = []
        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""


class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]

    __slots__ = ("_read_eof_chunk",)

    def __init__(self) -> None:
        self._read_eof_chunk = False
        self.total_bytes = 0

    def __repr__(self) -> str:
        return "<%s>" % self.__class__.__name__

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        if not self._read_eof_chunk:
            self._read_eof_chunk = True
            return (b"", False)

        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""


EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()


class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        return self._eof

    def at_eof(self) -> bool:
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        self._eof = True
        self._exception = exc
        if (waiter := self._waiter) is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

    def feed_data(self, data: _T, size: int = 0) -> None:
        self._buffer.append((data, size))
        if (waiter := self._waiter) is not None:
            self._waiter = None
            set_result(waiter, None)

    def feed_eof(self) -> None:
        self._eof = True
        if (waiter := self._waiter) is not None:
            self._waiter = None
            set_result(waiter, None)

    async def read(self) -> _T:
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise
        if self._buffer:
            data, _ = self._buffer.popleft()
            return data
        if self._exception is not None:
            raise self._exception
        raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)


class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.

    This class is deprecated and will be removed in version 4.0.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)
        self._size = 0
        self._protocol = protocol
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        super().feed_data(data, size)
        self._size += size

        if self._size > self._limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise
        if self._buffer:
            data, size = self._buffer.popleft()
            self._size -= size
            if self._size < self._limit and self._protocol._reading_paused:
                self._protocol.resume_reading()
            return data
        if self._exception is not None:
            raise self._exception
        raise EofStream
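For context, client code meets this StreamReader as `resp.content`; a minimal sketch of streaming a response body to disk:

import aiohttp


async def download(url: str, dest: str) -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            with open(dest, "wb") as f:
                # resp.content is the StreamReader defined above;
                # iter_chunked() yields at most 2**16 bytes per chunk.
                async for chunk in resp.content.iter_chunked(2**16):
                    f.write(chunk)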
venv/Lib/site-packages/aiohttp/tcp_helpers.py
ADDED
@@ -0,0 +1,37 @@
"""Helper methods to tune a TCP connection"""

import asyncio
import socket
from contextlib import suppress
from typing import Optional  # noqa

__all__ = ("tcp_keepalive", "tcp_nodelay")


if hasattr(socket, "SO_KEEPALIVE"):

    def tcp_keepalive(transport: asyncio.Transport) -> None:
        sock = transport.get_extra_info("socket")
        if sock is not None:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

else:

    def tcp_keepalive(transport: asyncio.Transport) -> None:  # pragma: no cover
        pass


def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
    sock = transport.get_extra_info("socket")

    if sock is None:
        return

    if sock.family not in (socket.AF_INET, socket.AF_INET6):
        return

    value = bool(value)

    # the socket may already be closed; on Windows an OSError is raised
    with suppress(OSError):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
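A minimal sketch of where these helpers are typically applied, using a hypothetical protocol class (not part of aiohttp) that tunes its transport on connect:

import asyncio

from aiohttp.tcp_helpers import tcp_keepalive, tcp_nodelay


class TunedProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        # Both helpers are no-ops when the transport exposes no socket
        # or the platform lacks the relevant option.
        tcp_keepalive(transport)
        tcp_nodelay(transport, True)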
venv/Lib/site-packages/aiohttp/test_utils.py
ADDED
@@ -0,0 +1,774 @@
+"""Utilities shared by tests."""
+
+import asyncio
+import contextlib
+import gc
+import inspect
+import ipaddress
+import os
+import socket
+import sys
+import warnings
+from abc import ABC, abstractmethod
+from types import TracebackType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Generic,
+    Iterator,
+    List,
+    Optional,
+    Type,
+    TypeVar,
+    cast,
+    overload,
+)
+from unittest import IsolatedAsyncioTestCase, mock
+
+from aiosignal import Signal
+from multidict import CIMultiDict, CIMultiDictProxy
+from yarl import URL
+
+import aiohttp
+from aiohttp.client import (
+    _RequestContextManager,
+    _RequestOptions,
+    _WSRequestContextManager,
+)
+
+from . import ClientSession, hdrs
+from .abc import AbstractCookieJar
+from .client_reqrep import ClientResponse
+from .client_ws import ClientWebSocketResponse
+from .helpers import sentinel
+from .http import HttpVersion, RawRequestMessage
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .typedefs import StrOrURL
+from .web import (
+    Application,
+    AppRunner,
+    BaseRequest,
+    BaseRunner,
+    Request,
+    Server,
+    ServerRunner,
+    SockSite,
+    UrlMappingMatchInfo,
+)
+from .web_protocol import _RequestHandler
+
+if TYPE_CHECKING:
+    from ssl import SSLContext
+else:
+    SSLContext = None
+
+if sys.version_info >= (3, 11) and TYPE_CHECKING:
+    from typing import Unpack
+
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    Self = Any
+
+_ApplicationNone = TypeVar("_ApplicationNone", Application, None)
+_Request = TypeVar("_Request", bound=BaseRequest)
+
+REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
+
+
+def get_unused_port_socket(
+    host: str, family: socket.AddressFamily = socket.AF_INET
+) -> socket.socket:
+    return get_port_socket(host, 0, family)
+
+
+def get_port_socket(
+    host: str, port: int, family: socket.AddressFamily
+) -> socket.socket:
+    s = socket.socket(family, socket.SOCK_STREAM)
+    if REUSE_ADDRESS:
+        # Windows has different semantics for SO_REUSEADDR,
+        # so don't set it. Ref:
+        # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
+        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    s.bind((host, port))
+    return s
+
+
+def unused_port() -> int:
+    """Return a port that is unused on the current host."""
+    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+        s.bind(("127.0.0.1", 0))
+        return cast(int, s.getsockname()[1])
+
+
+class BaseTestServer(ABC):
+    __test__ = False
+
+    def __init__(
+        self,
+        *,
+        scheme: str = "",
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        host: str = "127.0.0.1",
+        port: Optional[int] = None,
+        skip_url_asserts: bool = False,
+        socket_factory: Callable[
+            [str, int, socket.AddressFamily], socket.socket
+        ] = get_port_socket,
+        **kwargs: Any,
+    ) -> None:
+        self._loop = loop
+        self.runner: Optional[BaseRunner] = None
+        self._root: Optional[URL] = None
+        self.host = host
+        self.port = port
+        self._closed = False
+        self.scheme = scheme
+        self.skip_url_asserts = skip_url_asserts
+        self.socket_factory = socket_factory
+
+    async def start_server(
+        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
+    ) -> None:
+        if self.runner:
+            return
+        self._loop = loop
+        self._ssl = kwargs.pop("ssl", None)
+        self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
+        await self.runner.setup()
+        if not self.port:
+            self.port = 0
+        absolute_host = self.host
+        try:
+            version = ipaddress.ip_address(self.host).version
+        except ValueError:
+            version = 4
+        if version == 6:
+            absolute_host = f"[{self.host}]"
+        family = socket.AF_INET6 if version == 6 else socket.AF_INET
+        _sock = self.socket_factory(self.host, self.port, family)
+        self.host, self.port = _sock.getsockname()[:2]
+        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
+        await site.start()
+        server = site._server
+        assert server is not None
+        sockets = server.sockets  # type: ignore[attr-defined]
+        assert sockets is not None
+        self.port = sockets[0].getsockname()[1]
+        if not self.scheme:
+            self.scheme = "https" if self._ssl else "http"
+        self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}")
+
+    @abstractmethod  # pragma: no cover
+    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+        pass
+
+    def make_url(self, path: StrOrURL) -> URL:
+        assert self._root is not None
+        url = URL(path)
+        if not self.skip_url_asserts:
+            assert not url.absolute
+            return self._root.join(url)
+        else:
+            return URL(str(self._root) + str(path))
+
+    @property
+    def started(self) -> bool:
+        return self.runner is not None
+
+    @property
+    def closed(self) -> bool:
+        return self._closed
+
+    @property
+    def handler(self) -> Server:
+        # for backward compatibility
+        # web.Server instance
+        runner = self.runner
+        assert runner is not None
+        assert runner.server is not None
+        return runner.server
+
+    async def close(self) -> None:
+        """Close all fixtures created by the test client.
+
+        After that point, the TestClient is no longer usable.
+
+        This is an idempotent function: running close multiple times
+        will not have any additional effects.
+
+        close is also run when the object is garbage collected, and on
+        exit when used as a context manager.
+
+        """
+        if self.started and not self.closed:
+            assert self.runner is not None
+            await self.runner.cleanup()
+            self._root = None
+            self.port = None
+            self._closed = True
+
+    def __enter__(self) -> None:
+        raise TypeError("Use async with instead")
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_value: Optional[BaseException],
+        traceback: Optional[TracebackType],
+    ) -> None:
+        # __exit__ should exist in pair with __enter__ but is never executed
+        pass  # pragma: no cover
+
+    async def __aenter__(self) -> "BaseTestServer":
+        await self.start_server(loop=self._loop)
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_value: Optional[BaseException],
+        traceback: Optional[TracebackType],
+    ) -> None:
+        await self.close()
+
+
+class TestServer(BaseTestServer):
+    def __init__(
+        self,
+        app: Application,
+        *,
+        scheme: str = "",
+        host: str = "127.0.0.1",
+        port: Optional[int] = None,
+        **kwargs: Any,
+    ):
+        self.app = app
+        super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+        return AppRunner(self.app, **kwargs)
+
+
+class RawTestServer(BaseTestServer):
+    def __init__(
+        self,
+        handler: _RequestHandler,
+        *,
+        scheme: str = "",
+        host: str = "127.0.0.1",
+        port: Optional[int] = None,
+        **kwargs: Any,
+    ) -> None:
+        self._handler = handler
+        super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+    async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
+        srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
+        return ServerRunner(srv, debug=debug, **kwargs)
+
+
+class TestClient(Generic[_Request, _ApplicationNone]):
+    """
+    A test client implementation.
+
+    To write functional tests for aiohttp based servers.
+
+    """
+
+    __test__ = False
+
+    @overload
+    def __init__(
+        self: "TestClient[Request, Application]",
+        server: TestServer,
+        *,
+        cookie_jar: Optional[AbstractCookieJar] = None,
+        **kwargs: Any,
+    ) -> None: ...
+    @overload
+    def __init__(
+        self: "TestClient[_Request, None]",
+        server: BaseTestServer,
+        *,
+        cookie_jar: Optional[AbstractCookieJar] = None,
+        **kwargs: Any,
+    ) -> None: ...
+    def __init__(
+        self,
+        server: BaseTestServer,
+        *,
+        cookie_jar: Optional[AbstractCookieJar] = None,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        **kwargs: Any,
+    ) -> None:
+        if not isinstance(server, BaseTestServer):
+            raise TypeError(
+                "server must be TestServer instance, found type: %r" % type(server)
+            )
+        self._server = server
+        self._loop = loop
+        if cookie_jar is None:
+            cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
+        self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
+        self._session._retry_connection = False
+        self._closed = False
+        self._responses: List[ClientResponse] = []
+        self._websockets: List[ClientWebSocketResponse] = []
+
+    async def start_server(self) -> None:
+        await self._server.start_server(loop=self._loop)
+
+    @property
+    def host(self) -> str:
+        return self._server.host
+
+    @property
+    def port(self) -> Optional[int]:
+        return self._server.port
+
+    @property
+    def server(self) -> BaseTestServer:
+        return self._server
+
+    @property
+    def app(self) -> _ApplicationNone:
+        return getattr(self._server, "app", None)  # type: ignore[return-value]
+
+    @property
+    def session(self) -> ClientSession:
+        """An internal aiohttp.ClientSession.
+
+        Unlike the methods on the TestClient, client session requests
+        do not automatically include the host in the url queried, and
+        will require an absolute path to the resource.
+
+        """
+        return self._session
+
+    def make_url(self, path: StrOrURL) -> URL:
+        return self._server.make_url(path)
+
+    async def _request(
+        self, method: str, path: StrOrURL, **kwargs: Any
+    ) -> ClientResponse:
+        resp = await self._session.request(method, self.make_url(path), **kwargs)
+        # save it to close later
+        self._responses.append(resp)
+        return resp
+
+    if sys.version_info >= (3, 11) and TYPE_CHECKING:
+
+        def request(
+            self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions]
+        ) -> _RequestContextManager: ...
+
+        def get(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def options(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def head(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def post(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def put(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def patch(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def delete(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+    else:
+
+        def request(
+            self, method: str, path: StrOrURL, **kwargs: Any
+        ) -> _RequestContextManager:
+            """Routes a request to the tested http server.
+
+            The interface is identical to aiohttp.ClientSession.request,
+            except the loop kwarg is overridden by the instance used by the
+            test server.
+
+            """
+            return _RequestContextManager(self._request(method, path, **kwargs))
+
+        def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP GET request."""
+            return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
+
+        def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP POST request."""
+            return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
+
+        def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP OPTIONS request."""
+            return _RequestContextManager(
+                self._request(hdrs.METH_OPTIONS, path, **kwargs)
+            )
+
+        def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP HEAD request."""
+            return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
+
+        def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP PUT request."""
+            return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
+
+        def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP PATCH request."""
+            return _RequestContextManager(
+                self._request(hdrs.METH_PATCH, path, **kwargs)
+            )
+
+        def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP DELETE request."""
+            return _RequestContextManager(
+                self._request(hdrs.METH_DELETE, path, **kwargs)
+            )
+
+    def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
+        """Initiate websocket connection.
+
+        The api corresponds to aiohttp.ClientSession.ws_connect.
+
+        """
+        return _WSRequestContextManager(self._ws_connect(path, **kwargs))
+
+    async def _ws_connect(
+        self, path: StrOrURL, **kwargs: Any
+    ) -> ClientWebSocketResponse:
+        ws = await self._session.ws_connect(self.make_url(path), **kwargs)
+        self._websockets.append(ws)
+        return ws
+
+    async def close(self) -> None:
+        """Close all fixtures created by the test client.
+
+        After that point, the TestClient is no longer usable.
+
+        This is an idempotent function: running close multiple times
+        will not have any additional effects.
+
+        close is also run on exit when used as a(n) (asynchronous)
+        context manager.
+
+        """
+        if not self._closed:
+            for resp in self._responses:
+                resp.close()
+            for ws in self._websockets:
+                await ws.close()
+            await self._session.close()
+            await self._server.close()
+            self._closed = True
+
+    def __enter__(self) -> None:
+        raise TypeError("Use async with instead")
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc: Optional[BaseException],
+        tb: Optional[TracebackType],
+    ) -> None:
+        # __exit__ should exist in pair with __enter__ but is never executed
+        pass  # pragma: no cover
+
+    async def __aenter__(self) -> Self:
+        await self.start_server()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc: Optional[BaseException],
+        tb: Optional[TracebackType],
+    ) -> None:
+        await self.close()
+
+
+class AioHTTPTestCase(IsolatedAsyncioTestCase):
+    """A base class to allow for unittest web applications using aiohttp.
+
+    Provides the following:
+
+    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
+    * self.loop (asyncio.BaseEventLoop): the event loop in which the
+        application and server are running.
+    * self.app (aiohttp.web.Application): the application returned by
+        self.get_application()
+
+    Note that the TestClient's methods are asynchronous: you have to
+    execute functions on the test client using asynchronous methods.
+    """
+
+    async def get_application(self) -> Application:
+        """Get application.
+
+        This method should be overridden
+        to return the aiohttp.web.Application
+        object to test.
+        """
+        return self.get_app()
+
+    def get_app(self) -> Application:
+        """Obsolete method used to construct a web application.
+
+        Use the .get_application() coroutine instead.
+        """
+        raise RuntimeError("Did you forget to define get_application()?")
+
+    async def asyncSetUp(self) -> None:
+        self.loop = asyncio.get_running_loop()
+        return await self.setUpAsync()
+
+    async def setUpAsync(self) -> None:
+        self.app = await self.get_application()
+        self.server = await self.get_server(self.app)
+        self.client = await self.get_client(self.server)
+
+        await self.client.start_server()
+
+    async def asyncTearDown(self) -> None:
+        return await self.tearDownAsync()
+
+    async def tearDownAsync(self) -> None:
+        await self.client.close()
+
+    async def get_server(self, app: Application) -> TestServer:
+        """Return a TestServer instance."""
+        return TestServer(app, loop=self.loop)
+
+    async def get_client(self, server: TestServer) -> TestClient[Request, Application]:
+        """Return a TestClient instance."""
+        return TestClient(server, loop=self.loop)
+
+
+def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
+    """
+    A decorator dedicated to use with asynchronous AioHTTPTestCase test methods.
+
+    In 3.8+, this does nothing.
+    """
+    warnings.warn(
+        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return func
+
+
+_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
+
+
+@contextlib.contextmanager
+def loop_context(
+    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
+) -> Iterator[asyncio.AbstractEventLoop]:
+    """A contextmanager that creates an event_loop, for test purposes.
+
+    Handles the creation and cleanup of a test loop.
+    """
+    loop = setup_test_loop(loop_factory)
+    yield loop
+    teardown_test_loop(loop, fast=fast)
+
+
+def setup_test_loop(
+    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
+) -> asyncio.AbstractEventLoop:
+    """Create and return an asyncio.BaseEventLoop instance.
+
+    The caller should also call teardown_test_loop,
+    once they are done with the loop.
+    """
+    loop = loop_factory()
+    asyncio.set_event_loop(loop)
+    return loop
+
+
+def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
+    """Teardown and cleanup an event_loop created by setup_test_loop."""
+    closed = loop.is_closed()
+    if not closed:
+        loop.call_soon(loop.stop)
+        loop.run_forever()
+        loop.close()
+
+    if not fast:
+        gc.collect()
+
+    asyncio.set_event_loop(None)
+
+
+def _create_app_mock() -> mock.MagicMock:
+    def get_dict(app: Any, key: str) -> Any:
+        return app.__app_dict[key]
+
+    def set_dict(app: Any, key: str, value: Any) -> None:
+        app.__app_dict[key] = value
+
+    app = mock.MagicMock(spec=Application)
+    app.__app_dict = {}
+    app.__getitem__ = get_dict
+    app.__setitem__ = set_dict
+
+    app._debug = False
+    app.on_response_prepare = Signal(app)
+    app.on_response_prepare.freeze()
+    return app
+
+
+def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
+    transport = mock.Mock()
+
+    def get_extra_info(key: str) -> Optional[SSLContext]:
+        if key == "sslcontext":
+            return sslcontext
+        else:
+            return None
+
+    transport.get_extra_info.side_effect = get_extra_info
+    return transport
+
+
+def make_mocked_request(
+    method: str,
+    path: str,
+    headers: Any = None,
+    *,
+    match_info: Any = sentinel,
+    version: HttpVersion = HttpVersion(1, 1),
+    closing: bool = False,
+    app: Any = None,
+    writer: Any = sentinel,
+    protocol: Any = sentinel,
+    transport: Any = sentinel,
+    payload: StreamReader = EMPTY_PAYLOAD,
+    sslcontext: Optional[SSLContext] = None,
+    client_max_size: int = 1024**2,
+    loop: Any = ...,
+) -> Request:
+    """Creates a mocked web.Request for testing purposes.
+
+    Useful in unit tests, when spinning up a full web server is overkill or
+    specific conditions and errors are hard to trigger.
+    """
+    task = mock.Mock()
+    if loop is ...:
+        # no loop passed, try to get the current one if
+        # it is running as we need a real loop to create
+        # executor jobs to be able to do testing
+        # with a real executor
+        try:
+            loop = asyncio.get_running_loop()
+        except RuntimeError:
+            loop = mock.Mock()
+            loop.create_future.return_value = ()
+
+    if version < HttpVersion(1, 1):
+        closing = True
+
+    if headers:
+        headers = CIMultiDictProxy(CIMultiDict(headers))
+        raw_hdrs = tuple(
+            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
+        )
+    else:
+        headers = CIMultiDictProxy(CIMultiDict())
+        raw_hdrs = ()
+
+    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+
+    message = RawRequestMessage(
+        method,
+        path,
+        version,
+        headers,
+        raw_hdrs,
+        closing,
+        None,
+        False,
+        chunked,
+        URL(path),
+    )
+    if app is None:
+        app = _create_app_mock()
+
+    if transport is sentinel:
+        transport = _create_transport(sslcontext)
+
+    if protocol is sentinel:
+        protocol = mock.Mock()
+        protocol.transport = transport
+        type(protocol).peername = mock.PropertyMock(
+            return_value=transport.get_extra_info("peername")
+        )
+        type(protocol).ssl_context = mock.PropertyMock(return_value=sslcontext)
+
+    if writer is sentinel:
+        writer = mock.Mock()
+        writer.write_headers = make_mocked_coro(None)
+        writer.write = make_mocked_coro(None)
+        writer.write_eof = make_mocked_coro(None)
+        writer.drain = make_mocked_coro(None)
+        writer.transport = transport
+
+    protocol.transport = transport
+    protocol.writer = writer
+
+    req = Request(
+        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
+    )
+
+    match_info = UrlMappingMatchInfo(
+        {} if match_info is sentinel else match_info, mock.Mock()
+    )
+    match_info.add_app(app)
+    req._match_info = match_info
+
+    return req
+
+
+def make_mocked_coro(
+    return_value: Any = sentinel, raise_exception: Any = sentinel
+) -> Any:
+    """Creates a coroutine mock."""
+
+    async def mock_coro(*args: Any, **kwargs: Any) -> Any:
+        if raise_exception is not sentinel:
+            raise raise_exception
+        if not inspect.isawaitable(return_value):
+            return return_value
+        await return_value
+
+    return mock.Mock(wraps=mock_coro)
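
A minimal usage sketch for the test utilities above (the app and handler are illustrative; the AioHTTPTestCase and TestClient behavior is as defined in this file):

    from aiohttp import web
    from aiohttp.test_utils import AioHTTPTestCase

    async def hello(request: web.Request) -> web.Response:
        return web.Response(text="hello")

    class HelloAppTestCase(AioHTTPTestCase):
        async def get_application(self) -> web.Application:
            # build the application under test
            app = web.Application()
            app.router.add_get("/", hello)
            return app

        async def test_hello(self) -> None:
            # self.client is the TestClient created in setUpAsync()
            async with self.client.get("/") as resp:
                assert resp.status == 200
                assert await resp.text() == "hello"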
venv/Lib/site-packages/aiohttp/tracing.py
ADDED
@@ -0,0 +1,470 @@
+from types import SimpleNamespace
+from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar
+
+import attr
+from aiosignal import Signal
+from multidict import CIMultiDict
+from yarl import URL
+
+from .client_reqrep import ClientResponse
+
+if TYPE_CHECKING:
+    from .client import ClientSession
+
+    _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
+
+    class _SignalCallback(Protocol[_ParamT_contra]):
+        def __call__(
+            self,
+            __client_session: ClientSession,
+            __trace_config_ctx: SimpleNamespace,
+            __params: _ParamT_contra,
+        ) -> Awaitable[None]: ...
+
+
+__all__ = (
+    "TraceConfig",
+    "TraceRequestStartParams",
+    "TraceRequestEndParams",
+    "TraceRequestExceptionParams",
+    "TraceConnectionQueuedStartParams",
+    "TraceConnectionQueuedEndParams",
+    "TraceConnectionCreateStartParams",
+    "TraceConnectionCreateEndParams",
+    "TraceConnectionReuseconnParams",
+    "TraceDnsResolveHostStartParams",
+    "TraceDnsResolveHostEndParams",
+    "TraceDnsCacheHitParams",
+    "TraceDnsCacheMissParams",
+    "TraceRequestRedirectParams",
+    "TraceRequestChunkSentParams",
+    "TraceResponseChunkReceivedParams",
+    "TraceRequestHeadersSentParams",
+)
+
+
+class TraceConfig:
+    """First-class object used to trace requests launched via ClientSession objects."""
+
+    def __init__(
+        self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
+    ) -> None:
+        self._on_request_start: Signal[_SignalCallback[TraceRequestStartParams]] = (
+            Signal(self)
+        )
+        self._on_request_chunk_sent: Signal[
+            _SignalCallback[TraceRequestChunkSentParams]
+        ] = Signal(self)
+        self._on_response_chunk_received: Signal[
+            _SignalCallback[TraceResponseChunkReceivedParams]
+        ] = Signal(self)
+        self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
+            self
+        )
+        self._on_request_exception: Signal[
+            _SignalCallback[TraceRequestExceptionParams]
+        ] = Signal(self)
+        self._on_request_redirect: Signal[
+            _SignalCallback[TraceRequestRedirectParams]
+        ] = Signal(self)
+        self._on_connection_queued_start: Signal[
+            _SignalCallback[TraceConnectionQueuedStartParams]
+        ] = Signal(self)
+        self._on_connection_queued_end: Signal[
+            _SignalCallback[TraceConnectionQueuedEndParams]
+        ] = Signal(self)
+        self._on_connection_create_start: Signal[
+            _SignalCallback[TraceConnectionCreateStartParams]
+        ] = Signal(self)
+        self._on_connection_create_end: Signal[
+            _SignalCallback[TraceConnectionCreateEndParams]
+        ] = Signal(self)
+        self._on_connection_reuseconn: Signal[
+            _SignalCallback[TraceConnectionReuseconnParams]
+        ] = Signal(self)
+        self._on_dns_resolvehost_start: Signal[
+            _SignalCallback[TraceDnsResolveHostStartParams]
+        ] = Signal(self)
+        self._on_dns_resolvehost_end: Signal[
+            _SignalCallback[TraceDnsResolveHostEndParams]
+        ] = Signal(self)
+        self._on_dns_cache_hit: Signal[_SignalCallback[TraceDnsCacheHitParams]] = (
+            Signal(self)
+        )
+        self._on_dns_cache_miss: Signal[_SignalCallback[TraceDnsCacheMissParams]] = (
+            Signal(self)
+        )
+        self._on_request_headers_sent: Signal[
+            _SignalCallback[TraceRequestHeadersSentParams]
+        ] = Signal(self)
+
+        self._trace_config_ctx_factory = trace_config_ctx_factory
+
+    def trace_config_ctx(
+        self, trace_request_ctx: Optional[Mapping[str, str]] = None
+    ) -> SimpleNamespace:
+        """Return a new trace_config_ctx instance"""
+        return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
+
+    def freeze(self) -> None:
+        self._on_request_start.freeze()
+        self._on_request_chunk_sent.freeze()
+        self._on_response_chunk_received.freeze()
+        self._on_request_end.freeze()
+        self._on_request_exception.freeze()
+        self._on_request_redirect.freeze()
+        self._on_connection_queued_start.freeze()
+        self._on_connection_queued_end.freeze()
+        self._on_connection_create_start.freeze()
+        self._on_connection_create_end.freeze()
+        self._on_connection_reuseconn.freeze()
+        self._on_dns_resolvehost_start.freeze()
+        self._on_dns_resolvehost_end.freeze()
+        self._on_dns_cache_hit.freeze()
+        self._on_dns_cache_miss.freeze()
+        self._on_request_headers_sent.freeze()
+
+    @property
+    def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
+        return self._on_request_start
+
+    @property
+    def on_request_chunk_sent(
+        self,
+    ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
+        return self._on_request_chunk_sent
+
+    @property
+    def on_response_chunk_received(
+        self,
+    ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
+        return self._on_response_chunk_received
+
+    @property
+    def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
+        return self._on_request_end
+
+    @property
+    def on_request_exception(
+        self,
+    ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
+        return self._on_request_exception
+
+    @property
+    def on_request_redirect(
+        self,
+    ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
+        return self._on_request_redirect
+
+    @property
+    def on_connection_queued_start(
+        self,
+    ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
+        return self._on_connection_queued_start
+
+    @property
+    def on_connection_queued_end(
+        self,
+    ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
+        return self._on_connection_queued_end
+
+    @property
+    def on_connection_create_start(
+        self,
+    ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
+        return self._on_connection_create_start
+
+    @property
+    def on_connection_create_end(
+        self,
+    ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
+        return self._on_connection_create_end
+
+    @property
+    def on_connection_reuseconn(
+        self,
+    ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
+        return self._on_connection_reuseconn
+
+    @property
+    def on_dns_resolvehost_start(
+        self,
+    ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
+        return self._on_dns_resolvehost_start
+
+    @property
+    def on_dns_resolvehost_end(
+        self,
+    ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
+        return self._on_dns_resolvehost_end
+
+    @property
+    def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
+        return self._on_dns_cache_hit
+
+    @property
+    def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
+        return self._on_dns_cache_miss
+
+    @property
+    def on_request_headers_sent(
+        self,
+    ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
+        return self._on_request_headers_sent
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestStartParams:
+    """Parameters sent by the `on_request_start` signal"""
+
+    method: str
+    url: URL
+    headers: "CIMultiDict[str]"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestChunkSentParams:
+    """Parameters sent by the `on_request_chunk_sent` signal"""
+
+    method: str
+    url: URL
+    chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceResponseChunkReceivedParams:
+    """Parameters sent by the `on_response_chunk_received` signal"""
+
+    method: str
+    url: URL
+    chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestEndParams:
+    """Parameters sent by the `on_request_end` signal"""
+
+    method: str
+    url: URL
+    headers: "CIMultiDict[str]"
+    response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestExceptionParams:
+    """Parameters sent by the `on_request_exception` signal"""
+
+    method: str
+    url: URL
+    headers: "CIMultiDict[str]"
+    exception: BaseException
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestRedirectParams:
+    """Parameters sent by the `on_request_redirect` signal"""
+
+    method: str
+    url: URL
+    headers: "CIMultiDict[str]"
+    response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedStartParams:
+    """Parameters sent by the `on_connection_queued_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedEndParams:
+    """Parameters sent by the `on_connection_queued_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateStartParams:
+    """Parameters sent by the `on_connection_create_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateEndParams:
+    """Parameters sent by the `on_connection_create_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionReuseconnParams:
+    """Parameters sent by the `on_connection_reuseconn` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostStartParams:
+    """Parameters sent by the `on_dns_resolvehost_start` signal"""
+
+    host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostEndParams:
+    """Parameters sent by the `on_dns_resolvehost_end` signal"""
+
+    host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheHitParams:
+    """Parameters sent by the `on_dns_cache_hit` signal"""
+
+    host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheMissParams:
+    """Parameters sent by the `on_dns_cache_miss` signal"""
+
+    host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestHeadersSentParams:
+    """Parameters sent by the `on_request_headers_sent` signal"""
+
+    method: str
+    url: URL
+    headers: "CIMultiDict[str]"
+
+
+class Trace:
+    """Internal dependency holder class.
+
+    Used to keep together the main dependencies used
+    at the moment of sending a signal.
+    """
+
+    def __init__(
+        self,
+        session: "ClientSession",
+        trace_config: TraceConfig,
+        trace_config_ctx: SimpleNamespace,
+    ) -> None:
+        self._trace_config = trace_config
+        self._trace_config_ctx = trace_config_ctx
+        self._session = session
+
+    async def send_request_start(
+        self, method: str, url: URL, headers: "CIMultiDict[str]"
+    ) -> None:
+        return await self._trace_config.on_request_start.send(
+            self._session,
+            self._trace_config_ctx,
+            TraceRequestStartParams(method, url, headers),
+        )
+
+    async def send_request_chunk_sent(
+        self, method: str, url: URL, chunk: bytes
+    ) -> None:
+        return await self._trace_config.on_request_chunk_sent.send(
+            self._session,
+            self._trace_config_ctx,
+            TraceRequestChunkSentParams(method, url, chunk),
+        )
+
+    async def send_response_chunk_received(
+        self, method: str, url: URL, chunk: bytes
+    ) -> None:
+        return await self._trace_config.on_response_chunk_received.send(
+            self._session,
+            self._trace_config_ctx,
+            TraceResponseChunkReceivedParams(method, url, chunk),
+        )
+
+    async def send_request_end(
+        self,
+        method: str,
+        url: URL,
+        headers: "CIMultiDict[str]",
+        response: ClientResponse,
+    ) -> None:
+        return await self._trace_config.on_request_end.send(
+            self._session,
+            self._trace_config_ctx,
+            TraceRequestEndParams(method, url, headers, response),
+        )
+
+    async def send_request_exception(
+        self,
+        method: str,
+        url: URL,
+        headers: "CIMultiDict[str]",
+        exception: BaseException,
+    ) -> None:
+        return await self._trace_config.on_request_exception.send(
+            self._session,
+            self._trace_config_ctx,
+            TraceRequestExceptionParams(method, url, headers, exception),
+        )
+
+    async def send_request_redirect(
+        self,
+        method: str,
+        url: URL,
+        headers: "CIMultiDict[str]",
+        response: ClientResponse,
+    ) -> None:
+        return await self._trace_config._on_request_redirect.send(
+            self._session,
+            self._trace_config_ctx,
+            TraceRequestRedirectParams(method, url, headers, response),
+        )
+
+    async def send_connection_queued_start(self) -> None:
+        return await self._trace_config.on_connection_queued_start.send(
+            self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
+        )
+
+    async def send_connection_queued_end(self) -> None:
+        return await self._trace_config.on_connection_queued_end.send(
+            self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
+        )
+
+    async def send_connection_create_start(self) -> None:
+        return await self._trace_config.on_connection_create_start.send(
+            self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
+        )
+
+    async def send_connection_create_end(self) -> None:
+        return await self._trace_config.on_connection_create_end.send(
+            self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
+        )
+
+    async def send_connection_reuseconn(self) -> None:
+        return await self._trace_config.on_connection_reuseconn.send(
+            self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
+        )
+
+    async def send_dns_resolvehost_start(self, host: str) -> None:
+        return await self._trace_config.on_dns_resolvehost_start.send(
+            self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
+        )
+
+    async def send_dns_resolvehost_end(self, host: str) -> None:
+        return await self._trace_config.on_dns_resolvehost_end.send(
+            self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
+        )
+
+    async def send_dns_cache_hit(self, host: str) -> None:
+        return await self._trace_config.on_dns_cache_hit.send(
+            self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
+        )
+
+    async def send_dns_cache_miss(self, host: str) -> None:
+        return await self._trace_config.on_dns_cache_miss.send(
+            self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
+        )
+
+    async def send_request_headers(
+        self, method: str, url: URL, headers: "CIMultiDict[str]"
+    ) -> None:
+        return await self._trace_config._on_request_headers_sent.send(
+            self._session,
+            self._trace_config_ctx,
+            TraceRequestHeadersSentParams(method, url, headers),
+        )
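
A minimal sketch of wiring a TraceConfig into a ClientSession (the callback body and the example URL are illustrative):

    import asyncio
    from types import SimpleNamespace

    import aiohttp

    async def on_request_start(
        session: aiohttp.ClientSession,
        ctx: SimpleNamespace,
        params: aiohttp.TraceRequestStartParams,
    ) -> None:
        # fires once per request, before the connection is acquired
        print(f"starting {params.method} {params.url}")

    async def main() -> None:
        trace_config = aiohttp.TraceConfig()
        trace_config.on_request_start.append(on_request_start)
        async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
            async with session.get("https://example.com") as resp:
                await resp.read()

    asyncio.run(main())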
venv/Lib/site-packages/aiohttp/typedefs.py
ADDED
@@ -0,0 +1,69 @@
+import json
+import os
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Awaitable,
+    Callable,
+    Iterable,
+    Mapping,
+    Protocol,
+    Tuple,
+    Union,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
+from yarl import URL, Query as _Query
+
+Query = _Query
+
+DEFAULT_JSON_ENCODER = json.dumps
+DEFAULT_JSON_DECODER = json.loads
+
+if TYPE_CHECKING:
+    _CIMultiDict = CIMultiDict[str]
+    _CIMultiDictProxy = CIMultiDictProxy[str]
+    _MultiDict = MultiDict[str]
+    _MultiDictProxy = MultiDictProxy[str]
+    from http.cookies import BaseCookie, Morsel
+
+    from .web import Request, StreamResponse
+else:
+    _CIMultiDict = CIMultiDict
+    _CIMultiDictProxy = CIMultiDictProxy
+    _MultiDict = MultiDict
+    _MultiDictProxy = MultiDictProxy
+
+Byteish = Union[bytes, bytearray, memoryview]
+JSONEncoder = Callable[[Any], str]
+JSONDecoder = Callable[[str], Any]
+LooseHeaders = Union[
+    Mapping[str, str],
+    Mapping[istr, str],
+    _CIMultiDict,
+    _CIMultiDictProxy,
+    Iterable[Tuple[Union[str, istr], str]],
+]
+RawHeaders = Tuple[Tuple[bytes, bytes], ...]
+StrOrURL = Union[str, URL]
+
+LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
+LooseCookiesIterables = Iterable[
+    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
+]
+LooseCookies = Union[
+    LooseCookiesMappings,
+    LooseCookiesIterables,
+    "BaseCookie[str]",
+]
+
+Handler = Callable[["Request"], Awaitable["StreamResponse"]]
+
+
+class Middleware(Protocol):
+    def __call__(
+        self, request: "Request", handler: Handler
+    ) -> Awaitable["StreamResponse"]: ...
+
+
+PathLike = Union[str, "os.PathLike[str]"]
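
A short sketch showing the Handler and Middleware shapes defined above in use (the middleware body is illustrative):

    from aiohttp import web
    from aiohttp.typedefs import Handler

    @web.middleware
    async def access_log(request: web.Request, handler: Handler) -> web.StreamResponse:
        # a Middleware: takes (request, handler) and awaits a StreamResponse
        response = await handler(request)
        print(request.method, request.rel_url, response.status)
        return response

    app = web.Application(middlewares=[access_log])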
venv/Lib/site-packages/aiohttp/web.py
ADDED
@@ -0,0 +1,605 @@
+import asyncio
+import logging
+import os
+import socket
+import sys
+import warnings
+from argparse import ArgumentParser
+from collections.abc import Iterable
+from contextlib import suppress
+from importlib import import_module
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Awaitable,
+    Callable,
+    Iterable as TypingIterable,
+    List,
+    Optional,
+    Set,
+    Type,
+    Union,
+    cast,
+)
+
+from .abc import AbstractAccessLogger
+from .helpers import AppKey as AppKey
+from .log import access_logger
+from .typedefs import PathLike
+from .web_app import Application as Application, CleanupError as CleanupError
+from .web_exceptions import (
+    HTTPAccepted as HTTPAccepted,
+    HTTPBadGateway as HTTPBadGateway,
+    HTTPBadRequest as HTTPBadRequest,
+    HTTPClientError as HTTPClientError,
+    HTTPConflict as HTTPConflict,
+    HTTPCreated as HTTPCreated,
+    HTTPError as HTTPError,
+    HTTPException as HTTPException,
+    HTTPExpectationFailed as HTTPExpectationFailed,
+    HTTPFailedDependency as HTTPFailedDependency,
+    HTTPForbidden as HTTPForbidden,
+    HTTPFound as HTTPFound,
+    HTTPGatewayTimeout as HTTPGatewayTimeout,
+    HTTPGone as HTTPGone,
+    HTTPInsufficientStorage as HTTPInsufficientStorage,
+    HTTPInternalServerError as HTTPInternalServerError,
+    HTTPLengthRequired as HTTPLengthRequired,
+    HTTPMethodNotAllowed as HTTPMethodNotAllowed,
+    HTTPMisdirectedRequest as HTTPMisdirectedRequest,
+    HTTPMove as HTTPMove,
+    HTTPMovedPermanently as HTTPMovedPermanently,
+    HTTPMultipleChoices as HTTPMultipleChoices,
+    HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
+    HTTPNoContent as HTTPNoContent,
+    HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
+    HTTPNotAcceptable as HTTPNotAcceptable,
+    HTTPNotExtended as HTTPNotExtended,
+    HTTPNotFound as HTTPNotFound,
+    HTTPNotImplemented as HTTPNotImplemented,
+    HTTPNotModified as HTTPNotModified,
+    HTTPOk as HTTPOk,
+    HTTPPartialContent as HTTPPartialContent,
+    HTTPPaymentRequired as HTTPPaymentRequired,
+    HTTPPermanentRedirect as HTTPPermanentRedirect,
+    HTTPPreconditionFailed as HTTPPreconditionFailed,
+    HTTPPreconditionRequired as HTTPPreconditionRequired,
+    HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
+    HTTPRedirection as HTTPRedirection,
+    HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
+    HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
+    HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
+    HTTPRequestTimeout as HTTPRequestTimeout,
+    HTTPRequestURITooLong as HTTPRequestURITooLong,
+    HTTPResetContent as HTTPResetContent,
+    HTTPSeeOther as HTTPSeeOther,
+    HTTPServerError as HTTPServerError,
+    HTTPServiceUnavailable as HTTPServiceUnavailable,
+    HTTPSuccessful as HTTPSuccessful,
+    HTTPTemporaryRedirect as HTTPTemporaryRedirect,
+    HTTPTooManyRequests as HTTPTooManyRequests,
+    HTTPUnauthorized as HTTPUnauthorized,
+    HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
+    HTTPUnprocessableEntity as HTTPUnprocessableEntity,
+    HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
+    HTTPUpgradeRequired as HTTPUpgradeRequired,
+    HTTPUseProxy as HTTPUseProxy,
+    HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
+    HTTPVersionNotSupported as HTTPVersionNotSupported,
+    NotAppKeyWarning as NotAppKeyWarning,
+)
+from .web_fileresponse import FileResponse as FileResponse
+from .web_log import AccessLogger
+from .web_middlewares import (
+    middleware as middleware,
+    normalize_path_middleware as normalize_path_middleware,
+)
+from .web_protocol import (
+    PayloadAccessError as PayloadAccessError,
+    RequestHandler as RequestHandler,
+    RequestPayloadError as RequestPayloadError,
+)
+from .web_request import (
+    BaseRequest as BaseRequest,
+    FileField as FileField,
+    Request as Request,
+)
+from .web_response import (
+    ContentCoding as ContentCoding,
+    Response as Response,
+    StreamResponse as StreamResponse,
+    json_response as json_response,
+)
+from .web_routedef import (
+    AbstractRouteDef as AbstractRouteDef,
+    RouteDef as RouteDef,
+    RouteTableDef as RouteTableDef,
+    StaticDef as StaticDef,
+    delete as delete,
+    get as get,
+    head as head,
+    options as options,
+    patch as patch,
+    post as post,
+    put as put,
+    route as route,
+    static as static,
+    view as view,
+)
+from .web_runner import (
+    AppRunner as AppRunner,
+    BaseRunner as BaseRunner,
+    BaseSite as BaseSite,
+    GracefulExit as GracefulExit,
+    NamedPipeSite as NamedPipeSite,
+    ServerRunner as ServerRunner,
+    SockSite as SockSite,
+    TCPSite as TCPSite,
+    UnixSite as UnixSite,
+)
+from .web_server import Server as Server
+from .web_urldispatcher import (
+    AbstractResource as AbstractResource,
+    AbstractRoute as AbstractRoute,
+    DynamicResource as DynamicResource,
+    PlainResource as PlainResource,
+    PrefixedSubAppResource as PrefixedSubAppResource,
+    Resource as Resource,
+    ResourceRoute as ResourceRoute,
+    StaticResource as StaticResource,
+    UrlDispatcher as UrlDispatcher,
+    UrlMappingMatchInfo as UrlMappingMatchInfo,
+    View as View,
+)
+from .web_ws import (
+    WebSocketReady as WebSocketReady,
+    WebSocketResponse as WebSocketResponse,
+    WSMsgType as WSMsgType,
+)
+
+__all__ = (
+    # web_app
+    "AppKey",
+    "Application",
+    "CleanupError",
+    # web_exceptions
+    "NotAppKeyWarning",
+    "HTTPAccepted",
+    "HTTPBadGateway",
+    "HTTPBadRequest",
+    "HTTPClientError",
+    "HTTPConflict",
+    "HTTPCreated",
+    "HTTPError",
+    "HTTPException",
+    "HTTPExpectationFailed",
+    "HTTPFailedDependency",
+    "HTTPForbidden",
+    "HTTPFound",
+    "HTTPGatewayTimeout",
+    "HTTPGone",
+    "HTTPInsufficientStorage",
+    "HTTPInternalServerError",
+    "HTTPLengthRequired",
+    "HTTPMethodNotAllowed",
+    "HTTPMisdirectedRequest",
+    "HTTPMove",
+    "HTTPMovedPermanently",
+    "HTTPMultipleChoices",
+    "HTTPNetworkAuthenticationRequired",
+    "HTTPNoContent",
+    "HTTPNonAuthoritativeInformation",
+    "HTTPNotAcceptable",
+    "HTTPNotExtended",
+    "HTTPNotFound",
+    "HTTPNotImplemented",
+    "HTTPNotModified",
+    "HTTPOk",
+    "HTTPPartialContent",
+    "HTTPPaymentRequired",
+    "HTTPPermanentRedirect",
+    "HTTPPreconditionFailed",
+    "HTTPPreconditionRequired",
+    "HTTPProxyAuthenticationRequired",
+    "HTTPRedirection",
+    "HTTPRequestEntityTooLarge",
+    "HTTPRequestHeaderFieldsTooLarge",
+    "HTTPRequestRangeNotSatisfiable",
+    "HTTPRequestTimeout",
+    "HTTPRequestURITooLong",
+    "HTTPResetContent",
+    "HTTPSeeOther",
+    "HTTPServerError",
+    "HTTPServiceUnavailable",
+    "HTTPSuccessful",
+    "HTTPTemporaryRedirect",
+    "HTTPTooManyRequests",
+    "HTTPUnauthorized",
+    "HTTPUnavailableForLegalReasons",
+    "HTTPUnprocessableEntity",
+    "HTTPUnsupportedMediaType",
+    "HTTPUpgradeRequired",
+    "HTTPUseProxy",
+    "HTTPVariantAlsoNegotiates",
+    "HTTPVersionNotSupported",
+    # web_fileresponse
+    "FileResponse",
+    # web_middlewares
+    "middleware",
+    "normalize_path_middleware",
+    # web_protocol
+    "PayloadAccessError",
+    "RequestHandler",
+    "RequestPayloadError",
+    # web_request
+    "BaseRequest",
+    "FileField",
+    "Request",
+    # web_response
+    "ContentCoding",
+    "Response",
+    "StreamResponse",
+    "json_response",
+    # web_routedef
+    "AbstractRouteDef",
+    "RouteDef",
+    "RouteTableDef",
+    "StaticDef",
+    "delete",
+    "get",
+    "head",
+    "options",
+    "patch",
+    "post",
+    "put",
+    "route",
+    "static",
+    "view",
+    # web_runner
+    "AppRunner",
|
260 |
+
"BaseRunner",
|
261 |
+
"BaseSite",
|
262 |
+
"GracefulExit",
|
263 |
+
"ServerRunner",
|
264 |
+
"SockSite",
|
265 |
+
"TCPSite",
|
266 |
+
"UnixSite",
|
267 |
+
"NamedPipeSite",
|
268 |
+
# web_server
|
269 |
+
"Server",
|
270 |
+
# web_urldispatcher
|
271 |
+
"AbstractResource",
|
272 |
+
"AbstractRoute",
|
273 |
+
"DynamicResource",
|
274 |
+
"PlainResource",
|
275 |
+
"PrefixedSubAppResource",
|
276 |
+
"Resource",
|
277 |
+
"ResourceRoute",
|
278 |
+
"StaticResource",
|
279 |
+
"UrlDispatcher",
|
280 |
+
"UrlMappingMatchInfo",
|
281 |
+
"View",
|
282 |
+
# web_ws
|
283 |
+
"WebSocketReady",
|
284 |
+
"WebSocketResponse",
|
285 |
+
"WSMsgType",
|
286 |
+
# web
|
287 |
+
"run_app",
|
288 |
+
)
|
289 |
+
|
290 |
+
|
291 |
+
if TYPE_CHECKING:
|
292 |
+
from ssl import SSLContext
|
293 |
+
else:
|
294 |
+
try:
|
295 |
+
from ssl import SSLContext
|
296 |
+
except ImportError: # pragma: no cover
|
297 |
+
SSLContext = object # type: ignore[misc,assignment]
|
298 |
+
|
299 |
+
# Only display warning when using -Wdefault, -We, -X dev or similar.
|
300 |
+
warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)
|
301 |
+
|
302 |
+
HostSequence = TypingIterable[str]
|
303 |
+
|
304 |
+
|
305 |
+
async def _run_app(
|
306 |
+
app: Union[Application, Awaitable[Application]],
|
307 |
+
*,
|
308 |
+
host: Optional[Union[str, HostSequence]] = None,
|
309 |
+
port: Optional[int] = None,
|
310 |
+
path: Union[PathLike, TypingIterable[PathLike], None] = None,
|
311 |
+
sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
|
312 |
+
shutdown_timeout: float = 60.0,
|
313 |
+
keepalive_timeout: float = 75.0,
|
314 |
+
ssl_context: Optional[SSLContext] = None,
|
315 |
+
print: Optional[Callable[..., None]] = print,
|
316 |
+
backlog: int = 128,
|
317 |
+
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
318 |
+
access_log_format: str = AccessLogger.LOG_FORMAT,
|
319 |
+
access_log: Optional[logging.Logger] = access_logger,
|
320 |
+
handle_signals: bool = True,
|
321 |
+
reuse_address: Optional[bool] = None,
|
322 |
+
reuse_port: Optional[bool] = None,
|
323 |
+
handler_cancellation: bool = False,
|
324 |
+
) -> None:
|
325 |
+
# An internal function to actually do all dirty job for application running
|
326 |
+
if asyncio.iscoroutine(app):
|
327 |
+
app = await app
|
328 |
+
|
329 |
+
app = cast(Application, app)
|
330 |
+
|
331 |
+
runner = AppRunner(
|
332 |
+
app,
|
333 |
+
handle_signals=handle_signals,
|
334 |
+
access_log_class=access_log_class,
|
335 |
+
access_log_format=access_log_format,
|
336 |
+
access_log=access_log,
|
337 |
+
keepalive_timeout=keepalive_timeout,
|
338 |
+
shutdown_timeout=shutdown_timeout,
|
339 |
+
handler_cancellation=handler_cancellation,
|
340 |
+
)
|
341 |
+
|
342 |
+
await runner.setup()
|
343 |
+
|
344 |
+
sites: List[BaseSite] = []
|
345 |
+
|
346 |
+
try:
|
347 |
+
if host is not None:
|
348 |
+
if isinstance(host, (str, bytes, bytearray, memoryview)):
|
349 |
+
sites.append(
|
350 |
+
TCPSite(
|
351 |
+
runner,
|
352 |
+
host,
|
353 |
+
port,
|
354 |
+
ssl_context=ssl_context,
|
355 |
+
backlog=backlog,
|
356 |
+
reuse_address=reuse_address,
|
357 |
+
reuse_port=reuse_port,
|
358 |
+
)
|
359 |
+
)
|
360 |
+
else:
|
361 |
+
for h in host:
|
362 |
+
sites.append(
|
363 |
+
TCPSite(
|
364 |
+
runner,
|
365 |
+
h,
|
366 |
+
port,
|
367 |
+
ssl_context=ssl_context,
|
368 |
+
backlog=backlog,
|
369 |
+
reuse_address=reuse_address,
|
370 |
+
reuse_port=reuse_port,
|
371 |
+
)
|
372 |
+
)
|
373 |
+
elif path is None and sock is None or port is not None:
|
374 |
+
sites.append(
|
375 |
+
TCPSite(
|
376 |
+
runner,
|
377 |
+
port=port,
|
378 |
+
ssl_context=ssl_context,
|
379 |
+
backlog=backlog,
|
380 |
+
reuse_address=reuse_address,
|
381 |
+
reuse_port=reuse_port,
|
382 |
+
)
|
383 |
+
)
|
384 |
+
|
385 |
+
if path is not None:
|
386 |
+
if isinstance(path, (str, os.PathLike)):
|
387 |
+
sites.append(
|
388 |
+
UnixSite(
|
389 |
+
runner,
|
390 |
+
path,
|
391 |
+
ssl_context=ssl_context,
|
392 |
+
backlog=backlog,
|
393 |
+
)
|
394 |
+
)
|
395 |
+
else:
|
396 |
+
for p in path:
|
397 |
+
sites.append(
|
398 |
+
UnixSite(
|
399 |
+
runner,
|
400 |
+
p,
|
401 |
+
ssl_context=ssl_context,
|
402 |
+
backlog=backlog,
|
403 |
+
)
|
404 |
+
)
|
405 |
+
|
406 |
+
if sock is not None:
|
407 |
+
if not isinstance(sock, Iterable):
|
408 |
+
sites.append(
|
409 |
+
SockSite(
|
410 |
+
runner,
|
411 |
+
sock,
|
412 |
+
ssl_context=ssl_context,
|
413 |
+
backlog=backlog,
|
414 |
+
)
|
415 |
+
)
|
416 |
+
else:
|
417 |
+
for s in sock:
|
418 |
+
sites.append(
|
419 |
+
SockSite(
|
420 |
+
runner,
|
421 |
+
s,
|
422 |
+
ssl_context=ssl_context,
|
423 |
+
backlog=backlog,
|
424 |
+
)
|
425 |
+
)
|
426 |
+
for site in sites:
|
427 |
+
await site.start()
|
428 |
+
|
429 |
+
if print: # pragma: no branch
|
430 |
+
names = sorted(str(s.name) for s in runner.sites)
|
431 |
+
print(
|
432 |
+
"======== Running on {} ========\n"
|
433 |
+
"(Press CTRL+C to quit)".format(", ".join(names))
|
434 |
+
)
|
435 |
+
|
436 |
+
# sleep forever by 1 hour intervals,
|
437 |
+
while True:
|
438 |
+
await asyncio.sleep(3600)
|
439 |
+
finally:
|
440 |
+
await runner.cleanup()
|
441 |
+
|
442 |
+
|
443 |
+
def _cancel_tasks(
|
444 |
+
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
|
445 |
+
) -> None:
|
446 |
+
if not to_cancel:
|
447 |
+
return
|
448 |
+
|
449 |
+
for task in to_cancel:
|
450 |
+
task.cancel()
|
451 |
+
|
452 |
+
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
|
453 |
+
|
454 |
+
for task in to_cancel:
|
455 |
+
if task.cancelled():
|
456 |
+
continue
|
457 |
+
if task.exception() is not None:
|
458 |
+
loop.call_exception_handler(
|
459 |
+
{
|
460 |
+
"message": "unhandled exception during asyncio.run() shutdown",
|
461 |
+
"exception": task.exception(),
|
462 |
+
"task": task,
|
463 |
+
}
|
464 |
+
)
|
465 |
+
|
466 |
+
|
467 |
+
def run_app(
|
468 |
+
app: Union[Application, Awaitable[Application]],
|
469 |
+
*,
|
470 |
+
host: Optional[Union[str, HostSequence]] = None,
|
471 |
+
port: Optional[int] = None,
|
472 |
+
path: Union[PathLike, TypingIterable[PathLike], None] = None,
|
473 |
+
sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
|
474 |
+
shutdown_timeout: float = 60.0,
|
475 |
+
keepalive_timeout: float = 75.0,
|
476 |
+
ssl_context: Optional[SSLContext] = None,
|
477 |
+
print: Optional[Callable[..., None]] = print,
|
478 |
+
backlog: int = 128,
|
479 |
+
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
480 |
+
access_log_format: str = AccessLogger.LOG_FORMAT,
|
481 |
+
access_log: Optional[logging.Logger] = access_logger,
|
482 |
+
handle_signals: bool = True,
|
483 |
+
reuse_address: Optional[bool] = None,
|
484 |
+
reuse_port: Optional[bool] = None,
|
485 |
+
handler_cancellation: bool = False,
|
486 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
487 |
+
) -> None:
|
488 |
+
"""Run an app locally"""
|
489 |
+
if loop is None:
|
490 |
+
loop = asyncio.new_event_loop()
|
491 |
+
|
492 |
+
# Configure if and only if in debugging mode and using the default logger
|
493 |
+
if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
|
494 |
+
if access_log.level == logging.NOTSET:
|
495 |
+
access_log.setLevel(logging.DEBUG)
|
496 |
+
if not access_log.hasHandlers():
|
497 |
+
access_log.addHandler(logging.StreamHandler())
|
498 |
+
|
499 |
+
main_task = loop.create_task(
|
500 |
+
_run_app(
|
501 |
+
app,
|
502 |
+
host=host,
|
503 |
+
port=port,
|
504 |
+
path=path,
|
505 |
+
sock=sock,
|
506 |
+
shutdown_timeout=shutdown_timeout,
|
507 |
+
keepalive_timeout=keepalive_timeout,
|
508 |
+
ssl_context=ssl_context,
|
509 |
+
print=print,
|
510 |
+
backlog=backlog,
|
511 |
+
access_log_class=access_log_class,
|
512 |
+
access_log_format=access_log_format,
|
513 |
+
access_log=access_log,
|
514 |
+
handle_signals=handle_signals,
|
515 |
+
reuse_address=reuse_address,
|
516 |
+
reuse_port=reuse_port,
|
517 |
+
handler_cancellation=handler_cancellation,
|
518 |
+
)
|
519 |
+
)
|
520 |
+
|
521 |
+
try:
|
522 |
+
asyncio.set_event_loop(loop)
|
523 |
+
loop.run_until_complete(main_task)
|
524 |
+
except (GracefulExit, KeyboardInterrupt): # pragma: no cover
|
525 |
+
pass
|
526 |
+
finally:
|
527 |
+
try:
|
528 |
+
main_task.cancel()
|
529 |
+
with suppress(asyncio.CancelledError):
|
530 |
+
loop.run_until_complete(main_task)
|
531 |
+
finally:
|
532 |
+
_cancel_tasks(asyncio.all_tasks(loop), loop)
|
533 |
+
loop.run_until_complete(loop.shutdown_asyncgens())
|
534 |
+
loop.close()
|
535 |
+
|
536 |
+
|
537 |
+
def main(argv: List[str]) -> None:
|
538 |
+
arg_parser = ArgumentParser(
|
539 |
+
description="aiohttp.web Application server", prog="aiohttp.web"
|
540 |
+
)
|
541 |
+
arg_parser.add_argument(
|
542 |
+
"entry_func",
|
543 |
+
help=(
|
544 |
+
"Callable returning the `aiohttp.web.Application` instance to "
|
545 |
+
"run. Should be specified in the 'module:function' syntax."
|
546 |
+
),
|
547 |
+
metavar="entry-func",
|
548 |
+
)
|
549 |
+
arg_parser.add_argument(
|
550 |
+
"-H",
|
551 |
+
"--hostname",
|
552 |
+
help="TCP/IP hostname to serve on (default: localhost)",
|
553 |
+
default=None,
|
554 |
+
)
|
555 |
+
arg_parser.add_argument(
|
556 |
+
"-P",
|
557 |
+
"--port",
|
558 |
+
help="TCP/IP port to serve on (default: %(default)r)",
|
559 |
+
type=int,
|
560 |
+
default=8080,
|
561 |
+
)
|
562 |
+
arg_parser.add_argument(
|
563 |
+
"-U",
|
564 |
+
"--path",
|
565 |
+
help="Unix file system path to serve on. Can be combined with hostname "
|
566 |
+
"to serve on both Unix and TCP.",
|
567 |
+
)
|
568 |
+
args, extra_argv = arg_parser.parse_known_args(argv)
|
569 |
+
|
570 |
+
# Import logic
|
571 |
+
mod_str, _, func_str = args.entry_func.partition(":")
|
572 |
+
if not func_str or not mod_str:
|
573 |
+
arg_parser.error("'entry-func' not in 'module:function' syntax")
|
574 |
+
if mod_str.startswith("."):
|
575 |
+
arg_parser.error("relative module names not supported")
|
576 |
+
try:
|
577 |
+
module = import_module(mod_str)
|
578 |
+
except ImportError as ex:
|
579 |
+
arg_parser.error(f"unable to import {mod_str}: {ex}")
|
580 |
+
try:
|
581 |
+
func = getattr(module, func_str)
|
582 |
+
except AttributeError:
|
583 |
+
arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
|
584 |
+
|
585 |
+
# Compatibility logic
|
586 |
+
if args.path is not None and not hasattr(socket, "AF_UNIX"):
|
587 |
+
arg_parser.error(
|
588 |
+
"file system paths not supported by your operating environment"
|
589 |
+
)
|
590 |
+
|
591 |
+
logging.basicConfig(level=logging.DEBUG)
|
592 |
+
|
593 |
+
if args.path and args.hostname is None:
|
594 |
+
host = port = None
|
595 |
+
else:
|
596 |
+
host = args.hostname or "localhost"
|
597 |
+
port = args.port
|
598 |
+
|
599 |
+
app = func(extra_argv)
|
600 |
+
run_app(app, host=host, port=port, path=args.path)
|
601 |
+
arg_parser.exit(message="Stopped\n")
|
602 |
+
|
603 |
+
|
604 |
+
if __name__ == "__main__": # pragma: no branch
|
605 |
+
main(sys.argv[1:]) # pragma: no cover
|
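
The `run_app` entry point above builds an `AppRunner`, starts one site per requested host/path/socket, and blocks until `GracefulExit` or `KeyboardInterrupt`. For orientation, a minimal sketch of driving it (the handler name, host, and port are illustrative, not taken from this diff):

from aiohttp import web

async def hello(request: web.Request) -> web.Response:
    # Plain-text handler; web.json_response is re-exported above as well.
    return web.Response(text="Hello, world")

app = web.Application()
app.add_routes([web.get("/", hello)])
# Blocks the current thread; CTRL+C (or raising GracefulExit) stops it.
web.run_app(app, host="127.0.0.1", port=8080)

The `main()` function gives the same module a CLI face: `python -m aiohttp.web -H localhost -P 8080 mypackage:init_func` imports the module, calls the function with any extra argv, and hands the returned `Application` to `run_app` (module and function names here are placeholders).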
venv/Lib/site-packages/aiohttp/web_app.py
ADDED
@@ -0,0 +1,620 @@
+import asyncio
+import logging
+import warnings
+from functools import lru_cache, partial, update_wrapper
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncIterator,
+    Awaitable,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    MutableMapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)
+
+from aiosignal import Signal
+from frozenlist import FrozenList
+
+from . import hdrs
+from .abc import (
+    AbstractAccessLogger,
+    AbstractMatchInfo,
+    AbstractRouter,
+    AbstractStreamWriter,
+)
+from .helpers import DEBUG, AppKey
+from .http_parser import RawRequestMessage
+from .log import web_logger
+from .streams import StreamReader
+from .typedefs import Handler, Middleware
+from .web_exceptions import NotAppKeyWarning
+from .web_log import AccessLogger
+from .web_middlewares import _fix_request_current_app
+from .web_protocol import RequestHandler
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_routedef import AbstractRouteDef
+from .web_server import Server
+from .web_urldispatcher import (
+    AbstractResource,
+    AbstractRoute,
+    Domain,
+    MaskDomain,
+    MatchedSubAppResource,
+    PrefixedSubAppResource,
+    SystemRoute,
+    UrlDispatcher,
+)
+
+__all__ = ("Application", "CleanupError")
+
+
+if TYPE_CHECKING:
+    _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
+    _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
+    _Middlewares = FrozenList[Middleware]
+    _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]]
+    _Subapps = List["Application"]
+else:
+    # No type checker mode, skip types
+    _AppSignal = Signal
+    _RespPrepareSignal = Signal
+    _Middlewares = FrozenList
+    _MiddlewaresHandlers = Optional[Sequence]
+    _Subapps = List
+
+_T = TypeVar("_T")
+_U = TypeVar("_U")
+_Resource = TypeVar("_Resource", bound=AbstractResource)
+
+
+def _build_middlewares(
+    handler: Handler, apps: Tuple["Application", ...]
+) -> Callable[[Request], Awaitable[StreamResponse]]:
+    """Apply middlewares to handler."""
+    for app in apps[::-1]:
+        for m, _ in app._middlewares_handlers:  # type: ignore[union-attr]
+            handler = update_wrapper(partial(m, handler=handler), handler)  # type: ignore[misc]
+    return handler
+
+
+_cached_build_middleware = lru_cache(maxsize=1024)(_build_middlewares)
+
+
+class Application(MutableMapping[Union[str, AppKey[Any]], Any]):
+    ATTRS = frozenset(
+        [
+            "logger",
+            "_debug",
+            "_router",
+            "_loop",
+            "_handler_args",
+            "_middlewares",
+            "_middlewares_handlers",
+            "_has_legacy_middlewares",
+            "_run_middlewares",
+            "_state",
+            "_frozen",
+            "_pre_frozen",
+            "_subapps",
+            "_on_response_prepare",
+            "_on_startup",
+            "_on_shutdown",
+            "_on_cleanup",
+            "_client_max_size",
+            "_cleanup_ctx",
+        ]
+    )
+
+    def __init__(
+        self,
+        *,
+        logger: logging.Logger = web_logger,
+        router: Optional[UrlDispatcher] = None,
+        middlewares: Iterable[Middleware] = (),
+        handler_args: Optional[Mapping[str, Any]] = None,
+        client_max_size: int = 1024**2,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        debug: Any = ...,  # mypy doesn't support ellipsis
+    ) -> None:
+        if router is None:
+            router = UrlDispatcher()
+        else:
+            warnings.warn(
+                "router argument is deprecated", DeprecationWarning, stacklevel=2
+            )
+        assert isinstance(router, AbstractRouter), router
+
+        if loop is not None:
+            warnings.warn(
+                "loop argument is deprecated", DeprecationWarning, stacklevel=2
+            )
+
+        if debug is not ...:
+            warnings.warn(
+                "debug argument is deprecated", DeprecationWarning, stacklevel=2
+            )
+        self._debug = debug
+        self._router: UrlDispatcher = router
+        self._loop = loop
+        self._handler_args = handler_args
+        self.logger = logger
+
+        self._middlewares: _Middlewares = FrozenList(middlewares)
+
+        # initialized on freezing
+        self._middlewares_handlers: _MiddlewaresHandlers = None
+        # initialized on freezing
+        self._run_middlewares: Optional[bool] = None
+        self._has_legacy_middlewares: bool = True
+
+        self._state: Dict[Union[AppKey[Any], str], object] = {}
+        self._frozen = False
+        self._pre_frozen = False
+        self._subapps: _Subapps = []
+
+        self._on_response_prepare: _RespPrepareSignal = Signal(self)
+        self._on_startup: _AppSignal = Signal(self)
+        self._on_shutdown: _AppSignal = Signal(self)
+        self._on_cleanup: _AppSignal = Signal(self)
+        self._cleanup_ctx = CleanupContext()
+        self._on_startup.append(self._cleanup_ctx._on_startup)
+        self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
+        self._client_max_size = client_max_size
+
+    def __init_subclass__(cls: Type["Application"]) -> None:
+        warnings.warn(
+            "Inheritance class {} from web.Application "
+            "is discouraged".format(cls.__name__),
+            DeprecationWarning,
+            stacklevel=3,
+        )
+
+    if DEBUG:  # pragma: no cover
+
+        def __setattr__(self, name: str, val: Any) -> None:
+            if name not in self.ATTRS:
+                warnings.warn(
+                    "Setting custom web.Application.{} attribute "
+                    "is discouraged".format(name),
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+            super().__setattr__(name, val)
+
+    # MutableMapping API
+
+    def __eq__(self, other: object) -> bool:
+        return self is other
+
+    @overload  # type: ignore[override]
+    def __getitem__(self, key: AppKey[_T]) -> _T: ...
+
+    @overload
+    def __getitem__(self, key: str) -> Any: ...
+
+    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
+        return self._state[key]
+
+    def _check_frozen(self) -> None:
+        if self._frozen:
+            warnings.warn(
+                "Changing state of started or joined application is deprecated",
+                DeprecationWarning,
+                stacklevel=3,
+            )
+
+    @overload  # type: ignore[override]
+    def __setitem__(self, key: AppKey[_T], value: _T) -> None: ...
+
+    @overload
+    def __setitem__(self, key: str, value: Any) -> None: ...
+
+    def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None:
+        self._check_frozen()
+        if not isinstance(key, AppKey):
+            warnings.warn(
+                "It is recommended to use web.AppKey instances for keys.\n"
+                + "https://docs.aiohttp.org/en/stable/web_advanced.html"
+                + "#application-s-config",
+                category=NotAppKeyWarning,
+                stacklevel=2,
+            )
+        self._state[key] = value
+
+    def __delitem__(self, key: Union[str, AppKey[_T]]) -> None:
+        self._check_frozen()
+        del self._state[key]
+
+    def __len__(self) -> int:
+        return len(self._state)
+
+    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
+        return iter(self._state)
+
+    def __hash__(self) -> int:
+        return id(self)
+
+    @overload  # type: ignore[override]
+    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...
+
+    @overload
+    def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: ...
+
+    @overload
+    def get(self, key: str, default: Any = ...) -> Any: ...
+
+    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
+        return self._state.get(key, default)
+
+    ########
+    @property
+    def loop(self) -> asyncio.AbstractEventLoop:
+        # Technically the loop can be None
+        # but we mask it by explicit type cast
+        # to provide more convenient type annotation
+        warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
+        return cast(asyncio.AbstractEventLoop, self._loop)
+
+    def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
+        if loop is None:
+            loop = asyncio.get_event_loop()
+        if self._loop is not None and self._loop is not loop:
+            raise RuntimeError(
+                "web.Application instance initialized with different loop"
+            )
+
+        self._loop = loop
+
+        # set loop debug
+        if self._debug is ...:
+            self._debug = loop.get_debug()
+
+        # set loop to sub applications
+        for subapp in self._subapps:
+            subapp._set_loop(loop)
+
+    @property
+    def pre_frozen(self) -> bool:
+        return self._pre_frozen
+
+    def pre_freeze(self) -> None:
+        if self._pre_frozen:
+            return
+
+        self._pre_frozen = True
+        self._middlewares.freeze()
+        self._router.freeze()
+        self._on_response_prepare.freeze()
+        self._cleanup_ctx.freeze()
+        self._on_startup.freeze()
+        self._on_shutdown.freeze()
+        self._on_cleanup.freeze()
+        self._middlewares_handlers = tuple(self._prepare_middleware())
+        self._has_legacy_middlewares = any(
+            not new_style for _, new_style in self._middlewares_handlers
+        )
+
+        # If current app and any subapp do not have middlewares avoid run all
+        # of the code footprint that it implies, which have a middleware
+        # hardcoded per app that sets up the current_app attribute. If no
+        # middlewares are configured the handler will receive the proper
+        # current_app without needing all of this code.
+        self._run_middlewares = True if self.middlewares else False
+
+        for subapp in self._subapps:
+            subapp.pre_freeze()
+            self._run_middlewares = self._run_middlewares or subapp._run_middlewares
+
+    @property
+    def frozen(self) -> bool:
+        return self._frozen
+
+    def freeze(self) -> None:
+        if self._frozen:
+            return
+
+        self.pre_freeze()
+        self._frozen = True
+        for subapp in self._subapps:
+            subapp.freeze()
+
+    @property
+    def debug(self) -> bool:
+        warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
+        return self._debug  # type: ignore[no-any-return]
+
+    def _reg_subapp_signals(self, subapp: "Application") -> None:
+        def reg_handler(signame: str) -> None:
+            subsig = getattr(subapp, signame)
+
+            async def handler(app: "Application") -> None:
+                await subsig.send(subapp)
+
+            appsig = getattr(self, signame)
+            appsig.append(handler)
+
+        reg_handler("on_startup")
+        reg_handler("on_shutdown")
+        reg_handler("on_cleanup")
+
+    def add_subapp(self, prefix: str, subapp: "Application") -> PrefixedSubAppResource:
+        if not isinstance(prefix, str):
+            raise TypeError("Prefix must be str")
+        prefix = prefix.rstrip("/")
+        if not prefix:
+            raise ValueError("Prefix cannot be empty")
+        factory = partial(PrefixedSubAppResource, prefix, subapp)
+        return self._add_subapp(factory, subapp)
+
+    def _add_subapp(
+        self, resource_factory: Callable[[], _Resource], subapp: "Application"
+    ) -> _Resource:
+        if self.frozen:
+            raise RuntimeError("Cannot add sub application to frozen application")
+        if subapp.frozen:
+            raise RuntimeError("Cannot add frozen application")
+        resource = resource_factory()
+        self.router.register_resource(resource)
+        self._reg_subapp_signals(subapp)
+        self._subapps.append(subapp)
+        subapp.pre_freeze()
+        if self._loop is not None:
+            subapp._set_loop(self._loop)
+        return resource
+
+    def add_domain(self, domain: str, subapp: "Application") -> MatchedSubAppResource:
+        if not isinstance(domain, str):
+            raise TypeError("Domain must be str")
+        elif "*" in domain:
+            rule: Domain = MaskDomain(domain)
+        else:
+            rule = Domain(domain)
+        factory = partial(MatchedSubAppResource, rule, subapp)
+        return self._add_subapp(factory, subapp)
+
+    def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
+        return self.router.add_routes(routes)
+
+    @property
+    def on_response_prepare(self) -> _RespPrepareSignal:
+        return self._on_response_prepare
+
+    @property
+    def on_startup(self) -> _AppSignal:
+        return self._on_startup
+
+    @property
+    def on_shutdown(self) -> _AppSignal:
+        return self._on_shutdown
+
+    @property
+    def on_cleanup(self) -> _AppSignal:
+        return self._on_cleanup
+
+    @property
+    def cleanup_ctx(self) -> "CleanupContext":
+        return self._cleanup_ctx
+
+    @property
+    def router(self) -> UrlDispatcher:
+        return self._router
+
+    @property
+    def middlewares(self) -> _Middlewares:
+        return self._middlewares
+
+    def _make_handler(
+        self,
+        *,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+        **kwargs: Any,
+    ) -> Server:
+
+        if not issubclass(access_log_class, AbstractAccessLogger):
+            raise TypeError(
+                "access_log_class must be subclass of "
+                "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
+            )
+
+        self._set_loop(loop)
+        self.freeze()
+
+        kwargs["debug"] = self._debug
+        kwargs["access_log_class"] = access_log_class
+        if self._handler_args:
+            for k, v in self._handler_args.items():
+                kwargs[k] = v
+
+        return Server(
+            self._handle,  # type: ignore[arg-type]
+            request_factory=self._make_request,
+            loop=self._loop,
+            **kwargs,
+        )
+
+    def make_handler(
+        self,
+        *,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+        **kwargs: Any,
+    ) -> Server:
+
+        warnings.warn(
+            "Application.make_handler(...) is deprecated, use AppRunner API instead",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        return self._make_handler(
+            loop=loop, access_log_class=access_log_class, **kwargs
+        )
+
+    async def startup(self) -> None:
+        """Causes on_startup signal
+
+        Should be called in the event loop along with the request handler.
+        """
+        await self.on_startup.send(self)
+
+    async def shutdown(self) -> None:
+        """Causes on_shutdown signal
+
+        Should be called before cleanup()
+        """
+        await self.on_shutdown.send(self)
+
+    async def cleanup(self) -> None:
+        """Causes on_cleanup signal
+
+        Should be called after shutdown()
+        """
+        if self.on_cleanup.frozen:
+            await self.on_cleanup.send(self)
+        else:
+            # If an exception occurs in startup, ensure cleanup contexts are completed.
+            await self._cleanup_ctx._on_cleanup(self)
+
+    def _make_request(
+        self,
+        message: RawRequestMessage,
+        payload: StreamReader,
+        protocol: RequestHandler,
+        writer: AbstractStreamWriter,
+        task: "asyncio.Task[None]",
+        _cls: Type[Request] = Request,
+    ) -> Request:
+        if TYPE_CHECKING:
+            assert self._loop is not None
+        return _cls(
+            message,
+            payload,
+            protocol,
+            writer,
+            task,
+            self._loop,
+            client_max_size=self._client_max_size,
+        )
+
+    def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]:
+        for m in reversed(self._middlewares):
+            if getattr(m, "__middleware_version__", None) == 1:
+                yield m, True
+            else:
+                warnings.warn(
+                    f'old-style middleware "{m!r}" deprecated, see #2252',
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+                yield m, False
+
+        yield _fix_request_current_app(self), True
+
+    async def _handle(self, request: Request) -> StreamResponse:
+        loop = asyncio.get_event_loop()
+        debug = loop.get_debug()
+        match_info = await self._router.resolve(request)
+        if debug:  # pragma: no cover
+            if not isinstance(match_info, AbstractMatchInfo):
+                raise TypeError(
+                    "match_info should be AbstractMatchInfo "
+                    "instance, not {!r}".format(match_info)
+                )
+        match_info.add_app(self)
+
+        match_info.freeze()
+
+        request._match_info = match_info
+
+        if request.headers.get(hdrs.EXPECT):
+            resp = await match_info.expect_handler(request)
+            await request.writer.drain()
+            if resp is not None:
+                return resp
+
+        handler = match_info.handler
+
+        if self._run_middlewares:
+            # If its a SystemRoute, don't cache building the middlewares since
+            # they are constructed for every MatchInfoError as a new handler
+            # is made each time.
+            if not self._has_legacy_middlewares and not isinstance(
+                match_info.route, SystemRoute
+            ):
+                handler = _cached_build_middleware(handler, match_info.apps)
+            else:
+                for app in match_info.apps[::-1]:
+                    for m, new_style in app._middlewares_handlers:  # type: ignore[union-attr]
+                        if new_style:
+                            handler = update_wrapper(
+                                partial(m, handler=handler), handler  # type: ignore[misc]
+                            )
+                        else:
+                            handler = await m(app, handler)  # type: ignore[arg-type,assignment]
+
+        return await handler(request)
+
+    def __call__(self) -> "Application":
+        """gunicorn compatibility"""
+        return self
+
+    def __repr__(self) -> str:
+        return f"<Application 0x{id(self):x}>"
+
+    def __bool__(self) -> bool:
+        return True
+
+
+class CleanupError(RuntimeError):
+    @property
+    def exceptions(self) -> List[BaseException]:
+        return cast(List[BaseException], self.args[1])
+
+
+if TYPE_CHECKING:
+    _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
+else:
+    _CleanupContextBase = FrozenList
+
+
+class CleanupContext(_CleanupContextBase):
+    def __init__(self) -> None:
+        super().__init__()
+        self._exits: List[AsyncIterator[None]] = []
+
+    async def _on_startup(self, app: Application) -> None:
+        for cb in self:
+            it = cb(app).__aiter__()
+            await it.__anext__()
+            self._exits.append(it)
+
+    async def _on_cleanup(self, app: Application) -> None:
+        errors = []
+        for it in reversed(self._exits):
+            try:
+                await it.__anext__()
+            except StopAsyncIteration:
+                pass
+            except (Exception, asyncio.CancelledError) as exc:
+                errors.append(exc)
+            else:
+                errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
+        if errors:
+            if len(errors) == 1:
+                raise errors[0]
+            else:
+                raise CleanupError("Multiple errors on cleanup stage", errors)
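
`Application` above is a `MutableMapping` for per-app state, and `CleanupContext` pairs each startup action with its teardown around a single `yield`. A minimal sketch of both together, assuming the app is then served with `web.run_app` (the key name and stored value are illustrative, not part of this diff):

from typing import AsyncIterator
from aiohttp import web

# An AppKey gives the stored value a static type and avoids NotAppKeyWarning.
db_key = web.AppKey("db_key", str)

async def db_ctx(app: web.Application) -> AsyncIterator[None]:
    app[db_key] = "connected"  # runs during on_startup, before the yield
    yield
    app[db_key] = "closed"     # runs during on_cleanup, in reverse order

app = web.Application()
app.cleanup_ctx.append(db_ctx)

If a context generator yields more than once, `_on_cleanup` records a `RuntimeError` for it, and multiple failures are bundled into the `CleanupError` defined above.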
venv/Lib/site-packages/aiohttp/web_exceptions.py
ADDED
@@ -0,0 +1,452 @@
+import warnings
+from typing import Any, Dict, Iterable, List, Optional, Set  # noqa
+
+from yarl import URL
+
+from .typedefs import LooseHeaders, StrOrURL
+from .web_response import Response
+
+__all__ = (
+    "HTTPException",
+    "HTTPError",
+    "HTTPRedirection",
+    "HTTPSuccessful",
+    "HTTPOk",
+    "HTTPCreated",
+    "HTTPAccepted",
+    "HTTPNonAuthoritativeInformation",
+    "HTTPNoContent",
+    "HTTPResetContent",
+    "HTTPPartialContent",
+    "HTTPMove",
+    "HTTPMultipleChoices",
+    "HTTPMovedPermanently",
+    "HTTPFound",
+    "HTTPSeeOther",
+    "HTTPNotModified",
+    "HTTPUseProxy",
+    "HTTPTemporaryRedirect",
+    "HTTPPermanentRedirect",
+    "HTTPClientError",
+    "HTTPBadRequest",
+    "HTTPUnauthorized",
+    "HTTPPaymentRequired",
+    "HTTPForbidden",
+    "HTTPNotFound",
+    "HTTPMethodNotAllowed",
+    "HTTPNotAcceptable",
+    "HTTPProxyAuthenticationRequired",
+    "HTTPRequestTimeout",
+    "HTTPConflict",
+    "HTTPGone",
+    "HTTPLengthRequired",
+    "HTTPPreconditionFailed",
+    "HTTPRequestEntityTooLarge",
+    "HTTPRequestURITooLong",
+    "HTTPUnsupportedMediaType",
+    "HTTPRequestRangeNotSatisfiable",
+    "HTTPExpectationFailed",
+    "HTTPMisdirectedRequest",
+    "HTTPUnprocessableEntity",
+    "HTTPFailedDependency",
+    "HTTPUpgradeRequired",
+    "HTTPPreconditionRequired",
+    "HTTPTooManyRequests",
+    "HTTPRequestHeaderFieldsTooLarge",
+    "HTTPUnavailableForLegalReasons",
+    "HTTPServerError",
+    "HTTPInternalServerError",
+    "HTTPNotImplemented",
+    "HTTPBadGateway",
+    "HTTPServiceUnavailable",
+    "HTTPGatewayTimeout",
+    "HTTPVersionNotSupported",
+    "HTTPVariantAlsoNegotiates",
+    "HTTPInsufficientStorage",
+    "HTTPNotExtended",
+    "HTTPNetworkAuthenticationRequired",
+)
+
+
+class NotAppKeyWarning(UserWarning):
+    """Warning when not using AppKey in Application."""
+
+
+############################################################
+# HTTP Exceptions
+############################################################
+
+
+class HTTPException(Response, Exception):
+
+    # You should set in subclasses:
+    # status = 200
+
+    status_code = -1
+    empty_body = False
+
+    __http_exception__ = True
+
+    def __init__(
+        self,
+        *,
+        headers: Optional[LooseHeaders] = None,
+        reason: Optional[str] = None,
+        body: Any = None,
+        text: Optional[str] = None,
+        content_type: Optional[str] = None,
+    ) -> None:
+        if body is not None:
+            warnings.warn(
+                "body argument is deprecated for http web exceptions",
+                DeprecationWarning,
+            )
+        Response.__init__(
+            self,
+            status=self.status_code,
+            headers=headers,
+            reason=reason,
+            body=body,
+            text=text,
+            content_type=content_type,
+        )
+        Exception.__init__(self, self.reason)
+        if self.body is None and not self.empty_body:
+            self.text = f"{self.status}: {self.reason}"
+
+    def __bool__(self) -> bool:
+        return True
+
+
+class HTTPError(HTTPException):
+    """Base class for exceptions with status codes in the 400s and 500s."""
+
+
+class HTTPRedirection(HTTPException):
+    """Base class for exceptions with status codes in the 300s."""
+
+
+class HTTPSuccessful(HTTPException):
+    """Base class for exceptions with status codes in the 200s."""
+
+
+class HTTPOk(HTTPSuccessful):
+    status_code = 200
+
+
+class HTTPCreated(HTTPSuccessful):
+    status_code = 201
+
+
+class HTTPAccepted(HTTPSuccessful):
+    status_code = 202
+
+
+class HTTPNonAuthoritativeInformation(HTTPSuccessful):
+    status_code = 203
+
+
+class HTTPNoContent(HTTPSuccessful):
+    status_code = 204
+    empty_body = True
+
+
+class HTTPResetContent(HTTPSuccessful):
+    status_code = 205
+    empty_body = True
+
+
+class HTTPPartialContent(HTTPSuccessful):
+    status_code = 206
+
+
+############################################################
+# 3xx redirection
+############################################################
+
+
+class HTTPMove(HTTPRedirection):
+    def __init__(
+        self,
+        location: StrOrURL,
+        *,
+        headers: Optional[LooseHeaders] = None,
+        reason: Optional[str] = None,
+        body: Any = None,
+        text: Optional[str] = None,
+        content_type: Optional[str] = None,
+    ) -> None:
+        if not location:
+            raise ValueError("HTTP redirects need a location to redirect to.")
+        super().__init__(
+            headers=headers,
+            reason=reason,
+            body=body,
+            text=text,
+            content_type=content_type,
+        )
+        self.headers["Location"] = str(URL(location))
+        self.location = location
+
+
+class HTTPMultipleChoices(HTTPMove):
+    status_code = 300
+
+
+class HTTPMovedPermanently(HTTPMove):
+    status_code = 301
+
+
+class HTTPFound(HTTPMove):
+    status_code = 302
+
+
+# This one is safe after a POST (the redirected location will be
+# retrieved with GET):
+class HTTPSeeOther(HTTPMove):
+    status_code = 303
+
+
+class HTTPNotModified(HTTPRedirection):
+    # FIXME: this should include a date or etag header
+    status_code = 304
+    empty_body = True
+
+
+class HTTPUseProxy(HTTPMove):
+    # Not a move, but looks a little like one
+    status_code = 305
+
+
+class HTTPTemporaryRedirect(HTTPMove):
+    status_code = 307
+
+
+class HTTPPermanentRedirect(HTTPMove):
+    status_code = 308
+
+
+############################################################
+# 4xx client error
+############################################################
+
+
+class HTTPClientError(HTTPError):
+    pass
+
+
+class HTTPBadRequest(HTTPClientError):
+    status_code = 400
+
+
+class HTTPUnauthorized(HTTPClientError):
+    status_code = 401
+
+
+class HTTPPaymentRequired(HTTPClientError):
+    status_code = 402
+
+
+class HTTPForbidden(HTTPClientError):
+    status_code = 403
+
+
+class HTTPNotFound(HTTPClientError):
+    status_code = 404
+
+
+class HTTPMethodNotAllowed(HTTPClientError):
+    status_code = 405
+
+    def __init__(
+        self,
+        method: str,
+        allowed_methods: Iterable[str],
+        *,
+        headers: Optional[LooseHeaders] = None,
+        reason: Optional[str] = None,
+        body: Any = None,
+        text: Optional[str] = None,
+        content_type: Optional[str] = None,
+    ) -> None:
+        allow = ",".join(sorted(allowed_methods))
+        super().__init__(
+            headers=headers,
+            reason=reason,
+            body=body,
+            text=text,
+            content_type=content_type,
+        )
+        self.headers["Allow"] = allow
+        self.allowed_methods: Set[str] = set(allowed_methods)
+        self.method = method.upper()
+
+
+class HTTPNotAcceptable(HTTPClientError):
+    status_code = 406
+
+
+class HTTPProxyAuthenticationRequired(HTTPClientError):
+    status_code = 407
+
+
+class HTTPRequestTimeout(HTTPClientError):
+    status_code = 408
+
+
+class HTTPConflict(HTTPClientError):
+    status_code = 409
+
+
+class HTTPGone(HTTPClientError):
+    status_code = 410
+
+
+class HTTPLengthRequired(HTTPClientError):
+    status_code = 411
+
+
+class HTTPPreconditionFailed(HTTPClientError):
+    status_code = 412
+
+
+class HTTPRequestEntityTooLarge(HTTPClientError):
+    status_code = 413
+
+    def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
+        kwargs.setdefault(
+            "text",
+            "Maximum request body size {} exceeded, "
+            "actual body size {}".format(max_size, actual_size),
+        )
+        super().__init__(**kwargs)
+
+
+class HTTPRequestURITooLong(HTTPClientError):
+    status_code = 414
+
+
+class HTTPUnsupportedMediaType(HTTPClientError):
+    status_code = 415
+
+
+class HTTPRequestRangeNotSatisfiable(HTTPClientError):
+    status_code = 416
+
+
+class HTTPExpectationFailed(HTTPClientError):
+    status_code = 417
+
+
+class HTTPMisdirectedRequest(HTTPClientError):
+    status_code = 421
+
+
+class HTTPUnprocessableEntity(HTTPClientError):
+    status_code = 422
+
+
+class HTTPFailedDependency(HTTPClientError):
+    status_code = 424
+
+
+class HTTPUpgradeRequired(HTTPClientError):
+    status_code = 426
+
+
+class HTTPPreconditionRequired(HTTPClientError):
+    status_code = 428
+
+
+class HTTPTooManyRequests(HTTPClientError):
+    status_code = 429
+
+
+class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
+    status_code = 431
+
+
+class HTTPUnavailableForLegalReasons(HTTPClientError):
+    status_code = 451
+
+    def __init__(
+        self,
+        link: Optional[StrOrURL],
+        *,
+        headers: Optional[LooseHeaders] = None,
+        reason: Optional[str] = None,
+        body: Any = None,
+        text: Optional[str] = None,
+        content_type: Optional[str] = None,
+    ) -> None:
+        super().__init__(
+            headers=headers,
+            reason=reason,
+            body=body,
+            text=text,
+            content_type=content_type,
+        )
+        self._link = None
+        if link:
+            self._link = URL(link)
+            self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"'
+
+    @property
+    def link(self) -> Optional[URL]:
+        return self._link
+
+
+############################################################
+# 5xx Server Error
+############################################################
+# Response status codes beginning with the digit "5" indicate cases in
+# which the server is aware that it has erred or is incapable of
+# performing the request. Except when responding to a HEAD request, the
+# server SHOULD include an entity containing an explanation of the error
+# situation, and whether it is a temporary or permanent condition. User
+# agents SHOULD display any included entity to the user. These response
+# codes are applicable to any request method.
+
+
+class HTTPServerError(HTTPError):
+    pass
+
+
+class HTTPInternalServerError(HTTPServerError):
+    status_code = 500
+
+
+class HTTPNotImplemented(HTTPServerError):
+    status_code = 501
+
+
+class HTTPBadGateway(HTTPServerError):
+    status_code = 502
+
+
+class HTTPServiceUnavailable(HTTPServerError):
+    status_code = 503
+
+
+class HTTPGatewayTimeout(HTTPServerError):
+    status_code = 504
+
+
+class HTTPVersionNotSupported(HTTPServerError):
+    status_code = 505
+
+
+class HTTPVariantAlsoNegotiates(HTTPServerError):
+    status_code = 506
+
+
+class HTTPInsufficientStorage(HTTPServerError):
+    status_code = 507
+
+
+class HTTPNotExtended(HTTPServerError):
+    status_code = 510
+
+
+class HTTPNetworkAuthenticationRequired(HTTPServerError):
+    status_code = 511
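
Because every class above subclasses both `Response` and `Exception`, a handler can either return or raise one; raising short-circuits the handler with the matching status line and a default text body. A brief sketch (the route and message are illustrative):

from aiohttp import web

async def get_item(request: web.Request) -> web.Response:
    item_id = request.match_info.get("id")
    if item_id != "42":
        # Raising sends a 404 with this text as the body.
        raise web.HTTPNotFound(text=f"no item {item_id}")
    return web.Response(text="found")

Note the constructors with extra required arguments: `HTTPMethodNotAllowed` takes the method and the allowed set (filling the Allow header), and the `HTTPMove` redirects require a `location`.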
venv/Lib/site-packages/aiohttp/web_fileresponse.py
ADDED
@@ -0,0 +1,418 @@
+import asyncio
+import io
+import os
+import pathlib
+import sys
+from contextlib import suppress
+from enum import Enum, auto
+from mimetypes import MimeTypes
+from stat import S_ISREG
+from types import MappingProxyType
+from typing import (  # noqa
+    IO,
+    TYPE_CHECKING,
+    Any,
+    Awaitable,
+    Callable,
+    Final,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+    cast,
+)
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import ETAG_ANY, ETag, must_be_empty_body
+from .typedefs import LooseHeaders, PathLike
+from .web_exceptions import (
+    HTTPForbidden,
+    HTTPNotFound,
+    HTTPNotModified,
+    HTTPPartialContent,
+    HTTPPreconditionFailed,
+    HTTPRequestRangeNotSatisfiable,
+)
+from .web_response import StreamResponse
+
+__all__ = ("FileResponse",)
+
+if TYPE_CHECKING:
+    from .web_request import BaseRequest
+
+
+_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
+
+
+NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
+
+CONTENT_TYPES: Final[MimeTypes] = MimeTypes()
+
+# File extension to IANA encodings map that will be checked in the order defined.
+ENCODING_EXTENSIONS = MappingProxyType(
+    {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")}
+)
+
+FALLBACK_CONTENT_TYPE = "application/octet-stream"
+
+# Provide additional MIME type/extension pairs to be recognized.
+# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only
+ADDITIONAL_CONTENT_TYPES = MappingProxyType(
+    {
+        "application/gzip": ".gz",
+        "application/x-brotli": ".br",
+        "application/x-bzip2": ".bz2",
+        "application/x-compress": ".Z",
+        "application/x-xz": ".xz",
+    }
+)
+
+
+class _FileResponseResult(Enum):
+    """The result of the file response."""
+
+    SEND_FILE = auto()  # Ie a regular file to send
+    NOT_ACCEPTABLE = auto()  # Ie a socket, or non-regular file
+    PRE_CONDITION_FAILED = auto()  # Ie If-Match or If-None-Match failed
+    NOT_MODIFIED = auto()  # 304 Not Modified
+
+
+# Add custom pairs and clear the encodings map so guess_type ignores them.
+CONTENT_TYPES.encodings_map.clear()
+for content_type, extension in ADDITIONAL_CONTENT_TYPES.items():
+    CONTENT_TYPES.add_type(content_type, extension)  # type: ignore[attr-defined]
+
+
+_CLOSE_FUTURES: Set[asyncio.Future[None]] = set()
+
+
+class FileResponse(StreamResponse):
+    """A response object can be used to send files."""
+
+    def __init__(
+        self,
+        path: PathLike,
+        chunk_size: int = 256 * 1024,
+        status: int = 200,
+        reason: Optional[str] = None,
+        headers: Optional[LooseHeaders] = None,
+    ) -> None:
+        super().__init__(status=status, reason=reason, headers=headers)
+
+        self._path = pathlib.Path(path)
+        self._chunk_size = chunk_size
+
+    def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes:
+        fobj.seek(offset)
+        return fobj.read(chunk_size)  # type: ignore[no-any-return]
+
+    async def _sendfile_fallback(
+        self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
+    ) -> AbstractStreamWriter:
+        # To keep memory usage low,fobj is transferred in chunks
+        # controlled by the constructor's chunk_size argument.
+
+        chunk_size = self._chunk_size
+        loop = asyncio.get_event_loop()
+        chunk = await loop.run_in_executor(
+            None, self._seek_and_read, fobj, offset, chunk_size
+        )
+        while chunk:
+            await writer.write(chunk)
+            count = count - chunk_size
+            if count <= 0:
+                break
+            chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
+
+        await writer.drain()
+        return writer
+
+    async def _sendfile(
+        self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
+    ) -> AbstractStreamWriter:
+        writer = await super().prepare(request)
+        assert writer is not None
+
+        if NOSENDFILE or self.compression:
+            return await self._sendfile_fallback(writer, fobj, offset, count)
+
+        loop = request._loop
+        transport = request.transport
+        assert transport is not None
+
+        try:
+            await loop.sendfile(transport, fobj, offset, count)
+        except NotImplementedError:
+            return await self._sendfile_fallback(writer, fobj, offset, count)
+
+        await super().write_eof()
+        return writer
+
+    @staticmethod
+    def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool:
+        if len(etags) == 1 and etags[0].value == ETAG_ANY:
+            return True
+        return any(
+            etag.value == etag_value for etag in etags if weak or not etag.is_weak
+        )
+
+    async def _not_modified(
+        self, request: "BaseRequest", etag_value: str, last_modified: float
+    ) -> Optional[AbstractStreamWriter]:
+        self.set_status(HTTPNotModified.status_code)
+        self._length_check = False
+        self.etag = etag_value  # type: ignore[assignment]
+        self.last_modified = last_modified  # type: ignore[assignment]
+        # Delete any Content-Length headers provided by user. HTTP 304
+        # should always have empty response body
+        return await super().prepare(request)
+
+    async def _precondition_failed(
+        self, request: "BaseRequest"
+    ) -> Optional[AbstractStreamWriter]:
+        self.set_status(HTTPPreconditionFailed.status_code)
+        self.content_length = 0
+        return await super().prepare(request)
+
+    def _make_response(
+        self, request: "BaseRequest", accept_encoding: str
+    ) -> Tuple[
+        _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str]
+    ]:
+        """Return the response result, io object, stat result, and encoding.
+
+        If an uncompressed file is returned, the encoding is set to
+        :py:data:`None`.
+
+        This method should be called from a thread executor
+        since it calls os.stat which may block.
+        """
+        file_path, st, file_encoding = self._get_file_path_stat_encoding(
+            accept_encoding
+        )
+        if not file_path:
+            return _FileResponseResult.NOT_ACCEPTABLE, None, st, None
+
+        etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
+
+        # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2
+        if (ifmatch := request.if_match) is not None and not self._etag_match(
+            etag_value, ifmatch, weak=False
+        ):
+            return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding
+
+        if (
+            (unmodsince := request.if_unmodified_since) is not None
+            and ifmatch is None
+            and st.st_mtime > unmodsince.timestamp()
+        ):
+            return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding
+
+        # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2
+        if (ifnonematch := request.if_none_match) is not None and self._etag_match(
+            etag_value, ifnonematch, weak=True
+        ):
+            return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding
+
+        if (
+            (modsince := request.if_modified_since) is not None
+            and ifnonematch is None
+            and st.st_mtime <= modsince.timestamp()
+        ):
+            return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding
+
+        fobj = file_path.open("rb")
+        with suppress(OSError):
+            # fstat() may not be available on all platforms
+            # Once we open the file, we want the fstat() to ensure
+            # the file has not changed between the first stat()
+            # and the open().
+            st = os.stat(fobj.fileno())
+        return _FileResponseResult.SEND_FILE, fobj, st, file_encoding
+
+    def _get_file_path_stat_encoding(
+        self, accept_encoding: str
+    ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]:
+        file_path = self._path
+        for file_extension, file_encoding in ENCODING_EXTENSIONS.items():
+            if file_encoding not in accept_encoding:
+                continue
+
+            compressed_path = file_path.with_suffix(file_path.suffix + file_extension)
+            with suppress(OSError):
+                # Do not follow symlinks and ignore any non-regular files.
+                st = compressed_path.lstat()
+                if S_ISREG(st.st_mode):
+                    return compressed_path, st, file_encoding
+
+        # Fallback to the uncompressed file
+        st = file_path.stat()
+        return file_path if S_ISREG(st.st_mode) else None, st, None
+
+    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
+        loop = asyncio.get_running_loop()
+        # Encoding comparisons should be case-insensitive
+        # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
+        accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
+        try:
+            response_result, fobj, st, file_encoding = await loop.run_in_executor(
+                None, self._make_response, request, accept_encoding
+            )
+        except PermissionError:
+            self.set_status(HTTPForbidden.status_code)
+            return await super().prepare(request)
+        except OSError:
+            # Most likely to be FileNotFoundError or OSError for circular
+            # symlinks in python >= 3.13, so respond with 404.
+            self.set_status(HTTPNotFound.status_code)
+            return await super().prepare(request)
+
+        # Forbid special files like sockets, pipes, devices, etc.
+        if response_result is _FileResponseResult.NOT_ACCEPTABLE:
+            self.set_status(HTTPForbidden.status_code)
+            return await super().prepare(request)
+
+        if response_result is _FileResponseResult.PRE_CONDITION_FAILED:
+            return await self._precondition_failed(request)
+
+        if response_result is _FileResponseResult.NOT_MODIFIED:
+            etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
+            last_modified = st.st_mtime
+            return await self._not_modified(request, etag_value, last_modified)
+
+        assert fobj is not None
+        try:
+            return await self._prepare_open_file(request, fobj, st, file_encoding)
+        finally:
+            # We do not await here because we do not want to wait
+            # for the executor to finish before returning the response
+            # so the connection can begin servicing another request
+            # as soon as possible.
+            close_future = loop.run_in_executor(None, fobj.close)
+            # Hold a strong reference to the future to prevent it from being
+            # garbage collected before it completes.
+            _CLOSE_FUTURES.add(close_future)
+            close_future.add_done_callback(_CLOSE_FUTURES.remove)
+
+    async def _prepare_open_file(
+        self,
+        request: "BaseRequest",
+        fobj: io.BufferedReader,
+        st: os.stat_result,
+        file_encoding: Optional[str],
+    ) -> Optional[AbstractStreamWriter]:
+        status = self._status
+        file_size: int = st.st_size
+        file_mtime: float = st.st_mtime
+        count: int = file_size
+        start: Optional[int] = None
+
+        if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp():
+            # If-Range header check:
+            # condition = cached date >= last modification date
+            # return 206 if True else 200.
+            # if False:
+            #   Range header would not be processed, return 200
+            # if True but Range header missing
+            #   return 200
+            try:
+                rng = request.http_range
+                start = rng.start
+                end: Optional[int] = rng.stop
+            except ValueError:
+                # https://tools.ietf.org/html/rfc7233:
+                # A server generating a 416 (Range Not Satisfiable) response to
+                # a byte-range request SHOULD send a Content-Range header field
+                # with an unsatisfied-range value.
+                # The complete-length in a 416 response indicates the current
+                # length of the selected representation.
+                #
+                # Will do the same below. Many servers ignore this and do not
+                # send a Content-Range header with HTTP 416
+                self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
+                self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
+                return await super().prepare(request)
+
+            # If a range request has been made, convert start, end slice
+            # notation into file pointer offset and count
+            if start is not None:
+                if start < 0 and end is None:  # return tail of file
+                    start += file_size
+                    if start < 0:
+                        # if Range:bytes=-1000 in request header but file size
+                        # is only 200, there would be trouble without this
+                        start = 0
+                    count = file_size - start
+                else:
+                    # rfc7233:If the last-byte-pos value is
+                    # absent, or if the value is greater than or equal to
+                    # the current length of the representation data,
+                    # the byte range is interpreted as the remainder
+                    # of the representation (i.e., the server replaces the
+                    # value of last-byte-pos with a value that is one less than
+                    # the current length of the selected representation).
+                    count = (
+                        min(end if end is not None else file_size, file_size) - start
+                    )
+
+                if start >= file_size:
+                    # HTTP 416 should be returned in this case.
+                    #
+                    # According to https://tools.ietf.org/html/rfc7233:
+                    # If a valid byte-range-set includes at least one
+                    # byte-range-spec with a first-byte-pos that is less than
+                    # the current length of the representation, or at least one
+                    # suffix-byte-range-spec with a non-zero suffix-length,
+                    # then the byte-range-set is satisfiable. Otherwise, the
+                    # byte-range-set is unsatisfiable.
+                    self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
+                    self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
+                    return await super().prepare(request)
+
+                status = HTTPPartialContent.status_code
+                # Even though you are sending the whole file, you should still
+                # return a HTTP 206 for a Range request.
+                self.set_status(status)
+
+        # If the Content-Type header is not already set, guess it based on the
+        # extension of the request path. The encoding returned by guess_type
+        # can be ignored since the map was cleared above.
+        if hdrs.CONTENT_TYPE not in self._headers:
+            if sys.version_info >= (3, 13):
+                guesser = CONTENT_TYPES.guess_file_type
+            else:
+                guesser = CONTENT_TYPES.guess_type
+            self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE
+
+        if file_encoding:
+            self._headers[hdrs.CONTENT_ENCODING] = file_encoding
+            self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
+            # Disable compression if we are already sending
+            # a compressed file since we don't want to double
+            # compress.
+            self._compression = False
+
+        self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}"  # type: ignore[assignment]
+        self.last_modified = file_mtime  # type: ignore[assignment]
+        self.content_length = count
+
+        self._headers[hdrs.ACCEPT_RANGES] = "bytes"
+
+        if status == HTTPPartialContent.status_code:
+            real_start = start
+            assert real_start is not None
+            self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
+                real_start, real_start + count - 1, file_size
+            )
+
+        # If we are sending 0 bytes calling sendfile() will throw a ValueError
+        if count == 0 or must_be_empty_body(request.method, status):
+            return await super().prepare(request)
+
+        # be aware that start could be None or int=0 here.
+        offset = start or 0
+
+        return await self._sendfile(request, fobj, offset, count)
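Note: FileResponse above is the response class aiohttp's static file routes return internally; it handles ETag/If-Modified-Since/If-Range preconditions, byte ranges, and pre-compressed .br/.gz siblings. A minimal usage sketch, assuming a hypothetical route and file path (neither is part of the diff):

    from aiohttp import web

    async def report(request: web.Request) -> web.FileResponse:
        # Streams via loop.sendfile() when available, otherwise falls back
        # to chunked executor reads (_sendfile_fallback above).
        return web.FileResponse("./report.pdf", chunk_size=256 * 1024)

    app = web.Application()
    app.router.add_get("/report", report)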
venv/Lib/site-packages/aiohttp/web_log.py
ADDED
@@ -0,0 +1,216 @@
+import datetime
+import functools
+import logging
+import os
+import re
+import time as time_mod
+from collections import namedtuple
+from typing import Any, Callable, Dict, Iterable, List, Tuple  # noqa
+
+from .abc import AbstractAccessLogger
+from .web_request import BaseRequest
+from .web_response import StreamResponse
+
+KeyMethod = namedtuple("KeyMethod", "key method")
+
+
+class AccessLogger(AbstractAccessLogger):
+    """Helper object to log access.
+
+    Usage:
+        log = logging.getLogger("spam")
+        log_format = "%a %{User-Agent}i"
+        access_logger = AccessLogger(log, log_format)
+        access_logger.log(request, response, time)
+
+    Format:
+        %%  The percent sign
+        %a  Remote IP-address (IP-address of proxy if using reverse proxy)
+        %t  Time when the request was started to process
+        %P  The process ID of the child that serviced the request
+        %r  First line of request
+        %s  Response status code
+        %b  Size of response in bytes, including HTTP headers
+        %T  Time taken to serve the request, in seconds
+        %Tf Time taken to serve the request, in seconds with floating fraction
+            in .06f format
+        %D  Time taken to serve the request, in microseconds
+        %{FOO}i  request.headers['FOO']
+        %{FOO}o  response.headers['FOO']
+        %{FOO}e  os.environ['FOO']
+
+    """
+
+    LOG_FORMAT_MAP = {
+        "a": "remote_address",
+        "t": "request_start_time",
+        "P": "process_id",
+        "r": "first_request_line",
+        "s": "response_status",
+        "b": "response_size",
+        "T": "request_time",
+        "Tf": "request_time_frac",
+        "D": "request_time_micro",
+        "i": "request_header",
+        "o": "response_header",
+    }
+
+    LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
+    FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
+    CLEANUP_RE = re.compile(r"(%[^s])")
+    _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {}
+
+    def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
+        """Initialise the logger.
+
+        logger is a logger object to be used for logging.
+        log_format is a string with apache compatible log format description.
+
+        """
+        super().__init__(logger, log_format=log_format)
+
+        _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format)
+        if not _compiled_format:
+            _compiled_format = self.compile_format(log_format)
+            AccessLogger._FORMAT_CACHE[log_format] = _compiled_format
+
+        self._log_format, self._methods = _compiled_format
+
+    def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
+        """Translate log_format into form usable by modulo formatting
+
+        All known atoms will be replaced with %s
+        Also methods for formatting of those atoms will be added to
+        _methods in appropriate order
+
+        For example we have log_format = "%a %t"
+        This format will be translated to "%s %s"
+        Also contents of _methods will be
+        [self._format_a, self._format_t]
+        These method will be called and results will be passed
+        to translated string format.
+
+        Each _format_* method receive 'args' which is list of arguments
+        given to self.log
+
+        Exceptions are _format_e, _format_i and _format_o methods which
+        also receive key name (by functools.partial)
+
+        """
+        # list of (key, method) tuples, we don't use an OrderedDict as users
+        # can repeat the same key more than once
+        methods = list()
+
+        for atom in self.FORMAT_RE.findall(log_format):
+            if atom[1] == "":
+                format_key1 = self.LOG_FORMAT_MAP[atom[0]]
+                m = getattr(AccessLogger, "_format_%s" % atom[0])
+                key_method = KeyMethod(format_key1, m)
+            else:
+                format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
+                m = getattr(AccessLogger, "_format_%s" % atom[2])
+                key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))
+
+            methods.append(key_method)
+
+        log_format = self.FORMAT_RE.sub(r"%s", log_format)
+        log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
+        return log_format, methods
+
+    @staticmethod
+    def _format_i(
+        key: str, request: BaseRequest, response: StreamResponse, time: float
+    ) -> str:
+        if request is None:
+            return "(no headers)"
+
+        # suboptimal, make istr(key) once
+        return request.headers.get(key, "-")
+
+    @staticmethod
+    def _format_o(
+        key: str, request: BaseRequest, response: StreamResponse, time: float
+    ) -> str:
+        # suboptimal, make istr(key) once
+        return response.headers.get(key, "-")
+
+    @staticmethod
+    def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
+        if request is None:
+            return "-"
+        ip = request.remote
+        return ip if ip is not None else "-"
+
+    @staticmethod
+    def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
+        tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone))
+        now = datetime.datetime.now(tz)
+        start_time = now - datetime.timedelta(seconds=time)
+        return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]")
+
+    @staticmethod
+    def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
+        return "<%s>" % os.getpid()
+
+    @staticmethod
+    def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
+        if request is None:
+            return "-"
+        return "{} {} HTTP/{}.{}".format(
+            request.method,
+            request.path_qs,
+            request.version.major,
+            request.version.minor,
+        )
+
+    @staticmethod
+    def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
+        return response.status
+
+    @staticmethod
+    def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
+        return response.body_length
+
+    @staticmethod
+    def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
+        return str(round(time))
+
+    @staticmethod
+    def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
+        return "%06f" % time
+
+    @staticmethod
+    def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
+        return str(round(time * 1000000))
+
+    def _format_line(
+        self, request: BaseRequest, response: StreamResponse, time: float
+    ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
+        return [(key, method(request, response, time)) for key, method in self._methods]
+
+    @property
+    def enabled(self) -> bool:
+        """Check if logger is enabled."""
+        # Avoid formatting the log line if it will not be emitted.
+        return self.logger.isEnabledFor(logging.INFO)
+
+    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+        try:
+            fmt_info = self._format_line(request, response, time)
+
+            values = list()
+            extra = dict()
+            for key, value in fmt_info:
+                values.append(value)
+
+                if key.__class__ is str:
+                    extra[key] = value
+                else:
+                    k1, k2 = key  # type: ignore[misc]
+                    dct = extra.get(k1, {})  # type: ignore[var-annotated,has-type]
+                    dct[k2] = value  # type: ignore[index,has-type]
+                    extra[k1] = dct  # type: ignore[has-type,assignment]
+
+            self.logger.info(self._log_format % tuple(values), extra=extra)
+        except Exception:
+            self.logger.exception("Error in logging")
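Note: AccessLogger above compiles the Apache-style format string once per process (cached in _FORMAT_CACHE) into a "%s" template plus a list of per-atom formatter methods. A sketch of passing a custom format through web.run_app; the handler and the format string are only examples, not taken from the diff:

    from aiohttp import web

    async def hello(request: web.Request) -> web.Response:
        return web.Response(text="hello")

    app = web.Application()
    app.router.add_get("/", hello)

    # Atoms per the docstring above: %a remote IP, %t start time, %r request
    # line, %s status, %Tf duration in seconds, %{User-Agent}i request header.
    web.run_app(app, access_log_format='%a %t "%r" %s %Tf "%{User-Agent}i"')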
venv/Lib/site-packages/aiohttp/web_middlewares.py
ADDED
@@ -0,0 +1,121 @@
+import re
+from typing import TYPE_CHECKING, Tuple, Type, TypeVar
+
+from .typedefs import Handler, Middleware
+from .web_exceptions import HTTPMove, HTTPPermanentRedirect
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_urldispatcher import SystemRoute
+
+__all__ = (
+    "middleware",
+    "normalize_path_middleware",
+)
+
+if TYPE_CHECKING:
+    from .web_app import Application
+
+_Func = TypeVar("_Func")
+
+
+async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
+    alt_request = request.clone(rel_url=path)
+
+    match_info = await request.app.router.resolve(alt_request)
+    alt_request._match_info = match_info
+
+    if match_info.http_exception is None:
+        return True, alt_request
+
+    return False, request
+
+
+def middleware(f: _Func) -> _Func:
+    f.__middleware_version__ = 1  # type: ignore[attr-defined]
+    return f
+
+
+def normalize_path_middleware(
+    *,
+    append_slash: bool = True,
+    remove_slash: bool = False,
+    merge_slashes: bool = True,
+    redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
+) -> Middleware:
+    """Factory for producing a middleware that normalizes the path of a request.
+
+    Normalizing means:
+        - Add or remove a trailing slash to the path.
+        - Double slashes are replaced by one.
+
+    The middleware returns as soon as it finds a path that resolves
+    correctly. The order if both merge and append/remove are enabled is
+        1) merge slashes
+        2) append/remove slash
+        3) both merge slashes and append/remove slash.
+    If the path resolves with at least one of those conditions, it will
+    redirect to the new path.
+
+    Only one of `append_slash` and `remove_slash` can be enabled. If both
+    are `True` the factory will raise an assertion error
+
+    If `append_slash` is `True` the middleware will append a slash when
+    needed. If a resource is defined with trailing slash and the request
+    comes without it, it will append it automatically.
+
+    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
+    the middleware will remove trailing slashes and redirect if the resource
+    is defined
+
+    If merge_slashes is True, merge multiple consecutive slashes in the
+    path into one.
+    """
+    correct_configuration = not (append_slash and remove_slash)
+    assert correct_configuration, "Cannot both remove and append slash"
+
+    @middleware
+    async def impl(request: Request, handler: Handler) -> StreamResponse:
+        if isinstance(request.match_info.route, SystemRoute):
+            paths_to_check = []
+            if "?" in request.raw_path:
+                path, query = request.raw_path.split("?", 1)
+                query = "?" + query
+            else:
+                query = ""
+                path = request.raw_path
+
+            if merge_slashes:
+                paths_to_check.append(re.sub("//+", "/", path))
+            if append_slash and not request.path.endswith("/"):
+                paths_to_check.append(path + "/")
+            if remove_slash and request.path.endswith("/"):
+                paths_to_check.append(path[:-1])
+            if merge_slashes and append_slash:
+                paths_to_check.append(re.sub("//+", "/", path + "/"))
+            if merge_slashes and remove_slash:
+                merged_slashes = re.sub("//+", "/", path)
+                paths_to_check.append(merged_slashes[:-1])
+
+            for path in paths_to_check:
+                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
+                resolves, request = await _check_request_resolves(request, path)
+                if resolves:
+                    raise redirect_class(request.raw_path + query)
+
+        return await handler(request)
+
+    return impl
+
+
+def _fix_request_current_app(app: "Application") -> Middleware:
+    @middleware
+    async def impl(request: Request, handler: Handler) -> StreamResponse:
+        match_info = request.match_info
+        prev = match_info.current_app
+        match_info.current_app = app
+        try:
+            return await handler(request)
+        finally:
+            match_info.current_app = prev
+
+    return impl
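Note: normalize_path_middleware above only acts when the original path failed to match (a SystemRoute, i.e. a 404/405 match), probing merged/slashed variants and redirecting with HTTPPermanentRedirect (308) by default. A wiring sketch, assuming a hypothetical route; choosing remove_slash over the default append_slash is illustrative only:

    from aiohttp import web

    async def users(request: web.Request) -> web.Response:
        return web.Response(text="users")

    app = web.Application(
        middlewares=[
            web.normalize_path_middleware(append_slash=False, remove_slash=True)
        ]
    )
    app.router.add_get("/users", users)
    # GET /users/ or //users now redirects (308) to /users instead of 404ing.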
venv/Lib/site-packages/aiohttp/web_protocol.py
ADDED
@@ -0,0 +1,792 @@
+import asyncio
+import asyncio.streams
+import sys
+import traceback
+import warnings
+from collections import deque
+from contextlib import suppress
+from html import escape as html_escape
+from http import HTTPStatus
+from logging import Logger
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Awaitable,
+    Callable,
+    Deque,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+import attr
+import yarl
+from propcache import under_cached_property
+
+from .abc import AbstractAccessLogger, AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .helpers import ceil_timeout
+from .http import (
+    HttpProcessingError,
+    HttpRequestParser,
+    HttpVersion10,
+    RawRequestMessage,
+    StreamWriter,
+)
+from .http_exceptions import BadHttpMethod
+from .log import access_logger, server_logger
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .tcp_helpers import tcp_keepalive
+from .web_exceptions import HTTPException, HTTPInternalServerError
+from .web_log import AccessLogger
+from .web_request import BaseRequest
+from .web_response import Response, StreamResponse
+
+__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
+
+if TYPE_CHECKING:
+    import ssl
+
+    from .web_server import Server
+
+
+_RequestFactory = Callable[
+    [
+        RawRequestMessage,
+        StreamReader,
+        "RequestHandler",
+        AbstractStreamWriter,
+        "asyncio.Task[None]",
+    ],
+    BaseRequest,
+]
+
+_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
+
+ERROR = RawRequestMessage(
+    "UNKNOWN",
+    "/",
+    HttpVersion10,
+    {},  # type: ignore[arg-type]
+    {},  # type: ignore[arg-type]
+    True,
+    None,
+    False,
+    False,
+    yarl.URL("/"),
+)
+
+
+class RequestPayloadError(Exception):
+    """Payload parsing error."""
+
+
+class PayloadAccessError(Exception):
+    """Payload was accessed after response was sent."""
+
+
+_PAYLOAD_ACCESS_ERROR = PayloadAccessError()
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class _ErrInfo:
+    status: int
+    exc: BaseException
+    message: str
+
+
+_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
+
+
+class RequestHandler(BaseProtocol):
+    """HTTP protocol implementation.
+
+    RequestHandler handles incoming HTTP request. It reads request line,
+    request headers and request payload and calls handle_request() method.
+    By default it always returns with 404 response.
+
+    RequestHandler handles errors in incoming request, like bad
+    status line, bad headers or incomplete payload. If any error occurs,
+    connection gets closed.
+
+    keepalive_timeout -- number of seconds before closing
+                         keep-alive connection
+
+    tcp_keepalive -- TCP keep-alive is on, default is on
+
+    debug -- enable debug mode
+
+    logger -- custom logger object
+
+    access_log_class -- custom class for access_logger
+
+    access_log -- custom logging object
+
+    access_log_format -- access log format string
+
+    loop -- Optional event loop
+
+    max_line_size -- Optional maximum header line size
+
+    max_field_size -- Optional maximum header field size
+
+    max_headers -- Optional maximum header size
+
+    timeout_ceil_threshold -- Optional value to specify
+                              threshold to ceil() timeout
+                              values
+
+    """
+
+    __slots__ = (
+        "_request_count",
+        "_keepalive",
+        "_manager",
+        "_request_handler",
+        "_request_factory",
+        "_tcp_keepalive",
+        "_next_keepalive_close_time",
+        "_keepalive_handle",
+        "_keepalive_timeout",
+        "_lingering_time",
+        "_messages",
+        "_message_tail",
+        "_handler_waiter",
+        "_waiter",
+        "_task_handler",
+        "_upgrade",
+        "_payload_parser",
+        "_request_parser",
+        "_reading_paused",
+        "logger",
+        "debug",
+        "access_log",
+        "access_logger",
+        "_close",
+        "_force_close",
+        "_current_request",
+        "_timeout_ceil_threshold",
+        "_request_in_progress",
+        "_logging_enabled",
+        "_cache",
+    )
+
+    def __init__(
+        self,
+        manager: "Server",
+        *,
+        loop: asyncio.AbstractEventLoop,
+        # Default should be high enough that it's likely longer than a reverse proxy.
+        keepalive_timeout: float = 3630,
+        tcp_keepalive: bool = True,
+        logger: Logger = server_logger,
+        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+        access_log: Logger = access_logger,
+        access_log_format: str = AccessLogger.LOG_FORMAT,
+        debug: bool = False,
+        max_line_size: int = 8190,
+        max_headers: int = 32768,
+        max_field_size: int = 8190,
+        lingering_time: float = 10.0,
+        read_bufsize: int = 2**16,
+        auto_decompress: bool = True,
+        timeout_ceil_threshold: float = 5,
+    ):
+        super().__init__(loop)
+
+        # _request_count is the number of requests processed with the same connection.
+        self._request_count = 0
+        self._keepalive = False
+        self._current_request: Optional[BaseRequest] = None
+        self._manager: Optional[Server] = manager
+        self._request_handler: Optional[_RequestHandler] = manager.request_handler
+        self._request_factory: Optional[_RequestFactory] = manager.request_factory
+
+        self._tcp_keepalive = tcp_keepalive
+        # placeholder to be replaced on keepalive timeout setup
+        self._next_keepalive_close_time = 0.0
+        self._keepalive_handle: Optional[asyncio.Handle] = None
+        self._keepalive_timeout = keepalive_timeout
+        self._lingering_time = float(lingering_time)
+
+        self._messages: Deque[_MsgType] = deque()
+        self._message_tail = b""
+
+        self._waiter: Optional[asyncio.Future[None]] = None
+        self._handler_waiter: Optional[asyncio.Future[None]] = None
+        self._task_handler: Optional[asyncio.Task[None]] = None
+
+        self._upgrade = False
+        self._payload_parser: Any = None
+        self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
+            self,
+            loop,
+            read_bufsize,
+            max_line_size=max_line_size,
+            max_field_size=max_field_size,
+            max_headers=max_headers,
+            payload_exception=RequestPayloadError,
+            auto_decompress=auto_decompress,
+        )
+
+        self._timeout_ceil_threshold: float = 5
+        try:
+            self._timeout_ceil_threshold = float(timeout_ceil_threshold)
+        except (TypeError, ValueError):
+            pass
+
+        self.logger = logger
+        self.debug = debug
+        self.access_log = access_log
+        if access_log:
+            self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
+                access_log, access_log_format
+            )
+            self._logging_enabled = self.access_logger.enabled
+        else:
+            self.access_logger = None
+            self._logging_enabled = False
+
+        self._close = False
+        self._force_close = False
+        self._request_in_progress = False
+        self._cache: dict[str, Any] = {}
+
+    def __repr__(self) -> str:
+        return "<{} {}>".format(
+            self.__class__.__name__,
+            "connected" if self.transport is not None else "disconnected",
+        )
+
+    @under_cached_property
+    def ssl_context(self) -> Optional["ssl.SSLContext"]:
+        """Return SSLContext if available."""
+        return (
+            None
+            if self.transport is None
+            else self.transport.get_extra_info("sslcontext")
+        )
+
+    @under_cached_property
+    def peername(
+        self,
+    ) -> Optional[Union[str, Tuple[str, int, int, int], Tuple[str, int]]]:
+        """Return peername if available."""
+        return (
+            None
+            if self.transport is None
+            else self.transport.get_extra_info("peername")
+        )
+
+    @property
+    def keepalive_timeout(self) -> float:
+        return self._keepalive_timeout
+
+    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
+        """Do worker process exit preparations.
+
+        We need to clean up everything and stop accepting requests.
+        It is especially important for keep-alive connections.
+        """
+        self._force_close = True
+
+        if self._keepalive_handle is not None:
+            self._keepalive_handle.cancel()
+
+        # Wait for graceful handler completion
+        if self._request_in_progress:
+            # The future is only created when we are shutting
+            # down while the handler is still processing a request
+            # to avoid creating a future for every request.
+            self._handler_waiter = self._loop.create_future()
+            try:
+                async with ceil_timeout(timeout):
+                    await self._handler_waiter
+            except (asyncio.CancelledError, asyncio.TimeoutError):
+                self._handler_waiter = None
+                if (
+                    sys.version_info >= (3, 11)
+                    and (task := asyncio.current_task())
+                    and task.cancelling()
+                ):
+                    raise
+        # Then cancel handler and wait
+        try:
+            async with ceil_timeout(timeout):
+                if self._current_request is not None:
+                    self._current_request._cancel(asyncio.CancelledError())
+
+                if self._task_handler is not None and not self._task_handler.done():
+                    await asyncio.shield(self._task_handler)
+        except (asyncio.CancelledError, asyncio.TimeoutError):
+            if (
+                sys.version_info >= (3, 11)
+                and (task := asyncio.current_task())
+                and task.cancelling()
+            ):
+                raise
+
+        # force-close non-idle handler
+        if self._task_handler is not None:
+            self._task_handler.cancel()
+
+        self.force_close()
+
+    def connection_made(self, transport: asyncio.BaseTransport) -> None:
+        super().connection_made(transport)
+
+        real_transport = cast(asyncio.Transport, transport)
+        if self._tcp_keepalive:
+            tcp_keepalive(real_transport)
+
+        assert self._manager is not None
+        self._manager.connection_made(self, real_transport)
+
+        loop = self._loop
+        if sys.version_info >= (3, 12):
+            task = asyncio.Task(self.start(), loop=loop, eager_start=True)
+        else:
+            task = loop.create_task(self.start())
+        self._task_handler = task
+
+    def connection_lost(self, exc: Optional[BaseException]) -> None:
+        if self._manager is None:
+            return
+        self._manager.connection_lost(self, exc)
+
+        # Grab value before setting _manager to None.
+        handler_cancellation = self._manager.handler_cancellation
+
+        self.force_close()
+        super().connection_lost(exc)
+        self._manager = None
+        self._request_factory = None
+        self._request_handler = None
+        self._request_parser = None
+
+        if self._keepalive_handle is not None:
+            self._keepalive_handle.cancel()
+
+        if self._current_request is not None:
+            if exc is None:
+                exc = ConnectionResetError("Connection lost")
+            self._current_request._cancel(exc)
+
+        if handler_cancellation and self._task_handler is not None:
+            self._task_handler.cancel()
+
+        self._task_handler = None
+
+        if self._payload_parser is not None:
+            self._payload_parser.feed_eof()
+            self._payload_parser = None
+
+    def set_parser(self, parser: Any) -> None:
+        # Actual type is WebReader
+        assert self._payload_parser is None
+
+        self._payload_parser = parser
+
+        if self._message_tail:
+            self._payload_parser.feed_data(self._message_tail)
+            self._message_tail = b""
+
+    def eof_received(self) -> None:
+        pass
+
+    def data_received(self, data: bytes) -> None:
+        if self._force_close or self._close:
+            return
+        # parse http messages
+        messages: Sequence[_MsgType]
+        if self._payload_parser is None and not self._upgrade:
+            assert self._request_parser is not None
+            try:
+                messages, upgraded, tail = self._request_parser.feed_data(data)
+            except HttpProcessingError as exc:
+                messages = [
+                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
+                ]
+                upgraded = False
+                tail = b""
+
+            for msg, payload in messages or ():
+                self._request_count += 1
+                self._messages.append((msg, payload))
+
+            waiter = self._waiter
+            if messages and waiter is not None and not waiter.done():
+                # don't set result twice
+                waiter.set_result(None)
+
+            self._upgrade = upgraded
+            if upgraded and tail:
+                self._message_tail = tail
+
+        # no parser, just store
+        elif self._payload_parser is None and self._upgrade and data:
+            self._message_tail += data
+
+        # feed payload
+        elif data:
+            eof, tail = self._payload_parser.feed_data(data)
+            if eof:
+                self.close()
+
+    def keep_alive(self, val: bool) -> None:
+        """Set keep-alive connection mode.
+
+        :param bool val: new state.
+        """
+        self._keepalive = val
+        if self._keepalive_handle:
+            self._keepalive_handle.cancel()
+            self._keepalive_handle = None
+
+    def close(self) -> None:
+        """Close connection.
+
+        Stop accepting new pipelining messages and close
+        connection when handlers done processing messages.
+        """
+        self._close = True
+        if self._waiter:
+            self._waiter.cancel()
+
+    def force_close(self) -> None:
+        """Forcefully close connection."""
+        self._force_close = True
+        if self._waiter:
+            self._waiter.cancel()
+        if self.transport is not None:
+            self.transport.close()
+            self.transport = None
+
+    def log_access(
+        self, request: BaseRequest, response: StreamResponse, time: Optional[float]
+    ) -> None:
+        if self.access_logger is not None and self.access_logger.enabled:
+            if TYPE_CHECKING:
+                assert time is not None
+            self.access_logger.log(request, response, self._loop.time() - time)
+
+    def log_debug(self, *args: Any, **kw: Any) -> None:
+        if self.debug:
+            self.logger.debug(*args, **kw)
+
+    def log_exception(self, *args: Any, **kw: Any) -> None:
+        self.logger.exception(*args, **kw)
+
+    def _process_keepalive(self) -> None:
+        self._keepalive_handle = None
+        if self._force_close or not self._keepalive:
+            return
+
+        loop = self._loop
+        now = loop.time()
+        close_time = self._next_keepalive_close_time
+        if now < close_time:
+            # Keep alive close check fired too early, reschedule
+            self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
+            return
+
+        # handler in idle state
+        if self._waiter and not self._waiter.done():
+            self.force_close()
+
+    async def _handle_request(
+        self,
+        request: BaseRequest,
+        start_time: Optional[float],
+        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
+    ) -> Tuple[StreamResponse, bool]:
+        self._request_in_progress = True
+        try:
+            try:
+                self._current_request = request
+                resp = await request_handler(request)
+            finally:
+                self._current_request = None
+        except HTTPException as exc:
+            resp = exc
+            resp, reset = await self.finish_response(request, resp, start_time)
+        except asyncio.CancelledError:
+            raise
+        except asyncio.TimeoutError as exc:
+            self.log_debug("Request handler timed out.", exc_info=exc)
+            resp = self.handle_error(request, 504)
+            resp, reset = await self.finish_response(request, resp, start_time)
+        except Exception as exc:
+            resp = self.handle_error(request, 500, exc)
+            resp, reset = await self.finish_response(request, resp, start_time)
+        else:
+            # Deprecation warning (See #2415)
+            if getattr(resp, "__http_exception__", False):
+                warnings.warn(
+                    "returning HTTPException object is deprecated "
+                    "(#2415) and will be removed, "
+                    "please raise the exception instead",
+                    DeprecationWarning,
+                )
+
+            resp, reset = await self.finish_response(request, resp, start_time)
+        finally:
+            self._request_in_progress = False
+            if self._handler_waiter is not None:
+                self._handler_waiter.set_result(None)
+
+        return resp, reset
+
+    async def start(self) -> None:
+        """Process incoming request.
+
+        It reads request line, request headers and request payload, then
+        calls handle_request() method. Subclass has to override
+        handle_request(). start() handles various exceptions in request
+        or response handling. Connection is being closed always unless
+        keep_alive(True) specified.
+        """
+        loop = self._loop
+        manager = self._manager
+        assert manager is not None
+        keepalive_timeout = self._keepalive_timeout
+        resp = None
+        assert self._request_factory is not None
+        assert self._request_handler is not None
+
+        while not self._force_close:
+            if not self._messages:
+                try:
+                    # wait for next request
+                    self._waiter = loop.create_future()
+                    await self._waiter
+                finally:
+                    self._waiter = None
+
+            message, payload = self._messages.popleft()
+
+            # time is only fetched if logging is enabled as otherwise
+            # its thrown away and never used.
+            start = loop.time() if self._logging_enabled else None
+
+            manager.requests_count += 1
+            writer = StreamWriter(self, loop)
+            if isinstance(message, _ErrInfo):
+                # make request_factory work
+                request_handler = self._make_error_handler(message)
+                message = ERROR
+            else:
+                request_handler = self._request_handler
+
+            # Important don't hold a reference to the current task
+            # as on traceback it will prevent the task from being
+            # collected and will cause a memory leak.
+            request = self._request_factory(
+                message,
+                payload,
+                self,
+                writer,
+                self._task_handler or asyncio.current_task(loop),  # type: ignore[arg-type]
+            )
+            try:
+                # a new task is used for copy context vars (#3406)
+                coro = self._handle_request(request, start, request_handler)
+                if sys.version_info >= (3, 12):
+                    task = asyncio.Task(coro, loop=loop, eager_start=True)
+                else:
+                    task = loop.create_task(coro)
+                try:
+                    resp, reset = await task
+                except ConnectionError:
+                    self.log_debug("Ignored premature client disconnection")
+                    break
+
+                # Drop the processed task from asyncio.Task.all_tasks() early
+                del task
+                if reset:
+                    self.log_debug("Ignored premature client disconnection 2")
+                    break
+
+                # notify server about keep-alive
+                self._keepalive = bool(resp.keep_alive)
+
+                # check payload
+                if not payload.is_eof():
+                    lingering_time = self._lingering_time
+                    if not self._force_close and lingering_time:
+                        self.log_debug(
+                            "Start lingering close timer for %s sec.", lingering_time
+                        )
+
+                        now = loop.time()
+                        end_t = now + lingering_time
+
+                        try:
+                            while not payload.is_eof() and now < end_t:
+                                async with ceil_timeout(end_t - now):
+                                    # read and ignore
+                                    await payload.readany()
+                                    now = loop.time()
+                        except (asyncio.CancelledError, asyncio.TimeoutError):
+                            if (
+                                sys.version_info >= (3, 11)
+                                and (t := asyncio.current_task())
+                                and t.cancelling()
+                            ):
+                                raise
+
+                    # if payload still uncompleted
+                    if not payload.is_eof() and not self._force_close:
+                        self.log_debug("Uncompleted request.")
+                        self.close()
+
+                    payload.set_exception(_PAYLOAD_ACCESS_ERROR)
+
+            except asyncio.CancelledError:
+                self.log_debug("Ignored premature client disconnection")
+                self.force_close()
+                raise
+            except Exception as exc:
+                self.log_exception("Unhandled exception", exc_info=exc)
+                self.force_close()
+            except BaseException:
+                self.force_close()
+                raise
+            finally:
+                request._task = None  # type: ignore[assignment] # Break reference cycle in case of exception
+                if self.transport is None and resp is not None:
+                    self.log_debug("Ignored premature client disconnection.")
+
+            if self._keepalive and not self._close and not self._force_close:
+                # start keep-alive timer
+                close_time = loop.time() + keepalive_timeout
+                self._next_keepalive_close_time = close_time
+                if self._keepalive_handle is None:
+                    self._keepalive_handle = loop.call_at(
+                        close_time, self._process_keepalive
+                    )
+            else:
+                break
+
+        # remove handler, close transport if no handlers left
+        if not self._force_close:
+            self._task_handler = None
+            if self.transport is not None:
+                self.transport.close()
+
+    async def finish_response(
+        self, request: BaseRequest, resp: StreamResponse, start_time: Optional[float]
+    ) -> Tuple[StreamResponse, bool]:
+        """Prepare the response and write_eof, then log access.
+
+        This has to
+        be called within the context of any exception so the access logger
+        can get exception information. Returns True if the client disconnects
+        prematurely.
+        """
+        request._finish()
+        if self._request_parser is not None:
+            self._request_parser.set_upgraded(False)
+            self._upgrade = False
+            if self._message_tail:
+                self._request_parser.feed_data(self._message_tail)
+                self._message_tail = b""
+        try:
+            prepare_meth = resp.prepare
+        except AttributeError:
+            if resp is None:
+                self.log_exception("Missing return statement on request handler")
+            else:
+                self.log_exception(
+                    "Web-handler should return a response instance, "
+                    "got {!r}".format(resp)
+                )
+            exc = HTTPInternalServerError()
+            resp = Response(
+                status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
+            )
+            prepare_meth = resp.prepare
+        try:
+            await prepare_meth(request)
+            await resp.write_eof()
+        except ConnectionError:
+            self.log_access(request, resp, start_time)
+            return resp, True
+
+        self.log_access(request, resp, start_time)
+        return resp, False
+
+    def handle_error(
+        self,
+        request: BaseRequest,
+        status: int = 500,
+        exc: Optional[BaseException] = None,
+        message: Optional[str] = None,
+    ) -> StreamResponse:
+        """Handle errors.
+
+        Returns HTTP response with specific status code. Logs additional
+        information. It always closes current connection.
+        """
+        if self._request_count == 1 and isinstance(exc, BadHttpMethod):
+            # BadHttpMethod is common when a client sends non-HTTP
+            # or encrypted traffic to an HTTP port. This is expected
+            # to happen when connected to the public internet so we log
+            # it at the debug level as to not fill logs with noise.
+            self.logger.debug(
+                "Error handling request from %s", request.remote, exc_info=exc
+            )
+        else:
+            self.log_exception(
+                "Error handling request from %s", request.remote, exc_info=exc
+            )
+
+        # some data already got sent, connection is broken
+        if request.writer.output_size > 0:
+            raise ConnectionError(
+                "Response is sent already, cannot send another response "
+                "with the error message"
+            )
+
+        ct = "text/plain"
+        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
+            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
+            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
+            tb = None
+            if self.debug:
+                with suppress(Exception):
+                    tb = traceback.format_exc()
+
+            if "text/html" in request.headers.get("Accept", ""):
|
764 |
+
if tb:
|
765 |
+
tb = html_escape(tb)
|
766 |
+
msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
|
767 |
+
message = (
|
768 |
+
"<html><head>"
|
769 |
+
"<title>{title}</title>"
|
770 |
+
"</head><body>\n<h1>{title}</h1>"
|
771 |
+
"\n{msg}\n</body></html>\n"
|
772 |
+
).format(title=title, msg=msg)
|
773 |
+
ct = "text/html"
|
774 |
+
else:
|
775 |
+
if tb:
|
776 |
+
msg = tb
|
777 |
+
message = title + "\n\n" + msg
|
778 |
+
|
779 |
+
resp = Response(status=status, text=message, content_type=ct)
|
780 |
+
resp.force_close()
|
781 |
+
|
782 |
+
return resp
|
783 |
+
|
784 |
+
def _make_error_handler(
|
785 |
+
self, err_info: _ErrInfo
|
786 |
+
) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
|
787 |
+
async def handler(request: BaseRequest) -> StreamResponse:
|
788 |
+
return self.handle_error(
|
789 |
+
request, err_info.status, err_info.exc, err_info.message
|
790 |
+
)
|
791 |
+
|
792 |
+
return handler
|
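Note: the lingering-close loop above drains whatever is left of the request body before the connection is torn down, bounded by a wall-clock deadline, so a slow client cannot pin the connection open. A minimal standalone sketch of the same drain-with-deadline pattern, assuming Python 3.11+ (for asyncio.timeout) and a plain asyncio.StreamReader rather than aiohttp's internal payload stream:

import asyncio


async def drain_with_deadline(reader: asyncio.StreamReader, lingering_time: float) -> None:
    # Read and discard whatever is left of the body until EOF or the deadline.
    loop = asyncio.get_running_loop()
    end_t = loop.time() + lingering_time
    while not reader.at_eof() and loop.time() < end_t:
        try:
            async with asyncio.timeout(end_t - loop.time()):
                await reader.read(2**16)
        except TimeoutError:
            break  # deadline reached; stop draining and let the caller close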
venv/Lib/site-packages/aiohttp/web_request.py
ADDED
@@ -0,0 +1,914 @@
+import asyncio
+import datetime
+import io
+import re
+import socket
+import string
+import tempfile
+import types
+import warnings
+from http.cookies import SimpleCookie
+from types import MappingProxyType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Final,
+    Iterator,
+    Mapping,
+    MutableMapping,
+    Optional,
+    Pattern,
+    Tuple,
+    Union,
+    cast,
+)
+from urllib.parse import parse_qsl
+
+import attr
+from multidict import (
+    CIMultiDict,
+    CIMultiDictProxy,
+    MultiDict,
+    MultiDictProxy,
+    MultiMapping,
+)
+from yarl import URL
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import (
+    _SENTINEL,
+    DEBUG,
+    ETAG_ANY,
+    LIST_QUOTED_ETAG_RE,
+    ChainMapProxy,
+    ETag,
+    HeadersMixin,
+    parse_http_date,
+    reify,
+    sentinel,
+    set_exception,
+)
+from .http_parser import RawRequestMessage
+from .http_writer import HttpVersion
+from .multipart import BodyPartReader, MultipartReader
+from .streams import EmptyStreamReader, StreamReader
+from .typedefs import (
+    DEFAULT_JSON_DECODER,
+    JSONDecoder,
+    LooseHeaders,
+    RawHeaders,
+    StrOrURL,
+)
+from .web_exceptions import HTTPRequestEntityTooLarge
+from .web_response import StreamResponse
+
+__all__ = ("BaseRequest", "FileField", "Request")
+
+
+if TYPE_CHECKING:
+    from .web_app import Application
+    from .web_protocol import RequestHandler
+    from .web_urldispatcher import UrlMappingMatchInfo
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class FileField:
+    name: str
+    filename: str
+    file: io.BufferedReader
+    content_type: str
+    headers: CIMultiDictProxy[str]
+
+
+_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
+# '-' at the end to prevent interpretation as range in a char class
+
+_TOKEN: Final[str] = rf"[{_TCHAR}]+"
+
+_QDTEXT: Final[str] = r"[{}]".format(
+    r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
+)
+# qdtext includes 0x5C to escape 0x5D ('\]')
+# qdtext excludes obs-text (because obsoleted, and encoding not specified)
+
+_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"
+
+_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
+    qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
+)
+
+_FORWARDED_PAIR: Final[str] = (
+    r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
+        token=_TOKEN, quoted_string=_QUOTED_STRING
+    )
+)
+
+_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
+# same pattern as _QUOTED_PAIR but contains a capture group
+
+_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
+
+############################################################
+# HTTP Request
+############################################################
+
+
+class BaseRequest(MutableMapping[str, Any], HeadersMixin):
+
+    POST_METHODS = {
+        hdrs.METH_PATCH,
+        hdrs.METH_POST,
+        hdrs.METH_PUT,
+        hdrs.METH_TRACE,
+        hdrs.METH_DELETE,
+    }
+
+    ATTRS = HeadersMixin.ATTRS | frozenset(
+        [
+            "_message",
+            "_protocol",
+            "_payload_writer",
+            "_payload",
+            "_headers",
+            "_method",
+            "_version",
+            "_rel_url",
+            "_post",
+            "_read_bytes",
+            "_state",
+            "_cache",
+            "_task",
+            "_client_max_size",
+            "_loop",
+            "_transport_sslcontext",
+            "_transport_peername",
+        ]
+    )
+    _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
+    _read_bytes: Optional[bytes] = None
+
+    def __init__(
+        self,
+        message: RawRequestMessage,
+        payload: StreamReader,
+        protocol: "RequestHandler",
+        payload_writer: AbstractStreamWriter,
+        task: "asyncio.Task[None]",
+        loop: asyncio.AbstractEventLoop,
+        *,
+        client_max_size: int = 1024**2,
+        state: Optional[Dict[str, Any]] = None,
+        scheme: Optional[str] = None,
+        host: Optional[str] = None,
+        remote: Optional[str] = None,
+    ) -> None:
+        self._message = message
+        self._protocol = protocol
+        self._payload_writer = payload_writer
+
+        self._payload = payload
+        self._headers: CIMultiDictProxy[str] = message.headers
+        self._method = message.method
+        self._version = message.version
+        self._cache: Dict[str, Any] = {}
+        url = message.url
+        if url.absolute:
+            if scheme is not None:
+                url = url.with_scheme(scheme)
+            if host is not None:
+                url = url.with_host(host)
+            # absolute URL is given,
+            # override auto-calculating url, host, and scheme
+            # all other properties should be good
+            self._cache["url"] = url
+            self._cache["host"] = url.host
+            self._cache["scheme"] = url.scheme
+            self._rel_url = url.relative()
+        else:
+            self._rel_url = url
+            if scheme is not None:
+                self._cache["scheme"] = scheme
+            if host is not None:
+                self._cache["host"] = host
+
+        self._state = {} if state is None else state
+        self._task = task
+        self._client_max_size = client_max_size
+        self._loop = loop
+
+        self._transport_sslcontext = protocol.ssl_context
+        self._transport_peername = protocol.peername
+
+        if remote is not None:
+            self._cache["remote"] = remote
+
+    def clone(
+        self,
+        *,
+        method: Union[str, _SENTINEL] = sentinel,
+        rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
+        headers: Union[LooseHeaders, _SENTINEL] = sentinel,
+        scheme: Union[str, _SENTINEL] = sentinel,
+        host: Union[str, _SENTINEL] = sentinel,
+        remote: Union[str, _SENTINEL] = sentinel,
+        client_max_size: Union[int, _SENTINEL] = sentinel,
+    ) -> "BaseRequest":
+        """Clone itself, replacing some attributes.
+
+        Creates and returns a new instance of Request object. If no parameters
+        are given, an exact copy is returned. If a parameter is not passed, it
+        will reuse the one from the current request object.
+        """
+        if self._read_bytes:
+            raise RuntimeError("Cannot clone request after reading its content")
+
+        dct: Dict[str, Any] = {}
+        if method is not sentinel:
+            dct["method"] = method
+        if rel_url is not sentinel:
+            new_url: URL = URL(rel_url)
+            dct["url"] = new_url
+            dct["path"] = str(new_url)
+        if headers is not sentinel:
+            # a copy semantic
+            dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
+            dct["raw_headers"] = tuple(
+                (k.encode("utf-8"), v.encode("utf-8"))
+                for k, v in dct["headers"].items()
+            )
+
+        message = self._message._replace(**dct)
+
+        kwargs = {}
+        if scheme is not sentinel:
+            kwargs["scheme"] = scheme
+        if host is not sentinel:
+            kwargs["host"] = host
+        if remote is not sentinel:
+            kwargs["remote"] = remote
+        if client_max_size is sentinel:
+            client_max_size = self._client_max_size
+
+        return self.__class__(
+            message,
+            self._payload,
+            self._protocol,
+            self._payload_writer,
+            self._task,
+            self._loop,
+            client_max_size=client_max_size,
+            state=self._state.copy(),
+            **kwargs,
+        )
+
+    @property
+    def task(self) -> "asyncio.Task[None]":
+        return self._task
+
+    @property
+    def protocol(self) -> "RequestHandler":
+        return self._protocol
+
+    @property
+    def transport(self) -> Optional[asyncio.Transport]:
+        if self._protocol is None:
+            return None
+        return self._protocol.transport
+
+    @property
+    def writer(self) -> AbstractStreamWriter:
+        return self._payload_writer
+
+    @property
+    def client_max_size(self) -> int:
+        return self._client_max_size
+
+    @reify
+    def message(self) -> RawRequestMessage:
+        warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
+        return self._message
+
+    @reify
+    def rel_url(self) -> URL:
+        return self._rel_url
+
+    @reify
+    def loop(self) -> asyncio.AbstractEventLoop:
+        warnings.warn(
+            "request.loop property is deprecated", DeprecationWarning, stacklevel=2
+        )
+        return self._loop
+
+    # MutableMapping API
+
+    def __getitem__(self, key: str) -> Any:
+        return self._state[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self._state[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self._state[key]
+
+    def __len__(self) -> int:
+        return len(self._state)
+
+    def __iter__(self) -> Iterator[str]:
+        return iter(self._state)
+
+    ########
+
+    @reify
+    def secure(self) -> bool:
+        """A bool indicating if the request is handled with SSL."""
+        return self.scheme == "https"
+
+    @reify
+    def forwarded(self) -> Tuple[Mapping[str, str], ...]:
+        """A tuple containing all parsed Forwarded header(s).
+
+        Makes an effort to parse Forwarded headers as specified by RFC 7239:
+
+        - It adds one (immutable) dictionary per Forwarded 'field-value', ie
+          per proxy. The element corresponds to the data in the Forwarded
+          field-value added by the first proxy encountered by the client. Each
+          subsequent item corresponds to those added by later proxies.
+        - It checks that every value has valid syntax in general as specified
+          in section 4: either a 'token' or a 'quoted-string'.
+        - It un-escapes found escape sequences.
+        - It does NOT validate 'by' and 'for' contents as specified in section
+          6.
+        - It does NOT validate 'host' contents (Host ABNF).
+        - It does NOT validate 'proto' contents for valid URI scheme names.
+
+        Returns a tuple containing one or more immutable dicts
+        """
+        elems = []
+        for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
+            length = len(field_value)
+            pos = 0
+            need_separator = False
+            elem: Dict[str, str] = {}
+            elems.append(types.MappingProxyType(elem))
+            while 0 <= pos < length:
+                match = _FORWARDED_PAIR_RE.match(field_value, pos)
+                if match is not None:  # got a valid forwarded-pair
+                    if need_separator:
+                        # bad syntax here, skip to next comma
+                        pos = field_value.find(",", pos)
+                    else:
+                        name, value, port = match.groups()
+                        if value[0] == '"':
+                            # quoted string: remove quotes and unescape
+                            value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
+                        if port:
+                            value += port
+                        elem[name.lower()] = value
+                        pos += len(match.group(0))
+                        need_separator = True
+                elif field_value[pos] == ",":  # next forwarded-element
+                    need_separator = False
+                    elem = {}
+                    elems.append(types.MappingProxyType(elem))
+                    pos += 1
+                elif field_value[pos] == ";":  # next forwarded-pair
+                    need_separator = False
+                    pos += 1
+                elif field_value[pos] in " \t":
+                    # Allow whitespace even between forwarded-pairs, though
+                    # RFC 7239 doesn't. This simplifies code and is in line
+                    # with Postel's law.
+                    pos += 1
+                else:
+                    # bad syntax here, skip to next comma
+                    pos = field_value.find(",", pos)
+        return tuple(elems)
+
+
|
389 |
+
@reify
|
390 |
+
def scheme(self) -> str:
|
391 |
+
"""A string representing the scheme of the request.
|
392 |
+
|
393 |
+
Hostname is resolved in this order:
|
394 |
+
|
395 |
+
- overridden value by .clone(scheme=new_scheme) call.
|
396 |
+
- type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.
|
397 |
+
|
398 |
+
'http' or 'https'.
|
399 |
+
"""
|
400 |
+
if self._transport_sslcontext:
|
401 |
+
return "https"
|
402 |
+
else:
|
403 |
+
return "http"
|
404 |
+
|
405 |
+
@reify
|
406 |
+
def method(self) -> str:
|
407 |
+
"""Read only property for getting HTTP method.
|
408 |
+
|
409 |
+
The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
|
410 |
+
"""
|
411 |
+
return self._method
|
412 |
+
|
413 |
+
@reify
|
414 |
+
def version(self) -> HttpVersion:
|
415 |
+
"""Read only property for getting HTTP version of request.
|
416 |
+
|
417 |
+
Returns aiohttp.protocol.HttpVersion instance.
|
418 |
+
"""
|
419 |
+
return self._version
|
420 |
+
|
421 |
+
@reify
|
422 |
+
def host(self) -> str:
|
423 |
+
"""Hostname of the request.
|
424 |
+
|
425 |
+
Hostname is resolved in this order:
|
426 |
+
|
427 |
+
- overridden value by .clone(host=new_host) call.
|
428 |
+
- HOST HTTP header
|
429 |
+
- socket.getfqdn() value
|
430 |
+
|
431 |
+
For example, 'example.com' or 'localhost:8080'.
|
432 |
+
|
433 |
+
For historical reasons, the port number may be included.
|
434 |
+
"""
|
435 |
+
host = self._message.headers.get(hdrs.HOST)
|
436 |
+
if host is not None:
|
437 |
+
return host
|
438 |
+
return socket.getfqdn()
|
439 |
+
|
440 |
+
@reify
|
441 |
+
def remote(self) -> Optional[str]:
|
442 |
+
"""Remote IP of client initiated HTTP request.
|
443 |
+
|
444 |
+
The IP is resolved in this order:
|
445 |
+
|
446 |
+
- overridden value by .clone(remote=new_remote) call.
|
447 |
+
- peername of opened socket
|
448 |
+
"""
|
449 |
+
if self._transport_peername is None:
|
450 |
+
return None
|
451 |
+
if isinstance(self._transport_peername, (list, tuple)):
|
452 |
+
return str(self._transport_peername[0])
|
453 |
+
return str(self._transport_peername)
|
454 |
+
|
455 |
+
@reify
|
456 |
+
def url(self) -> URL:
|
457 |
+
"""The full URL of the request."""
|
458 |
+
# authority is used here because it may include the port number
|
459 |
+
# and we want yarl to parse it correctly
|
460 |
+
return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url)
|
461 |
+
|
462 |
+
@reify
|
463 |
+
def path(self) -> str:
|
464 |
+
"""The URL including *PATH INFO* without the host or scheme.
|
465 |
+
|
466 |
+
E.g., ``/app/blog``
|
467 |
+
"""
|
468 |
+
return self._rel_url.path
|
469 |
+
|
470 |
+
@reify
|
471 |
+
def path_qs(self) -> str:
|
472 |
+
"""The URL including PATH_INFO and the query string.
|
473 |
+
|
474 |
+
E.g, /app/blog?id=10
|
475 |
+
"""
|
476 |
+
return str(self._rel_url)
|
477 |
+
|
478 |
+
@reify
|
479 |
+
def raw_path(self) -> str:
|
480 |
+
"""The URL including raw *PATH INFO* without the host or scheme.
|
481 |
+
|
482 |
+
Warning, the path is unquoted and may contains non valid URL characters
|
483 |
+
|
484 |
+
E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
|
485 |
+
"""
|
486 |
+
return self._message.path
|
487 |
+
|
488 |
+
@reify
|
489 |
+
def query(self) -> "MultiMapping[str]":
|
490 |
+
"""A multidict with all the variables in the query string."""
|
491 |
+
return self._rel_url.query
|
492 |
+
|
493 |
+
@reify
|
494 |
+
def query_string(self) -> str:
|
495 |
+
"""The query string in the URL.
|
496 |
+
|
497 |
+
E.g., id=10
|
498 |
+
"""
|
499 |
+
return self._rel_url.query_string
|
500 |
+
|
501 |
+
@reify
|
502 |
+
def headers(self) -> CIMultiDictProxy[str]:
|
503 |
+
"""A case-insensitive multidict proxy with all headers."""
|
504 |
+
return self._headers
|
505 |
+
|
506 |
+
@reify
|
507 |
+
def raw_headers(self) -> RawHeaders:
|
508 |
+
"""A sequence of pairs for all headers."""
|
509 |
+
return self._message.raw_headers
|
510 |
+
|
511 |
+
@reify
|
512 |
+
def if_modified_since(self) -> Optional[datetime.datetime]:
|
513 |
+
"""The value of If-Modified-Since HTTP header, or None.
|
514 |
+
|
515 |
+
This header is represented as a `datetime` object.
|
516 |
+
"""
|
517 |
+
return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
|
518 |
+
|
519 |
+
@reify
|
520 |
+
def if_unmodified_since(self) -> Optional[datetime.datetime]:
|
521 |
+
"""The value of If-Unmodified-Since HTTP header, or None.
|
522 |
+
|
523 |
+
This header is represented as a `datetime` object.
|
524 |
+
"""
|
525 |
+
return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
|
526 |
+
|
527 |
+
@staticmethod
|
528 |
+
def _etag_values(etag_header: str) -> Iterator[ETag]:
|
529 |
+
"""Extract `ETag` objects from raw header."""
|
530 |
+
if etag_header == ETAG_ANY:
|
531 |
+
yield ETag(
|
532 |
+
is_weak=False,
|
533 |
+
value=ETAG_ANY,
|
534 |
+
)
|
535 |
+
else:
|
536 |
+
for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
|
537 |
+
is_weak, value, garbage = match.group(2, 3, 4)
|
538 |
+
# Any symbol captured by 4th group means
|
539 |
+
# that the following sequence is invalid.
|
540 |
+
if garbage:
|
541 |
+
break
|
542 |
+
|
543 |
+
yield ETag(
|
544 |
+
is_weak=bool(is_weak),
|
545 |
+
value=value,
|
546 |
+
)
|
547 |
+
|
548 |
+
@classmethod
|
549 |
+
def _if_match_or_none_impl(
|
550 |
+
cls, header_value: Optional[str]
|
551 |
+
) -> Optional[Tuple[ETag, ...]]:
|
552 |
+
if not header_value:
|
553 |
+
return None
|
554 |
+
|
555 |
+
return tuple(cls._etag_values(header_value))
|
556 |
+
|
557 |
+
@reify
|
558 |
+
def if_match(self) -> Optional[Tuple[ETag, ...]]:
|
559 |
+
"""The value of If-Match HTTP header, or None.
|
560 |
+
|
561 |
+
This header is represented as a `tuple` of `ETag` objects.
|
562 |
+
"""
|
563 |
+
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
|
564 |
+
|
565 |
+
@reify
|
566 |
+
def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
|
567 |
+
"""The value of If-None-Match HTTP header, or None.
|
568 |
+
|
569 |
+
This header is represented as a `tuple` of `ETag` objects.
|
570 |
+
"""
|
571 |
+
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
|
572 |
+
|
573 |
+
@reify
|
574 |
+
def if_range(self) -> Optional[datetime.datetime]:
|
575 |
+
"""The value of If-Range HTTP header, or None.
|
576 |
+
|
577 |
+
This header is represented as a `datetime` object.
|
578 |
+
"""
|
579 |
+
return parse_http_date(self.headers.get(hdrs.IF_RANGE))
|
580 |
+
|
581 |
+
@reify
|
582 |
+
def keep_alive(self) -> bool:
|
583 |
+
"""Is keepalive enabled by client?"""
|
584 |
+
return not self._message.should_close
|
585 |
+
|
586 |
+
@reify
|
587 |
+
def cookies(self) -> Mapping[str, str]:
|
588 |
+
"""Return request cookies.
|
589 |
+
|
590 |
+
A read-only dictionary-like object.
|
591 |
+
"""
|
592 |
+
raw = self.headers.get(hdrs.COOKIE, "")
|
593 |
+
parsed = SimpleCookie(raw)
|
594 |
+
return MappingProxyType({key: val.value for key, val in parsed.items()})
|
595 |
+
|
596 |
+
@reify
|
597 |
+
def http_range(self) -> slice:
|
598 |
+
"""The content of Range HTTP header.
|
599 |
+
|
600 |
+
Return a slice instance.
|
601 |
+
|
602 |
+
"""
|
603 |
+
rng = self._headers.get(hdrs.RANGE)
|
604 |
+
start, end = None, None
|
605 |
+
if rng is not None:
|
606 |
+
try:
|
607 |
+
pattern = r"^bytes=(\d*)-(\d*)$"
|
608 |
+
start, end = re.findall(pattern, rng)[0]
|
609 |
+
except IndexError: # pattern was not found in header
|
610 |
+
raise ValueError("range not in acceptable format")
|
611 |
+
|
612 |
+
end = int(end) if end else None
|
613 |
+
start = int(start) if start else None
|
614 |
+
|
615 |
+
if start is None and end is not None:
|
616 |
+
# end with no start is to return tail of content
|
617 |
+
start = -end
|
618 |
+
end = None
|
619 |
+
|
620 |
+
if start is not None and end is not None:
|
621 |
+
# end is inclusive in range header, exclusive for slice
|
622 |
+
end += 1
|
623 |
+
|
624 |
+
if start >= end:
|
625 |
+
raise ValueError("start cannot be after end")
|
626 |
+
|
627 |
+
if start is end is None: # No valid range supplied
|
628 |
+
raise ValueError("No start or end of range specified")
|
629 |
+
|
630 |
+
return slice(start, end, 1)
|
631 |
+
|
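Note: the `http_range` property above maps the inclusive byte ranges of the HTTP Range header onto Python's exclusive slices. A few worked examples of that mapping (header values are hypothetical):

# Range: bytes=0-499  -> slice(0, 500, 1)     (the first 500 bytes; 499 inclusive becomes 500 exclusive)
# Range: bytes=500-   -> slice(500, None, 1)  (everything from offset 500)
# Range: bytes=-500   -> slice(-500, None, 1) (the last 500 bytes)
data = b"x" * 1000
assert data[slice(0, 500, 1)] == data[0:500]
assert len(data[slice(-500, None, 1)]) == 500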
+    @reify
+    def content(self) -> StreamReader:
+        """Return raw payload stream."""
+        return self._payload
+
+    @property
+    def has_body(self) -> bool:
+        """Return True if request's HTTP BODY can be read, False otherwise."""
+        warnings.warn(
+            "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
+        )
+        return not self._payload.at_eof()
+
+    @property
+    def can_read_body(self) -> bool:
+        """Return True if request's HTTP BODY can be read, False otherwise."""
+        return not self._payload.at_eof()
+
+    @reify
+    def body_exists(self) -> bool:
+        """Return True if request has HTTP BODY, False otherwise."""
+        return type(self._payload) is not EmptyStreamReader
+
+    async def release(self) -> None:
+        """Release request.
+
+        Eat unread part of HTTP BODY if present.
+        """
+        while not self._payload.at_eof():
+            await self._payload.readany()
+
+    async def read(self) -> bytes:
+        """Read request body if present.
+
+        Returns bytes object with full request content.
+        """
+        if self._read_bytes is None:
+            body = bytearray()
+            while True:
+                chunk = await self._payload.readany()
+                body.extend(chunk)
+                if self._client_max_size:
+                    body_size = len(body)
+                    if body_size >= self._client_max_size:
+                        raise HTTPRequestEntityTooLarge(
+                            max_size=self._client_max_size, actual_size=body_size
+                        )
+                if not chunk:
+                    break
+            self._read_bytes = bytes(body)
+        return self._read_bytes
+
+    async def text(self) -> str:
+        """Return BODY as text using encoding from .charset."""
+        bytes_body = await self.read()
+        encoding = self.charset or "utf-8"
+        return bytes_body.decode(encoding)
+
+    async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
+        """Return BODY as JSON."""
+        body = await self.text()
+        return loads(body)
+
+    async def multipart(self) -> MultipartReader:
+        """Return async iterator to process BODY as multipart."""
+        return MultipartReader(self._headers, self._payload)
+
+    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
+        """Return POST parameters."""
+        if self._post is not None:
+            return self._post
+        if self._method not in self.POST_METHODS:
+            self._post = MultiDictProxy(MultiDict())
+            return self._post
+
+        content_type = self.content_type
+        if content_type not in (
+            "",
+            "application/x-www-form-urlencoded",
+            "multipart/form-data",
+        ):
+            self._post = MultiDictProxy(MultiDict())
+            return self._post
+
+        out: MultiDict[Union[str, bytes, FileField]] = MultiDict()
+
+        if content_type == "multipart/form-data":
+            multipart = await self.multipart()
+            max_size = self._client_max_size
+
+            field = await multipart.next()
+            while field is not None:
+                size = 0
+                field_ct = field.headers.get(hdrs.CONTENT_TYPE)
+
+                if isinstance(field, BodyPartReader):
+                    assert field.name is not None
+
+                    # Note that according to RFC 7578, the Content-Type header
+                    # is optional, even for files, so we can't assume it's
+                    # present.
+                    # https://tools.ietf.org/html/rfc7578#section-4.4
+                    if field.filename:
+                        # store file in temp file
+                        tmp = await self._loop.run_in_executor(
+                            None, tempfile.TemporaryFile
+                        )
+                        chunk = await field.read_chunk(size=2**16)
+                        while chunk:
+                            chunk = field.decode(chunk)
+                            await self._loop.run_in_executor(None, tmp.write, chunk)
+                            size += len(chunk)
+                            if 0 < max_size < size:
+                                await self._loop.run_in_executor(None, tmp.close)
+                                raise HTTPRequestEntityTooLarge(
+                                    max_size=max_size, actual_size=size
+                                )
+                            chunk = await field.read_chunk(size=2**16)
+                        await self._loop.run_in_executor(None, tmp.seek, 0)
+
+                        if field_ct is None:
+                            field_ct = "application/octet-stream"
+
+                        ff = FileField(
+                            field.name,
+                            field.filename,
+                            cast(io.BufferedReader, tmp),
+                            field_ct,
+                            field.headers,
+                        )
+                        out.add(field.name, ff)
+                    else:
+                        # deal with ordinary data
+                        value = await field.read(decode=True)
+                        if field_ct is None or field_ct.startswith("text/"):
+                            charset = field.get_charset(default="utf-8")
+                            out.add(field.name, value.decode(charset))
+                        else:
+                            out.add(field.name, value)
+                        size += len(value)
+                        if 0 < max_size < size:
+                            raise HTTPRequestEntityTooLarge(
+                                max_size=max_size, actual_size=size
+                            )
+                else:
+                    raise ValueError(
+                        "To decode nested multipart you need to use custom reader",
+                    )
+
+                field = await multipart.next()
+        else:
+            data = await self.read()
+            if data:
+                charset = self.charset or "utf-8"
+                out.extend(
+                    parse_qsl(
+                        data.rstrip().decode(charset),
+                        keep_blank_values=True,
+                        encoding=charset,
+                    )
+                )
+
+        self._post = MultiDictProxy(out)
+        return self._post
+
+
def get_extra_info(self, name: str, default: Any = None) -> Any:
|
798 |
+
"""Extra info from protocol transport"""
|
799 |
+
protocol = self._protocol
|
800 |
+
if protocol is None:
|
801 |
+
return default
|
802 |
+
|
803 |
+
transport = protocol.transport
|
804 |
+
if transport is None:
|
805 |
+
return default
|
806 |
+
|
807 |
+
return transport.get_extra_info(name, default)
|
808 |
+
|
809 |
+
def __repr__(self) -> str:
|
810 |
+
ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
|
811 |
+
"ascii"
|
812 |
+
)
|
813 |
+
return "<{} {} {} >".format(
|
814 |
+
self.__class__.__name__, self._method, ascii_encodable_path
|
815 |
+
)
|
816 |
+
|
817 |
+
def __eq__(self, other: object) -> bool:
|
818 |
+
return id(self) == id(other)
|
819 |
+
|
820 |
+
def __bool__(self) -> bool:
|
821 |
+
return True
|
822 |
+
|
823 |
+
async def _prepare_hook(self, response: StreamResponse) -> None:
|
824 |
+
return
|
825 |
+
|
826 |
+
def _cancel(self, exc: BaseException) -> None:
|
827 |
+
set_exception(self._payload, exc)
|
828 |
+
|
829 |
+
def _finish(self) -> None:
|
830 |
+
if self._post is None or self.content_type != "multipart/form-data":
|
831 |
+
return
|
832 |
+
|
833 |
+
# NOTE: Release file descriptors for the
|
834 |
+
# NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom`
|
835 |
+
# NOTE: instances of files sent within multipart request body
|
836 |
+
# NOTE: via HTTP POST request.
|
837 |
+
for file_name, file_field_object in self._post.items():
|
838 |
+
if isinstance(file_field_object, FileField):
|
839 |
+
file_field_object.file.close()
|
840 |
+
|
841 |
+
|
842 |
+
class Request(BaseRequest):
|
843 |
+
|
844 |
+
ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])
|
845 |
+
|
846 |
+
_match_info: Optional["UrlMappingMatchInfo"] = None
|
847 |
+
|
848 |
+
if DEBUG:
|
849 |
+
|
850 |
+
def __setattr__(self, name: str, val: Any) -> None:
|
851 |
+
if name not in self.ATTRS:
|
852 |
+
warnings.warn(
|
853 |
+
"Setting custom {}.{} attribute "
|
854 |
+
"is discouraged".format(self.__class__.__name__, name),
|
855 |
+
DeprecationWarning,
|
856 |
+
stacklevel=2,
|
857 |
+
)
|
858 |
+
super().__setattr__(name, val)
|
859 |
+
|
860 |
+
def clone(
|
861 |
+
self,
|
862 |
+
*,
|
863 |
+
method: Union[str, _SENTINEL] = sentinel,
|
864 |
+
rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
|
865 |
+
headers: Union[LooseHeaders, _SENTINEL] = sentinel,
|
866 |
+
scheme: Union[str, _SENTINEL] = sentinel,
|
867 |
+
host: Union[str, _SENTINEL] = sentinel,
|
868 |
+
remote: Union[str, _SENTINEL] = sentinel,
|
869 |
+
client_max_size: Union[int, _SENTINEL] = sentinel,
|
870 |
+
) -> "Request":
|
871 |
+
ret = super().clone(
|
872 |
+
method=method,
|
873 |
+
rel_url=rel_url,
|
874 |
+
headers=headers,
|
875 |
+
scheme=scheme,
|
876 |
+
host=host,
|
877 |
+
remote=remote,
|
878 |
+
client_max_size=client_max_size,
|
879 |
+
)
|
880 |
+
new_ret = cast(Request, ret)
|
881 |
+
new_ret._match_info = self._match_info
|
882 |
+
return new_ret
|
883 |
+
|
884 |
+
@reify
|
885 |
+
def match_info(self) -> "UrlMappingMatchInfo":
|
886 |
+
"""Result of route resolving."""
|
887 |
+
match_info = self._match_info
|
888 |
+
assert match_info is not None
|
889 |
+
return match_info
|
890 |
+
|
891 |
+
@property
|
892 |
+
def app(self) -> "Application":
|
893 |
+
"""Application instance."""
|
894 |
+
match_info = self._match_info
|
895 |
+
assert match_info is not None
|
896 |
+
return match_info.current_app
|
897 |
+
|
898 |
+
@property
|
899 |
+
def config_dict(self) -> ChainMapProxy:
|
900 |
+
match_info = self._match_info
|
901 |
+
assert match_info is not None
|
902 |
+
lst = match_info.apps
|
903 |
+
app = self.app
|
904 |
+
idx = lst.index(app)
|
905 |
+
sublist = list(reversed(lst[: idx + 1]))
|
906 |
+
return ChainMapProxy(sublist)
|
907 |
+
|
908 |
+
async def _prepare_hook(self, response: StreamResponse) -> None:
|
909 |
+
match_info = self._match_info
|
910 |
+
if match_info is None:
|
911 |
+
return
|
912 |
+
for app in match_info._apps:
|
913 |
+
if on_response_prepare := app.on_response_prepare:
|
914 |
+
await on_response_prepare.send(self, response)
|
venv/Lib/site-packages/aiohttp/web_response.py
ADDED
@@ -0,0 +1,838 @@
+import asyncio
+import collections.abc
+import datetime
+import enum
+import json
+import math
+import time
+import warnings
+import zlib
+from concurrent.futures import Executor
+from http import HTTPStatus
+from http.cookies import SimpleCookie
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Iterator,
+    MutableMapping,
+    Optional,
+    Union,
+    cast,
+)
+
+from multidict import CIMultiDict, istr
+
+from . import hdrs, payload
+from .abc import AbstractStreamWriter
+from .compression_utils import ZLibCompressor
+from .helpers import (
+    ETAG_ANY,
+    QUOTED_ETAG_RE,
+    ETag,
+    HeadersMixin,
+    must_be_empty_body,
+    parse_http_date,
+    rfc822_formatted_time,
+    sentinel,
+    should_remove_content_length,
+    validate_etag_value,
+)
+from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
+from .payload import Payload
+from .typedefs import JSONEncoder, LooseHeaders
+
+REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
+LARGE_BODY_SIZE = 1024**2
+
+__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
+
+
+if TYPE_CHECKING:
+    from .web_request import BaseRequest
+
+    BaseClass = MutableMapping[str, Any]
+else:
+    BaseClass = collections.abc.MutableMapping
+
+
+# TODO(py311): Convert to StrEnum for wider use
+class ContentCoding(enum.Enum):
+    # The content codings that we have support for.
+    #
+    # Additional registered codings are listed at:
+    # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
+    deflate = "deflate"
+    gzip = "gzip"
+    identity = "identity"
+
+
+CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding}
+
+############################################################
+# HTTP Response classes
+############################################################
+
+
+class StreamResponse(BaseClass, HeadersMixin):
+
+    _body: Union[None, bytes, bytearray, Payload]
+    _length_check = True
+    _body = None
+    _keep_alive: Optional[bool] = None
+    _chunked: bool = False
+    _compression: bool = False
+    _compression_strategy: int = zlib.Z_DEFAULT_STRATEGY
+    _compression_force: Optional[ContentCoding] = None
+    _req: Optional["BaseRequest"] = None
+    _payload_writer: Optional[AbstractStreamWriter] = None
+    _eof_sent: bool = False
+    _must_be_empty_body: Optional[bool] = None
+    _body_length = 0
+    _cookies: Optional[SimpleCookie] = None
+
+    def __init__(
+        self,
+        *,
+        status: int = 200,
+        reason: Optional[str] = None,
+        headers: Optional[LooseHeaders] = None,
+        _real_headers: Optional[CIMultiDict[str]] = None,
+    ) -> None:
+        """Initialize a new stream response object.
+
+        _real_headers is an internal parameter used to pass a pre-populated
+        headers object. It is used by the `Response` class to avoid copying
+        the headers when creating a new response object. It is not intended
+        to be used by external code.
+        """
+        self._state: Dict[str, Any] = {}
+
+        if _real_headers is not None:
+            self._headers = _real_headers
+        elif headers is not None:
+            self._headers: CIMultiDict[str] = CIMultiDict(headers)
+        else:
+            self._headers = CIMultiDict()
+
+        self._set_status(status, reason)
+
+    @property
+    def prepared(self) -> bool:
+        return self._eof_sent or self._payload_writer is not None
+
+    @property
+    def task(self) -> "Optional[asyncio.Task[None]]":
+        if self._req:
+            return self._req.task
+        else:
+            return None
+
+    @property
+    def status(self) -> int:
+        return self._status
+
+    @property
+    def chunked(self) -> bool:
+        return self._chunked
+
+    @property
+    def compression(self) -> bool:
+        return self._compression
+
+    @property
+    def reason(self) -> str:
+        return self._reason
+
+    def set_status(
+        self,
+        status: int,
+        reason: Optional[str] = None,
+    ) -> None:
+        assert (
+            not self.prepared
+        ), "Cannot change the response status code after the headers have been sent"
+        self._set_status(status, reason)
+
+    def _set_status(self, status: int, reason: Optional[str]) -> None:
+        self._status = int(status)
+        if reason is None:
+            reason = REASON_PHRASES.get(self._status, "")
+        elif "\n" in reason:
+            raise ValueError("Reason cannot contain \\n")
+        self._reason = reason
+
+    @property
+    def keep_alive(self) -> Optional[bool]:
+        return self._keep_alive
+
+    def force_close(self) -> None:
+        self._keep_alive = False
+
+    @property
+    def body_length(self) -> int:
+        return self._body_length
+
+    @property
+    def output_length(self) -> int:
+        warnings.warn("output_length is deprecated", DeprecationWarning)
+        assert self._payload_writer
+        return self._payload_writer.buffer_size
+
+    def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
+        """Enables automatic chunked transfer encoding."""
+        if hdrs.CONTENT_LENGTH in self._headers:
+            raise RuntimeError(
+                "You can't enable chunked encoding when a content length is set"
+            )
+        if chunk_size is not None:
+            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
+        self._chunked = True
+
+    def enable_compression(
+        self,
+        force: Optional[Union[bool, ContentCoding]] = None,
+        strategy: int = zlib.Z_DEFAULT_STRATEGY,
+    ) -> None:
+        """Enables response compression encoding."""
+        # Backwards compatibility for when force was a bool <0.17.
+        if isinstance(force, bool):
+            force = ContentCoding.deflate if force else ContentCoding.identity
+            warnings.warn(
+                "Using boolean for force is deprecated #3318", DeprecationWarning
+            )
+        elif force is not None:
+            assert isinstance(
+                force, ContentCoding
+            ), "force should be one of None, bool or ContentCoding"
+
+        self._compression = True
+        self._compression_force = force
+        self._compression_strategy = strategy
+
+    @property
+    def headers(self) -> "CIMultiDict[str]":
+        return self._headers
+
+    @property
+    def cookies(self) -> SimpleCookie:
+        if self._cookies is None:
+            self._cookies = SimpleCookie()
+        return self._cookies
+
+    def set_cookie(
+        self,
+        name: str,
+        value: str,
+        *,
+        expires: Optional[str] = None,
+        domain: Optional[str] = None,
+        max_age: Optional[Union[int, str]] = None,
+        path: str = "/",
+        secure: Optional[bool] = None,
+        httponly: Optional[bool] = None,
+        version: Optional[str] = None,
+        samesite: Optional[str] = None,
+    ) -> None:
+        """Set or update response cookie.
+
+        Sets new cookie or updates existent with new value.
+        Also updates only those params which are not None.
+        """
+        if self._cookies is None:
+            self._cookies = SimpleCookie()
+
+        self._cookies[name] = value
+        c = self._cookies[name]
+
+        if expires is not None:
+            c["expires"] = expires
+        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
+            del c["expires"]
+
+        if domain is not None:
+            c["domain"] = domain
+
+        if max_age is not None:
+            c["max-age"] = str(max_age)
+        elif "max-age" in c:
+            del c["max-age"]
+
+        c["path"] = path
+
+        if secure is not None:
+            c["secure"] = secure
+        if httponly is not None:
+            c["httponly"] = httponly
+        if version is not None:
+            c["version"] = version
+        if samesite is not None:
+            c["samesite"] = samesite
+
+    def del_cookie(
+        self,
+        name: str,
+        *,
+        domain: Optional[str] = None,
+        path: str = "/",
+        secure: Optional[bool] = None,
+        httponly: Optional[bool] = None,
+        samesite: Optional[str] = None,
+    ) -> None:
+        """Delete cookie.
+
+        Creates new empty expired cookie.
+        """
+        # TODO: do we need domain/path here?
+        if self._cookies is not None:
+            self._cookies.pop(name, None)
+        self.set_cookie(
+            name,
+            "",
+            max_age=0,
+            expires="Thu, 01 Jan 1970 00:00:00 GMT",
+            domain=domain,
+            path=path,
+            secure=secure,
+            httponly=httponly,
+            samesite=samesite,
+        )
+
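Note: a minimal sketch of the cookie API above from a handler's point of view. The cookie name and value are hypothetical; cookies set here are serialized into Set-Cookie headers when the response is prepared:

from aiohttp import web


async def login(request: web.Request) -> web.Response:
    resp = web.Response(text="ok")
    # Stored on the response's SimpleCookie; emitted as a Set-Cookie header.
    resp.set_cookie("session", "abc123", max_age=3600, httponly=True, samesite="Lax")
    return resp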
+    @property
+    def content_length(self) -> Optional[int]:
+        # Just a placeholder for adding setter
+        return super().content_length
+
+    @content_length.setter
+    def content_length(self, value: Optional[int]) -> None:
+        if value is not None:
+            value = int(value)
+            if self._chunked:
+                raise RuntimeError(
+                    "You can't set content length when chunked encoding is enabled"
+                )
+            self._headers[hdrs.CONTENT_LENGTH] = str(value)
+        else:
+            self._headers.pop(hdrs.CONTENT_LENGTH, None)
+
+    @property
+    def content_type(self) -> str:
+        # Just a placeholder for adding setter
+        return super().content_type
+
+    @content_type.setter
+    def content_type(self, value: str) -> None:
+        self.content_type  # read header values if needed
+        self._content_type = str(value)
+        self._generate_content_type_header()
+
+    @property
+    def charset(self) -> Optional[str]:
+        # Just a placeholder for adding setter
+        return super().charset
+
+    @charset.setter
+    def charset(self, value: Optional[str]) -> None:
+        ctype = self.content_type  # read header values if needed
+        if ctype == "application/octet-stream":
+            raise RuntimeError(
+                "Setting charset for application/octet-stream "
+                "doesn't make sense, setup content_type first"
+            )
+        assert self._content_dict is not None
+        if value is None:
+            self._content_dict.pop("charset", None)
+        else:
+            self._content_dict["charset"] = str(value).lower()
+        self._generate_content_type_header()
+
+    @property
+    def last_modified(self) -> Optional[datetime.datetime]:
+        """The value of Last-Modified HTTP header, or None.
+
+        This header is represented as a `datetime` object.
+        """
+        return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
+
+    @last_modified.setter
+    def last_modified(
+        self, value: Optional[Union[int, float, datetime.datetime, str]]
+    ) -> None:
+        if value is None:
+            self._headers.pop(hdrs.LAST_MODIFIED, None)
+        elif isinstance(value, (int, float)):
+            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+                "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
+            )
+        elif isinstance(value, datetime.datetime):
+            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+                "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
+            )
+        elif isinstance(value, str):
+            self._headers[hdrs.LAST_MODIFIED] = value
+
+    @property
+    def etag(self) -> Optional[ETag]:
+        quoted_value = self._headers.get(hdrs.ETAG)
+        if not quoted_value:
+            return None
+        elif quoted_value == ETAG_ANY:
+            return ETag(value=ETAG_ANY)
+        match = QUOTED_ETAG_RE.fullmatch(quoted_value)
+        if not match:
+            return None
+        is_weak, value = match.group(1, 2)
+        return ETag(
+            is_weak=bool(is_weak),
+            value=value,
+        )
+
+    @etag.setter
+    def etag(self, value: Optional[Union[ETag, str]]) -> None:
+        if value is None:
+            self._headers.pop(hdrs.ETAG, None)
+        elif (isinstance(value, str) and value == ETAG_ANY) or (
+            isinstance(value, ETag) and value.value == ETAG_ANY
+        ):
+            self._headers[hdrs.ETAG] = ETAG_ANY
+        elif isinstance(value, str):
+            validate_etag_value(value)
+            self._headers[hdrs.ETAG] = f'"{value}"'
+        elif isinstance(value, ETag) and isinstance(value.value, str):
+            validate_etag_value(value.value)
+            hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
+            self._headers[hdrs.ETAG] = hdr_value
+        else:
+            raise ValueError(
+                f"Unsupported etag type: {type(value)}. "
+                f"etag must be str, ETag or None"
+            )
+
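Note: a short illustration of the etag setter above, which quotes plain strings and adds the W/ prefix for weak validators. The import of ETag from aiohttp.helpers mirrors this file's own import and is assumed, not a documented public entry point:

from aiohttp.helpers import ETag
from aiohttp.web import Response

resp = Response(text="payload")
resp.etag = "v1"                            # -> ETag: "v1"
resp.etag = ETag(value="v1", is_weak=True)  # -> ETag: W/"v1"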
411 |
+
    def _generate_content_type_header(
        self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
    ) -> None:
        assert self._content_dict is not None
        assert self._content_type is not None
        params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
        if params:
            ctype = self._content_type + "; " + params
        else:
            ctype = self._content_type
        self._headers[CONTENT_TYPE] = ctype

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        if coding is ContentCoding.identity:
            return
        assert self._payload_writer is not None
        self._headers[hdrs.CONTENT_ENCODING] = coding.value
        self._payload_writer.enable_compression(
            coding.value, self._compression_strategy
        )
        # Compressed payload may have different content length,
        # remove the header
        self._headers.popall(hdrs.CONTENT_LENGTH, None)

    async def _start_compression(self, request: "BaseRequest") -> None:
        if self._compression_force:
            await self._do_start_compression(self._compression_force)
            return
        # Encoding comparisons should be case-insensitive
        # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
        accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
        for value, coding in CONTENT_CODINGS.items():
            if value in accept_encoding:
                await self._do_start_compression(coding)
                return

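# --- Illustrative sketch, not part of web_response.py. Compression is
# opt-in per response: enable_compression() sets self._compression, and
# the negotiation above then matches a coding against the request's
# Accept-Encoding header. ---

from aiohttp import web

async def compressed(request: web.Request) -> web.Response:
    resp = web.Response(text="x" * 10_000)
    resp.enable_compression()  # coding picked from Accept-Encoding at prepare()
    return resp

# --- end sketch ---
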
    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        if self._eof_sent:
            return None
        if self._payload_writer is not None:
            return self._payload_writer
        self._must_be_empty_body = must_be_empty_body(request.method, self.status)
        return await self._start(request)

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        self._req = request
        writer = self._payload_writer = request._payload_writer

        await self._prepare_headers()
        await request._prepare_hook(self)
        await self._write_headers()

        return writer

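# --- Illustrative streaming sketch, not part of web_response.py; the
# handler name and chunks are invented. prepare() sends the headers once
# and returns the existing writer on subsequent calls, and write() only
# works between prepare() and write_eof(). ---

from aiohttp import web

async def stream(request: web.Request) -> web.StreamResponse:
    resp = web.StreamResponse()
    resp.content_type = "text/plain"
    await resp.prepare(request)        # status line + headers written here
    for chunk in (b"alpha\n", b"beta\n"):
        await resp.write(chunk)
    await resp.write_eof()             # further write() calls raise RuntimeError
    return resp

# --- end sketch ---
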
    async def _prepare_headers(self) -> None:
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        keep_alive = self._keep_alive
        if keep_alive is None:
            keep_alive = request.keep_alive
            self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        if self._cookies:
            for cookie in self._cookies.values():
                value = cookie.output(header="")[1:]
                headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            if not self._must_be_empty_body:
                writer.enable_chunking()
                headers[hdrs.TRANSFER_ENCODING] = "chunked"
        elif self._length_check:  # Disabled for WebSockets
            writer.length = self.content_length
            if writer.length is None:
                if version >= HttpVersion11:
                    if not self._must_be_empty_body:
                        writer.enable_chunking()
                        headers[hdrs.TRANSFER_ENCODING] = "chunked"
                elif not self._must_be_empty_body:
                    keep_alive = False

        # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
        # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
        if self._must_be_empty_body:
            if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
                request.method, self.status
            ):
                del headers[hdrs.CONTENT_LENGTH]
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
            if hdrs.TRANSFER_ENCODING in headers:
                del headers[hdrs.TRANSFER_ENCODING]
        elif (writer.length if self._length_check else self.content_length) != 0:
            # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
            headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
        headers.setdefault(hdrs.DATE, rfc822_formatted_time())
        headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

        # connection header
        if hdrs.CONNECTION not in headers:
            if keep_alive:
                if version == HttpVersion10:
                    headers[hdrs.CONNECTION] = "keep-alive"
            elif version == HttpVersion11:
                headers[hdrs.CONNECTION] = "close"

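# --- Illustrative sketch, not part of web_response.py. _prepare_headers
# rejects chunked encoding on anything older than HTTP/1.1, which is the
# error a handler hits if it forces chunking for an HTTP/1.0 client. ---

from aiohttp import web

async def chunked(request: web.Request) -> web.StreamResponse:
    resp = web.StreamResponse()
    resp.enable_chunked_encoding()  # RuntimeError at prepare() for HTTP/1.0 peers
    await resp.prepare(request)
    await resp.write(b"part 1")
    await resp.write_eof()
    return resp

# --- end sketch ---
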
    async def _write_headers(self) -> None:
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        # status line
        version = request.version
        status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}"
        await writer.write_headers(status_line, self._headers)

    async def write(self, data: Union[bytes, bytearray, memoryview]) -> None:
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            raise RuntimeError("Cannot call write() after write_eof()")
        if self._payload_writer is None:
            raise RuntimeError("Cannot call write() before prepare()")

        await self._payload_writer.write(data)

    async def drain(self) -> None:
        assert not self._eof_sent, "EOF has already been sent"
        assert self._payload_writer is not None, "Response has not been started"
        warnings.warn(
            "drain method is deprecated, use await resp.write()",
            DeprecationWarning,
            stacklevel=2,
        )
        await self._payload_writer.drain()

    async def write_eof(self, data: bytes = b"") -> None:
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            return

        assert self._payload_writer is not None, "Response has not been started"

        await self._payload_writer.write_eof(data)
        self._eof_sent = True
        self._req = None
        self._body_length = self._payload_writer.output_size
        self._payload_writer = None

    def __repr__(self) -> str:
        if self._eof_sent:
            info = "eof"
        elif self.prepared:
            assert self._req is not None
            info = f"{self._req.method} {self._req.path} "
        else:
            info = "not prepared"
        return f"<{self.__class__.__name__} {self.reason} {info}>"

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    def __hash__(self) -> int:
        return hash(id(self))

    def __eq__(self, other: object) -> bool:
        return self is other

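# --- Illustrative sketch, not part of web_response.py; the key name is
# invented. The mapping methods above expose the per-response _state dict,
# so middlewares and handlers can attach arbitrary data to a response
# instance. ---

from aiohttp import web

resp = web.StreamResponse()
resp["trace_id"] = "abc123"   # stored in resp._state
assert len(resp) == 1 and list(resp) == ["trace_id"]
del resp["trace_id"]

# --- end sketch ---
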
class Response(StreamResponse):

    _compressed_body: Optional[bytes] = None

    def __init__(
        self,
        *,
        body: Any = None,
        status: int = 200,
        reason: Optional[str] = None,
        text: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        content_type: Optional[str] = None,
        charset: Optional[str] = None,
        zlib_executor_size: Optional[int] = None,
        zlib_executor: Optional[Executor] = None,
    ) -> None:
        if body is not None and text is not None:
            raise ValueError("body and text are not allowed together")

        if headers is None:
            real_headers: CIMultiDict[str] = CIMultiDict()
        else:
            real_headers = CIMultiDict(headers)

        if content_type is not None and "charset" in content_type:
            raise ValueError("charset must not be in content_type argument")

        if text is not None:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type or charset:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                # fast path for filling headers
                if not isinstance(text, str):
                    raise TypeError("text argument must be str (%r)" % type(text))
                if content_type is None:
                    content_type = "text/plain"
                if charset is None:
                    charset = "utf-8"
                real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
                body = text.encode(charset)
                text = None
        elif hdrs.CONTENT_TYPE in real_headers:
            if content_type is not None or charset is not None:
                raise ValueError(
                    "passing both Content-Type header and "
                    "content_type or charset params "
                    "is forbidden"
                )
        elif content_type is not None:
            if charset is not None:
                content_type += "; charset=" + charset
            real_headers[hdrs.CONTENT_TYPE] = content_type

        super().__init__(status=status, reason=reason, _real_headers=real_headers)

        if text is not None:
            self.text = text
        else:
            self.body = body

        self._zlib_executor_size = zlib_executor_size
        self._zlib_executor = zlib_executor

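# --- Illustrative sketch, not part of web_response.py. The constructor
# treats body and text as mutually exclusive, and a Content-Type header
# may not be combined with the content_type/charset parameters. ---

from aiohttp import web

page = web.Response(text="<h1>hi</h1>", content_type="text/html")  # charset defaults to utf-8
blob = web.Response(body=b"\x00\x01", content_type="application/octet-stream")
# web.Response(text="hi", body=b"hi")  # ValueError: body and text are not allowed together

# --- end sketch ---
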
    @property
    def body(self) -> Optional[Union[bytes, Payload]]:
        return self._body

    @body.setter
    def body(self, body: Any) -> None:
        if body is None:
            self._body = None
        elif isinstance(body, (bytes, bytearray)):
            self._body = body
        else:
            try:
                self._body = body = payload.PAYLOAD_REGISTRY.get(body)
            except payload.LookupError:
                raise ValueError("Unsupported body type %r" % type(body))

            headers = self._headers

            # set content-type
            if hdrs.CONTENT_TYPE not in headers:
                headers[hdrs.CONTENT_TYPE] = body.content_type

            # copy payload headers
            if body.headers:
                for key, value in body.headers.items():
                    if key not in headers:
                        headers[key] = value

        self._compressed_body = None

    @property
    def text(self) -> Optional[str]:
        if self._body is None:
            return None
        return self._body.decode(self.charset or "utf-8")

    @text.setter
    def text(self, text: str) -> None:
        assert text is None or isinstance(
            text, str
        ), "text argument must be str (%r)" % type(text)

        if self.content_type == "application/octet-stream":
            self.content_type = "text/plain"
        if self.charset is None:
            self.charset = "utf-8"

        self._body = text.encode(self.charset)
        self._compressed_body = None

    @property
    def content_length(self) -> Optional[int]:
        if self._chunked:
            return None

        if hdrs.CONTENT_LENGTH in self._headers:
            return int(self._headers[hdrs.CONTENT_LENGTH])

        if self._compressed_body is not None:
            # Return length of the compressed body
            return len(self._compressed_body)
        elif isinstance(self._body, Payload):
            # A payload without content length, or a compressed payload
            return None
        elif self._body is not None:
            return len(self._body)
        else:
            return 0

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        raise RuntimeError("Content length is set automatically")

    async def write_eof(self, data: bytes = b"") -> None:
        if self._eof_sent:
            return
        if self._compressed_body is None:
            body: Optional[Union[bytes, Payload]] = self._body
        else:
            body = self._compressed_body
        assert not data, f"data arg is not supported, got {data!r}"
        assert self._req is not None
        assert self._payload_writer is not None
        if body is None or self._must_be_empty_body:
            await super().write_eof()
        elif isinstance(self._body, Payload):
            await self._body.write(self._payload_writer)
            await super().write_eof()
        else:
            await super().write_eof(cast(bytes, body))

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        if hdrs.CONTENT_LENGTH in self._headers:
            if should_remove_content_length(request.method, self.status):
                del self._headers[hdrs.CONTENT_LENGTH]
        elif not self._chunked:
            if isinstance(self._body, Payload):
                if self._body.size is not None:
                    self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size)
            else:
                body_len = len(self._body) if self._body else "0"
                # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
                if body_len != "0" or (
                    self.status != 304 and request.method not in hdrs.METH_HEAD_ALL
                ):
                    self._headers[hdrs.CONTENT_LENGTH] = str(body_len)

        return await super()._start(request)

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        if self._chunked or isinstance(self._body, Payload):
            return await super()._do_start_compression(coding)
        if coding is ContentCoding.identity:
            return
        # Instead of using _payload_writer.enable_compression,
        # compress the whole body
        compressor = ZLibCompressor(
            encoding=coding.value,
            max_sync_chunk_size=self._zlib_executor_size,
            executor=self._zlib_executor,
        )
        assert self._body is not None
        if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
            warnings.warn(
                "Synchronous compression of large response bodies "
                f"({len(self._body)} bytes) might block the async event loop. "
                "Consider providing a custom value to zlib_executor_size/"
                "zlib_executor response properties or disabling compression on it."
            )
        self._compressed_body = (
            await compressor.compress(self._body) + compressor.flush()
        )
        self._headers[hdrs.CONTENT_ENCODING] = coding.value
        self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))

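# --- Illustrative sketch, not part of web_response.py; sizes and worker
# count are invented. For bytes bodies the override above compresses the
# whole payload eagerly, so the zlib_executor/zlib_executor_size
# constructor parameters can push that work to a thread pool instead of
# blocking the event loop. ---

from concurrent.futures import ThreadPoolExecutor
from aiohttp import web

executor = ThreadPoolExecutor(max_workers=2)

async def big(request: web.Request) -> web.Response:
    resp = web.Response(
        body=b"x" * 2_000_000,
        zlib_executor_size=64 * 1024,  # offload compression of bodies above 64 KiB
        zlib_executor=executor,
    )
    resp.enable_compression()
    return resp

# --- end sketch ---
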
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        else:
            text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
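
# --- Illustrative sketch, not part of web_response.py; the payload and
# indent choice are invented. json_response() serializes data with the
# given dumps callable and returns a Response with an application/json
# content type. ---

import functools
import json
from aiohttp import web

async def status(request: web.Request) -> web.Response:
    pretty = functools.partial(json.dumps, indent=2)
    return web.json_response({"status": "ok"}, dumps=pretty)

# --- end sketch ---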